# Code-dataset dump. Row schema: repo_name | path | copies | size | content |
# license | hash | line_mean | line_max | alpha_frac | autogenerated.
# Each file below is prefixed with a comment giving its repo, path, and license.
# migonzalvar/mfs2011-practicum-saas | webclient/agenda/views.py | license: isc
import datetime
import time
from django.http import Http404, HttpResponse
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils.translation import ugettext as _
from django.views.generic import TemplateView
from django.utils.timezone import utc, get_current_timezone
import socket
import requests
import pytz
from forms import ShiftForm, AppointmentForm, QuickAppointmentForm
from server_models import (Shift, Appointment, Slot, datetime_to_dtstring,
DEFAULT_SLOT_LENGTH, TIMEFORMAT, FIELD_SEPARATOR)
# API helpers
def str_to_datetime(str_date, str_time):
"""Converts a local date and a time strings into datetime UTC."""
tz = get_current_timezone()
isostring_naive_local = str_date + "T" + str_time
dt_naive_local = datetime.datetime.strptime(isostring_naive_local, "%Y-%m-%dT%H:%M")
dt_aware_local = tz.localize(dt_naive_local)
dt_aware_utc = dt_aware_local.astimezone(utc)
return dt_aware_utc
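# Illustrative use of str_to_datetime (hypothetical values; assumes the active
# Django timezone is, e.g., Europe/Madrid, which is UTC+1 in winter):
#
#   str_to_datetime("2012-03-01", "09:30")
#   # -> datetime.datetime(2012, 3, 1, 8, 30, tzinfo=<UTC>)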
# Actual views
def index(request):
data_dict = dict(version=1)
return render_to_response('agenda/index.html', data_dict,
context_instance=RequestContext(request))
class ResourceView(TemplateView):
def get_context_data(self, **kwargs):
context = super(ResourceView, self).get_context_data(**kwargs)
context[self.resource] = list(self.Model.all())
return context
def get(self, request, *args, **kwargs):
context = self.get_context_data(**kwargs)
context["form"] = self.Form()
return self.render_to_response(context)
def post(self, request, *args, **kwargs):
if request.POST.get("method", "") == "delete":
return self.pseudodelete(request, *args, **kwargs)
form = self.Form(request.POST)
if form.is_valid():
d = self.prepare_form_data(form)
resource = self.SaveModel(**d)
resource.save()
messages.success(request,
_('Resource %(id)s saved.') % {"id": resource.id})
redirect_url = request.POST.get("redirect", reverse(self.resource))
return redirect(redirect_url)
else:
messages.error(request, "Error validating data: %s" % repr(form))
context = self.get_context_data(**kwargs)
context["form"] = form
return self.render_to_response(context)
def pseudodelete(self, request, *args, **kwargs):
context = self.get_context_data(**kwargs)
oid = request.POST.get("id", None)
try:
resource = self.Model.delete_id(oid)
except self.Model.DoesNotExist:
raise Http404
messages.success(request,
_('Resource %(id)s deleted.') % {"id": oid})
return redirect(reverse(self.resource))
def prepare_form_data(self, form):
        raise NotImplementedError  # subclasses must provide form-data preparation
class ShiftView(ResourceView):
resource = "shifts"
Model = Shift
Form = ShiftForm
SaveModel = Shift
template_name = 'agenda/shifts.html'
def prepare_form_data(self, form):
date = form.cleaned_data["date"]
start = str_to_datetime(date, form.cleaned_data["start"])
end = str_to_datetime(date, form.cleaned_data["end"])
return {FIELD_SEPARATOR.join(("start", "datetime")): start,
FIELD_SEPARATOR.join(("end", "datetime")): end}
class AppointmentView(ResourceView):
resource = "appointments"
Model = Appointment
Form = AppointmentForm
SaveModel = Appointment
template_name = 'agenda/appointments.html'
def prepare_form_data(self, form):
date = form.cleaned_data["date"]
start = str_to_datetime(date, form.cleaned_data["start"])
end = str_to_datetime(date, form.cleaned_data["end"])
return {
FIELD_SEPARATOR.join(("start", "datetime")): start,
FIELD_SEPARATOR.join(("end", "datetime")): end}
class SlotView(ResourceView):
resource = "freeslots"
Model = Slot
Form = QuickAppointmentForm
SaveModel = Appointment
template_name = "agenda/slots.html"
def get_context_data(self, **kwargs):
context = super(ResourceView, self).get_context_data(**kwargs)
try:
year = int(kwargs['year'])
month = int(kwargs['month'])
day = int(kwargs['day'])
basedate = datetime.date(year, month, day)
        except (KeyError, ValueError, TypeError):
            # missing URL kwargs or an invalid date fall back to today
            basedate = datetime.date.today()
prev = basedate - datetime.timedelta(days=1)
next = basedate + datetime.timedelta(days=1)
selectdate = [basedate + datetime.timedelta(days=i) for i in range(-1, 7)]
start = datetime.datetime.combine(basedate, datetime.time(0))
end = datetime.datetime.combine(basedate, datetime.time.max)
context["basedate"] = basedate
context["prev"] = prev
context["next"] = next
context["selectdate"] = selectdate
context[self.resource] = self.Model.all(length=DEFAULT_SLOT_LENGTH,
start=datetime_to_dtstring(start),
end=datetime_to_dtstring(end))
return context
def prepare_form_data(self, form):
start = form.cleaned_data["start_dt"].astimezone(utc)
end = form.cleaned_data["end_dt"].astimezone(utc)
return {
FIELD_SEPARATOR.join(("start", "datetime")): start,
FIELD_SEPARATOR.join(("end", "datetime")): end, }
nexiles/nexiles.gateway.example | src/nexiles.gateway.example/setup.py | 1 | 1111 | # -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
version = '0.1.0'
long_description = (read('../../readme.rst'))
setup(name='nexiles.gateway.example',
version=version,
description="A example nexiles|gateway service",
long_description=long_description,
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='',
author='Stefan Eletzhofer',
author_email='[email protected]',
url='https://github.com/nexiles/nexiles.gateway.example',
license='proprietary',
packages=find_packages('.', exclude=['ez_setup']),
package_dir={'': '.'},
package_data={"nexiles.gateway.example": ["templates/*"]},
namespace_packages=['nexiles', 'nexiles.gateway'],
include_package_data=True,
zip_safe=True,
install_requires=['setuptools',
# 'nexiles.tools>=1.5.0'
],
)
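# Illustrative build/install commands for this setup.py (assumed, not part of
# the original file):
#
#   python setup.py sdist   # build a source distribution
#   pip install -e .        # editable install; the declared 'nexiles' and
#                           # 'nexiles.gateway' namespace packages resolve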
# CalebBell/fluids | fluids/safety_valve.py | license: mit
# -*- coding: utf-8 -*-
"""Chemical Engineering Design Library (ChEDL). Utilities for process modeling.
Copyright (C) 2016, Caleb Bell <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
This module contains functions for sizing and rating pressure relief valves.
At present, this consists of several functions from API 520.
For reporting bugs, adding feature requests, or submitting pull requests,
please use the `GitHub issue tracker <https://github.com/CalebBell/fluids/>`_
or contact the author at [email protected].
.. contents:: :local:
Interfaces
----------
.. autofunction:: API520_A_g
.. autofunction:: API520_A_steam
Functions and Data
------------------
.. autofunction:: API520_round_size
.. autofunction:: API520_C
.. autofunction:: API520_F2
.. autofunction:: API520_Kv
.. autofunction:: API520_N
.. autofunction:: API520_SH
.. autofunction:: API520_B
.. autofunction:: API520_W
.. autodata:: API526_letters
.. autodata:: API526_A_sq_inch
.. autodata:: API526_A
"""
from __future__ import division
from math import exp, sqrt
from fluids.constants import psi, inch, atm
from fluids.compressible import is_critical_flow
from fluids.numerics import interp, tck_interp2d_linear, bisplev
__all__ = ['API526_A_sq_inch', 'API526_letters', 'API526_A',
'API520_round_size', 'API520_C', 'API520_F2', 'API520_Kv', 'API520_N',
'API520_SH', 'API520_B', 'API520_W', 'API520_A_g', 'API520_A_steam']
API526_A_sq_inch = [0.110, 0.196, 0.307, 0.503, 0.785, 1.287, 1.838, 2.853, 3.60,
4.34, 6.38, 11.05, 16.00, 26.00] # square inches
'''list: Nominal relief area in for different valve sizes in API 520, [in^2]'''
API526_letters = ['D', 'E', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'Q', 'R','T']
'''list: Letter size designations for different valve sizes in API 520'''
inch2 = inch*inch
API526_A = [i*inch2 for i in API526_A_sq_inch]
'''list: Nominal relief area in for different valve sizes in API 520, [m^2]'''
del inch2
def API520_round_size(A):
r'''Rounds up the area from an API 520 calculation to an API526 standard
valve area. The returned area is always larger or equal to the input area.
Parameters
----------
A : float
Minimum discharge area [m^2]
Returns
-------
area : float
Actual discharge area [m^2]
Notes
-----
To obtain the letter designation of an input area, lookup the area with
the following:
API526_letters[API526_A.index(area)]
An exception is raised if the required relief area is larger than any of
the API 526 sizes.
Examples
--------
From [1]_, checked with many points on Table 8.
>>> API520_round_size(1E-4)
0.00012645136
>>> API526_letters[API526_A.index(API520_round_size(1E-4))]
'E'
References
----------
.. [1] API Standard 526.
'''
for area in API526_A:
if area >= A:
return area
raise ValueError('Required relief area is larger than can be provided with one valve')
def API520_C(k):
r'''Calculates coefficient C for use in API 520 critical flow relief valve
sizing.
.. math::
C = 0.03948\sqrt{k\left(\frac{2}{k+1}\right)^\frac{k+1}{k-1}}
Parameters
----------
k : float
Isentropic coefficient or ideal gas heat capacity ratio [-]
Returns
-------
C : float
Coefficient `C` [-]
Notes
-----
    If C cannot be established, assume a coefficient of 0.0239,
    the lowest possible value of C, which yields the largest (most
    conservative) relief area.
Although not dimensional, C varies with the units used.
If k is exactly equal to 1, the expression is undefined, and the formula
must be simplified as follows from an application of L'Hopital's rule.
.. math::
C = 0.03948\sqrt{\frac{1}{e}}
Examples
--------
From [1]_, checked with many points on Table 8.
>>> API520_C(1.35)
0.02669419967057233
References
----------
.. [1] API Standard 520, Part 1 - Sizing and Selection.
'''
if k != 1:
return 0.03948*sqrt(k*(2./(k+1.))**((k+1.)/(k-1.)))
else:
return 0.03948*sqrt(1./exp(1))
def API520_F2(k, P1, P2):
r'''Calculates coefficient F2 for subcritical flow for use in API 520
subcritical flow relief valve sizing.
.. math::
F_2 = \sqrt{\left(\frac{k}{k-1}\right)r^\frac{2}{k}
\left[\frac{1-r^\frac{k-1}{k}}{1-r}\right]}
.. math::
r = \frac{P_2}{P_1}
Parameters
----------
k : float
Isentropic coefficient or ideal gas heat capacity ratio [-]
P1 : float
Upstream relieving pressure; the set pressure plus the allowable
overpressure, plus atmospheric pressure, [Pa]
P2 : float
Built-up backpressure; the increase in pressure during flow at the
outlet of a pressure-relief device after it opens, [Pa]
Returns
-------
F2 : float
Subcritical flow coefficient `F2` [-]
Notes
-----
F2 is completely dimensionless.
Examples
--------
From [1]_ example 2, matches.
>>> API520_F2(1.8, 1E6, 7E5)
0.8600724121105563
References
----------
.. [1] API Standard 520, Part 1 - Sizing and Selection.
'''
r = P2/P1
return sqrt(k/(k-1)*r**(2./k) * ((1-r**((k-1.)/k))/(1.-r)))
def API520_Kv(Re):
r'''Calculates correction due to viscosity for liquid flow for use in
API 520 relief valve sizing.
.. math::
K_v = \left(0.9935 + \frac{2.878}{Re^{0.5}} + \frac{342.75}
{Re^{1.5}}\right)^{-1}
Parameters
----------
Re : float
Reynolds number for flow out the valve [-]
Returns
-------
Kv : float
Correction due to viscosity [-]
Notes
-----
Reynolds number in the standard is defined as follows, with Q in L/min, G1
as specific gravity, mu in centipoise, and area in mm^2:
.. math::
Re = \frac{Q(18800G_1)}{\mu \sqrt{A}}
It is unclear how this expression was derived with a constant of 18800;
the following code demonstrates what the constant should be:
>>> from scipy.constants import *
>>> liter/minute*1000./(0.001*(milli**2)**0.5)
16666.666666666668
Examples
--------
From [1]_, checked with example 5.
>>> API520_Kv(100)
0.6157445891444229
References
----------
.. [1] API Standard 520, Part 1 - Sizing and Selection.
'''
return (0.9935 + 2.878/sqrt(Re) + 342.75/Re**1.5)**-1.0
def API520_N(P1):
r'''Calculates correction due to steam pressure for steam flow for use in
API 520 relief valve sizing.
.. math::
K_N = \frac{0.02764P_1-1000}{0.03324P_1-1061}
Parameters
----------
P1 : float
Upstream relieving pressure; the set pressure plus the allowable
overpressure, plus atmospheric pressure, [Pa]
Returns
-------
KN : float
Correction due to steam temperature [-]
Notes
-----
Although not dimensional, KN varies with the units used.
For temperatures above 922 K or 22057 kPa, KN is not defined.
Internally, units of kPa are used to match the equation in the standard.
Examples
--------
Custom example:
>>> API520_N(1774700)
0.9490406958152466
References
----------
.. [1] API Standard 520, Part 1 - Sizing and Selection.
'''
P1 = P1/1000. # Pa to kPa
return (0.02764*P1-1000.)/(0.03324*P1-1061)
_KSH_psigs = [15, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200, 220, 240, 260,
280, 300, 350, 400, 500, 600, 800, 1000, 1250, 1500, 1750, 2000,
2500, 3000]
_KSH_tempFs = [300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200]
# _KSH_psigs converted from psig to Pa
_KSH_Pa = [204746.3593975254, 239220.14586336722, 377115.29172673443,
515010.4375901016, 652905.5834534689, 790800.7293168361,
928695.8751802032, 1066591.0210435705, 1204486.1669069377,
1342381.312770305, 1480276.4586336722, 1618171.6044970395,
1756066.7503604065, 1893961.8962237737, 2031857.042087141,
2169752.187950508, 2514490.0526089263, 2859227.9172673444,
3548703.64658418, 4238179.375901016, 5617130.834534689,
6996082.29316836, 8719771.616460452, 10443460.939752541,
12167150.263044631, 13890839.58633672, 17338218.232920904,
20785596.879505083]
# _KSH_tempFs converted from F to K
_KSH_tempKs = [422.03888888888889, 477.59444444444443, 533.14999999999998,
588.70555555555552, 644.26111111111106, 699.81666666666661,
755.37222222222226, 810.92777777777769, 866.48333333333335,
922.03888888888889]
_KSH_factors = [[1, 0.98, 0.93, 0.88, 0.84, 0.8, 0.77, 0.74, 0.72, 0.7],
[1, 0.98, 0.93, 0.88, 0.84, 0.8, 0.77, 0.74, 0.72, 0.7],
[1, 0.99, 0.93, 0.88, 0.84, 0.81, 0.77, 0.74, 0.72, 0.7],
[1, 0.99, 0.93, 0.88, 0.84, 0.81, 0.77, 0.75, 0.72, 0.7],
[1, 0.99, 0.93, 0.88, 0.84, 0.81, 0.77, 0.75, 0.72, 0.7],
[1, 0.99, 0.94, 0.89, 0.84, 0.81, 0.77, 0.75, 0.72, 0.7],
[1, 0.99, 0.94, 0.89, 0.84, 0.81, 0.78, 0.75, 0.72, 0.7],
[1, 0.99, 0.94, 0.89, 0.85, 0.81, 0.78, 0.75, 0.72, 0.7],
[1, 0.99, 0.94, 0.89, 0.85, 0.81, 0.78, 0.75, 0.72, 0.7],
[1, 0.99, 0.94, 0.89, 0.85, 0.81, 0.78, 0.75, 0.72, 0.7],
[1, 0.99, 0.95, 0.89, 0.85, 0.81, 0.78, 0.75, 0.72, 0.7],
[1, 0.99, 0.95, 0.89, 0.85, 0.81, 0.78, 0.75, 0.72, 0.7],
[1, 1, 0.95, 0.9, 0.85, 0.81, 0.78, 0.75, 0.72, 0.7],
[1, 1, 0.95, 0.9, 0.85, 0.81, 0.78, 0.75, 0.72, 0.7],
[1, 1, 0.96, 0.9, 0.85, 0.81, 0.78, 0.75, 0.72, 0.7],
[1, 1, 0.96, 0.9, 0.85, 0.81, 0.78, 0.75, 0.72, 0.7],
[1, 1, 0.96, 0.9, 0.86, 0.82, 0.78, 0.75, 0.72, 0.7],
[1, 1, 0.96, 0.91, 0.86, 0.82, 0.78, 0.75, 0.72, 0.7],
[1, 1, 0.96, 0.92, 0.86, 0.82, 0.78, 0.75, 0.73, 0.7],
[1, 1, 0.97, 0.92, 0.87, 0.82, 0.79, 0.75, 0.73, 0.7],
[1, 1, 1, 0.95, 0.88, 0.83, 0.79, 0.76, 0.73, 0.7],
[1, 1, 1, 0.96, 0.89, 0.84, 0.78, 0.76, 0.73, 0.71],
[1, 1, 1, 0.97, 0.91, 0.85, 0.8, 0.77, 0.74, 0.71],
[1, 1, 1, 1, 0.93, 0.86, 0.81, 0.77, 0.74, 0.71],
[1, 1, 1, 1, 0.94, 0.86, 0.81, 0.77, 0.73, 0.7],
[1, 1, 1, 1, 0.95, 0.86, 0.8, 0.76, 0.72, 0.69],
[1, 1, 1, 1, 0.95, 0.85, 0.78, 0.73, 0.69, 0.66],
[1, 1, 1, 1, 1, 0.82, 0.74, 0.69, 0.65, 0.62]]
API520_KSH_tck = tck_interp2d_linear(_KSH_tempKs, _KSH_Pa, _KSH_factors)
def API520_SH(T1, P1):
r'''Calculates correction due to steam superheat for steam flow for use in
API 520 relief valve sizing. 2D interpolation among a table with 28
pressures and 10 temperatures is performed.
Parameters
----------
T1 : float
Temperature of the fluid entering the valve [K]
P1 : float
Upstream relieving pressure; the set pressure plus the allowable
overpressure, plus atmospheric pressure, [Pa]
Returns
-------
KSH : float
Correction due to steam superheat [-]
Notes
-----
For P above 20679 kPag, use the critical flow model.
Superheat cannot be above 649 degrees Celsius.
If T1 is above 149 degrees Celsius, returns 1.
Examples
--------
Custom example from table 9:
>>> API520_SH(593+273.15, 1066.325E3)
0.7201800000000002
References
----------
.. [1] API Standard 520, Part 1 - Sizing and Selection.
'''
if P1 > 20780325.0: # 20679E3+atm
raise ValueError('For P above 20679 kPag, use the critical flow model')
if T1 > 922.15:
        raise ValueError('Superheat cannot be above 649 degrees Celsius')
if T1 < 422.15:
return 1. # No superheat under 15 psig
return float(bisplev(T1, P1, API520_KSH_tck))
# Kw, for liquids. Applicable for all overpressures.
Kw_x = [15., 16.5493, 17.3367, 18.124, 18.8235, 19.5231, 20.1351, 20.8344,
21.4463, 22.0581, 22.9321, 23.5439, 24.1556, 24.7674, 25.0296, 25.6414,
26.2533, 26.8651, 27.7393, 28.3511, 28.9629, 29.6623, 29.9245, 30.5363,
31.2357, 31.8475, 32.7217, 33.3336, 34.0329, 34.6448, 34.8196, 35.4315,
36.1308, 36.7428, 37.7042, 38.3162, 39.0154, 39.7148, 40.3266, 40.9384,
41.6378, 42.7742, 43.386, 43.9978, 44.6098, 45.2216, 45.921, 46.5329,
47.7567, 48.3685, 49.0679, 49.6797, 50.]
Kw_y = [1, 0.996283, 0.992565, 0.987918, 0.982342, 0.976766, 0.97119, 0.964684,
0.958178, 0.951673, 0.942379, 0.935874, 0.928439, 0.921933, 0.919145,
0.912639, 0.906134, 0.899628, 0.891264, 0.884758, 0.878253, 0.871747,
0.868959, 0.862454, 0.855948, 0.849442, 0.841078, 0.834572, 0.828067,
0.821561, 0.819703, 0.814126, 0.806691, 0.801115, 0.790892, 0.785316,
0.777881, 0.771375, 0.76487, 0.758364, 0.751859, 0.740706, 0.734201,
0.727695, 0.722119, 0.715613, 0.709108, 0.702602, 0.69052, 0.684015,
0.677509, 0.671004, 0.666357]
def API520_W(Pset, Pback):
r'''Calculates capacity correction due to backpressure on balanced
spring-loaded PRVs in liquid service. For pilot operated valves,
    this is always 1. Applicable up to 50% gauge backpressure.
    For use in API 520 relief valve sizing, 1D interpolation among a table with
53 backpressures is performed.
Parameters
----------
Pset : float
Set pressure for relief [Pa]
Pback : float
Backpressure, [Pa]
Returns
-------
KW : float
Correction due to liquid backpressure [-]
Notes
-----
If the calculated gauge backpressure is less than 15%, a value of 1 is
returned.
Examples
--------
Custom example from figure 31:
    >>> API520_W(1E6, 3E5) # ~22% gauge backpressure
0.9511471848008564
References
----------
.. [1] API Standard 520, Part 1 - Sizing and Selection.
'''
gauge_backpressure = (Pback-atm)/(Pset-atm)*100.0 # in percent
if gauge_backpressure < 15.0:
return 1.0
return interp(gauge_backpressure, Kw_x, Kw_y)
# Kb Backpressure correction factor, for gases
Kb_16_over_x = [37.6478, 38.1735, 38.6991, 39.2904, 39.8817, 40.4731, 40.9987,
41.59, 42.1156, 42.707, 43.2326, 43.8239, 44.4152, 44.9409,
45.5322, 46.0578, 46.6491, 47.2405, 47.7661, 48.3574, 48.883,
49.4744, 50.0]
Kb_16_over_y = [0.998106, 0.994318, 0.99053, 0.985795, 0.982008, 0.97822,
0.973485, 0.96875, 0.964962, 0.961174, 0.956439, 0.951705,
0.947917, 0.943182, 0.939394, 0.935606, 0.930871, 0.926136,
0.921402, 0.918561, 0.913826, 0.910038, 0.90625]
Kb_10_over_x = [30.0263, 30.6176, 31.1432, 31.6689, 32.1945, 32.6544, 33.18,
33.7057, 34.1656, 34.6255, 35.0854, 35.5453, 36.0053, 36.4652,
36.9251, 37.385, 37.8449, 38.2392, 38.6334, 39.0276, 39.4875,
39.9474, 40.4074, 40.8016, 41.1958, 41.59, 42.0499, 42.4442,
42.8384, 43.2326, 43.6925, 44.0867, 44.4809, 44.8752, 45.2694,
45.6636, 46.0578, 46.452, 46.8463, 47.2405, 47.6347, 48.0289,
48.4231, 48.883, 49.2773, 49.6715]
Kb_10_over_y = [0.998106, 0.995265, 0.99053, 0.985795, 0.981061, 0.975379,
0.969697, 0.963068, 0.957386, 0.950758, 0.945076, 0.938447,
0.930871, 0.925189, 0.918561, 0.910985, 0.904356, 0.897727,
0.891098, 0.883523, 0.876894, 0.870265, 0.862689, 0.856061,
0.848485, 0.840909, 0.83428, 0.827652, 0.820076, 0.8125,
0.805871, 0.798295, 0.79072, 0.783144, 0.775568, 0.768939,
0.762311, 0.754735, 0.747159, 0.739583, 0.732008, 0.724432,
0.716856, 0.70928, 0.701705, 0.695076]
def API520_B(Pset, Pback, overpressure=0.1):
r'''Calculates capacity correction due to backpressure on balanced
spring-loaded PRVs in vapor service. For pilot operated valves,
    this is always 1. Applicable up to 50% gauge backpressure.
    For use in API 520 relief valve sizing, 1D interpolation among tables of
    23 or 46 backpressures (depending on the allowed overpressure) is performed.
Parameters
----------
Pset : float
Set pressure for relief [Pa]
Pback : float
Backpressure, [Pa]
overpressure : float, optional
The maximum fraction overpressure; one of 0.1, 0.16, or 0.21, [-]
Returns
-------
Kb : float
Correction due to vapor backpressure [-]
Notes
-----
If the calculated gauge backpressure is less than 30%, 38%, or 50% for
overpressures of 0.1, 0.16, or 0.21, a value of 1 is returned.
Percent gauge backpressure must be under 50%.
Examples
--------
Custom examples from figure 30:
>>> API520_B(1E6, 5E5)
0.7929945420944432
References
----------
.. [1] API Standard 520, Part 1 - Sizing and Selection.
'''
gauge_backpressure = (Pback-atm)/(Pset-atm)*100.0 # in percent
if overpressure not in (0.1, 0.16, 0.21):
raise ValueError('Only overpressure of 10%, 16%, or 21% are permitted')
    if (overpressure == 0.1 and gauge_backpressure < 30.0) or (
        overpressure == 0.16 and gauge_backpressure < 38.0) or (
        overpressure == 0.21 and gauge_backpressure <= 50.0):
        # <= 50 for the 21% case: the curves provide no data beyond 50%, so
        # the boundary value returns 1.0 instead of leaving Kb unassigned
        return 1.0
elif gauge_backpressure > 50.0:
raise ValueError('Gauge pressure must be < 50%')
if overpressure == 0.16:
Kb = interp(gauge_backpressure, Kb_16_over_x, Kb_16_over_y)
elif overpressure == 0.1:
Kb = interp(gauge_backpressure, Kb_10_over_x, Kb_10_over_y)
return Kb
def API520_A_g(m, T, Z, MW, k, P1, P2=101325, Kd=0.975, Kb=1, Kc=1):
r'''Calculates required relief valve area for an API 520 valve passing
a gas or a vapor, at either critical or sub-critical flow.
For critical flow:
.. math::
A = \frac{m}{CK_dP_1K_bK_c}\sqrt{\frac{TZ}{M}}
For sub-critical flow:
.. math::
A = \frac{17.9m}{F_2K_dK_c}\sqrt{\frac{TZ}{MP_1(P_1-P_2)}}
Parameters
----------
m : float
Mass flow rate of vapor through the valve, [kg/s]
T : float
Temperature of vapor entering the valve, [K]
Z : float
Compressibility factor of the vapor, [-]
MW : float
Molecular weight of the vapor, [g/mol]
k : float
Isentropic coefficient or ideal gas heat capacity ratio [-]
P1 : float
Upstream relieving pressure; the set pressure plus the allowable
overpressure, plus atmospheric pressure, [Pa]
P2 : float, optional
Built-up backpressure; the increase in pressure during flow at the
outlet of a pressure-relief device after it opens, [Pa]
Kd : float, optional
The effective coefficient of discharge, from the manufacturer or for
preliminary sizing, using 0.975 normally or 0.62 when used with a
rupture disc as described in [1]_, []
Kb : float, optional
Correction due to vapor backpressure [-]
Kc : float, optional
        Combination correction factor for installation with a rupture disk
upstream of the PRV, []
Returns
-------
A : float
Minimum area for relief valve according to [1]_, [m^2]
Notes
-----
    Units are internally kg/hr, kPa, and mm^2 to match [1]_.
Examples
--------
Example 1 from [1]_ for critical flow, matches:
>>> API520_A_g(m=24270/3600., T=348., Z=0.90, MW=51., k=1.11, P1=670E3, Kb=1, Kc=1)
0.0036990460646834414
Example 2 from [1]_ for sub-critical flow, matches:
>>> API520_A_g(m=24270/3600., T=348., Z=0.90, MW=51., k=1.11, P1=670E3, P2=532E3, Kd=0.975, Kb=1, Kc=1)
0.004248358775943481
The mass flux in (kg/(s*m^2)) can be found by dividing the specified mass
flow by the calculated area:
>>> (24270/3600.)/API520_A_g(m=24270/3600., T=348., Z=0.90, MW=51., k=1.11, P1=670E3, Kb=1, Kc=1)
1822.541960488834
References
----------
.. [1] API Standard 520, Part 1 - Sizing and Selection.
'''
    P1, P2 = P1/1000., P2/1000.  # Pa to kPa in the standard
m = m*3600. # kg/s to kg/hr
if is_critical_flow(P1, P2, k):
C = API520_C(k)
A = m/(C*Kd*Kb*Kc*P1)*sqrt(T*Z/MW)
else:
F2 = API520_F2(k, P1, P2)
A = 17.9*m/(F2*Kd*Kc)*sqrt(T*Z/(MW*P1*(P1-P2)))
return A*0.001**2 # convert mm^2 to m^2
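# Illustrative end-to-end gas sizing workflow combining the functions above
# (all numeric inputs are assumed example values, not taken from the standard):
#
#   Kb = API520_B(Pset=1E6, Pback=5E5)             # vapor backpressure correction
#   A = API520_A_g(m=6.74, T=348., Z=0.90, MW=51., k=1.11, P1=670E3, Kb=Kb)
#   area = API520_round_size(A)                    # round up to an API 526 size
#   letter = API526_letters[API526_A.index(area)]  # letter designation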
def API520_A_steam(m, T, P1, Kd=0.975, Kb=1, Kc=1):
r'''Calculates required relief valve area for an API 520 valve passing
a steam, at either saturation or superheat but not partially condensed.
.. math::
A = \frac{190.5m}{P_1 K_d K_b K_c K_N K_{SH}}
Parameters
----------
m : float
Mass flow rate of steam through the valve, [kg/s]
T : float
Temperature of steam entering the valve, [K]
P1 : float
Upstream relieving pressure; the set pressure plus the allowable
overpressure, plus atmospheric pressure, [Pa]
Kd : float, optional
The effective coefficient of discharge, from the manufacturer or for
preliminary sizing, using 0.975 normally or 0.62 when used with a
rupture disc as described in [1]_, []
Kb : float, optional
Correction due to vapor backpressure [-]
Kc : float, optional
Combination correction factor for installation with a rupture disk
upstream of the PRV, []
Returns
-------
A : float
Minimum area for relief valve according to [1]_, [m^2]
Notes
-----
    Units are internally kg/hr, kPa, and mm^2 to match [1]_.
With the provided temperature and pressure, the KN coefficient is
calculated with the function API520_N; as is the superheat correction KSH,
with the function API520_SH.
Examples
--------
Example 4 from [1]_, matches:
>>> API520_A_steam(m=69615/3600., T=592.5, P1=12236E3, Kd=0.975, Kb=1, Kc=1)
0.0011034712423692733
References
----------
.. [1] API Standard 520, Part 1 - Sizing and Selection.
'''
KN = API520_N(P1)
KSH = API520_SH(T, P1)
P1 = P1/1000. # Pa to kPa
m = m*3600. # kg/s to kg/hr
A = 190.5*m/(P1*Kd*Kb*Kc*KN*KSH)
return A*0.001**2 # convert mm^2 to m^2
#!/usr/bin/env python3
# untrustbank/litecoin | test/functional/test_framework/test_node.py | license: mit
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Class for bitcoind node under test"""
import decimal
import errno
import http.client
import json
import logging
import os
import re
import subprocess
import time
from .authproxy import JSONRPCException
from .util import (
assert_equal,
delete_cookie_file,
get_rpc_proxy,
rpc_url,
wait_until,
p2p_port,
)
# For Python 3.4 compatibility
JSONDecodeError = getattr(json, "JSONDecodeError", ValueError)
BITCOIND_PROC_WAIT_TIMEOUT = 60
class TestNode():
"""A class for representing a bitcoind node under test.
This class contains:
- state about the node (whether it's running, etc)
- a Python subprocess.Popen object representing the running process
- an RPC connection to the node
- one or more P2P connections to the node
To make things easier for the test writer, any unrecognised messages will
be dispatched to the RPC connection."""
def __init__(self, i, dirname, extra_args, rpchost, timewait, binary, stderr, mocktime, coverage_dir, use_cli=False):
self.index = i
self.datadir = os.path.join(dirname, "node" + str(i))
self.rpchost = rpchost
if timewait:
self.rpc_timeout = timewait
else:
# Wait for up to 60 seconds for the RPC server to respond
self.rpc_timeout = 60
if binary is None:
self.binary = os.getenv("LITECOIND", "litecoind")
else:
self.binary = binary
self.stderr = stderr
self.coverage_dir = coverage_dir
# Most callers will just need to add extra args to the standard list below. For those callers that need more flexibity, they can just set the args property directly.
self.extra_args = extra_args
self.args = [self.binary, "-datadir=" + self.datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(mocktime), "-uacomment=testnode%d" % i]
self.cli = TestNodeCLI(os.getenv("LITECOINCLI", "litecoin-cli"), self.datadir)
self.use_cli = use_cli
self.running = False
self.process = None
self.rpc_connected = False
self.rpc = None
self.url = None
self.log = logging.getLogger('TestFramework.node%d' % i)
self.cleanup_on_exit = True # Whether to kill the node when this object goes away
self.p2ps = []
def __del__(self):
# Ensure that we don't leave any bitcoind processes lying around after
# the test ends
if self.process and self.cleanup_on_exit:
# Should only happen on test failure
# Avoid using logger, as that may have already been shutdown when
# this destructor is called.
print("Cleaning up leftover process")
self.process.kill()
def __getattr__(self, name):
"""Dispatches any unrecognised messages to the RPC connection or a CLI instance."""
if self.use_cli:
return getattr(self.cli, name)
else:
assert self.rpc_connected and self.rpc is not None, "Error: no RPC connection"
return getattr(self.rpc, name)
def start(self, extra_args=None, stderr=None, *args, **kwargs):
"""Start the node."""
if extra_args is None:
extra_args = self.extra_args
if stderr is None:
stderr = self.stderr
# Delete any existing cookie file -- if such a file exists (eg due to
# unclean shutdown), it will get overwritten anyway by bitcoind, and
# potentially interfere with our attempt to authenticate
delete_cookie_file(self.datadir)
self.process = subprocess.Popen(self.args + extra_args, stderr=stderr, *args, **kwargs)
self.running = True
self.log.debug("litecoind started, waiting for RPC to come up")
def wait_for_rpc_connection(self):
"""Sets up an RPC connection to the bitcoind process. Returns False if unable to connect."""
# Poll at a rate of four times per second
poll_per_s = 4
for _ in range(poll_per_s * self.rpc_timeout):
assert self.process.poll() is None, "litecoind exited with status %i during initialization" % self.process.returncode
try:
self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir)
self.rpc.getblockcount()
# If the call to getblockcount() succeeds then the RPC connection is up
self.rpc_connected = True
self.url = self.rpc.url
self.log.debug("RPC successfully started")
return
except IOError as e:
if e.errno != errno.ECONNREFUSED: # Port not yet open?
raise # unknown IO error
except JSONRPCException as e: # Initialization phase
if e.error['code'] != -28: # RPC in warmup?
raise # unknown JSON RPC exception
            except ValueError as e:  # cookie file not found and no rpcuser or rpcpassword; bitcoind still starting
if "No RPC credentials" not in str(e):
raise
time.sleep(1.0 / poll_per_s)
raise AssertionError("Unable to connect to litecoind")
def get_wallet_rpc(self, wallet_name):
if self.use_cli:
return self.cli("-rpcwallet={}".format(wallet_name))
else:
assert self.rpc_connected
assert self.rpc
wallet_path = "wallet/%s" % wallet_name
return self.rpc / wallet_path
def stop_node(self):
"""Stop the node."""
if not self.running:
return
self.log.debug("Stopping node")
try:
self.stop()
except http.client.CannotSendRequest:
self.log.exception("Unable to stop node.")
del self.p2ps[:]
def is_node_stopped(self):
"""Checks whether the node has stopped.
Returns True if the node has stopped. False otherwise.
This method is responsible for freeing resources (self.process)."""
if not self.running:
return True
return_code = self.process.poll()
if return_code is None:
return False
# process has stopped. Assert that it didn't return an error code.
assert_equal(return_code, 0)
self.running = False
self.process = None
self.rpc_connected = False
self.rpc = None
self.log.debug("Node stopped")
return True
def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT):
wait_until(self.is_node_stopped, timeout=timeout)
def node_encrypt_wallet(self, passphrase):
""""Encrypts the wallet.
This causes bitcoind to shutdown, so this method takes
care of cleaning up resources."""
self.encryptwallet(passphrase)
self.wait_until_stopped()
def add_p2p_connection(self, p2p_conn, *args, **kwargs):
"""Add a p2p connection to the node.
This method adds the p2p connection to the self.p2ps list and also
returns the connection to the caller."""
if 'dstport' not in kwargs:
kwargs['dstport'] = p2p_port(self.index)
if 'dstaddr' not in kwargs:
kwargs['dstaddr'] = '127.0.0.1'
p2p_conn.peer_connect(*args, **kwargs)
self.p2ps.append(p2p_conn)
return p2p_conn
@property
def p2p(self):
"""Return the first p2p connection
Convenience property - most tests only use a single p2p connection to each
node, so this saves having to write node.p2ps[0] many times."""
assert self.p2ps, "No p2p connection"
return self.p2ps[0]
def disconnect_p2ps(self):
"""Close all p2p connections to the node."""
for p in self.p2ps:
p.peer_disconnect()
del self.p2ps[:]
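# Illustrative lifecycle of a TestNode (sketch; assumes a test framework has
# already constructed `node` with a valid datadir and arguments):
#
#   node.start()
#   node.wait_for_rpc_connection()
#   node.getblockcount()   # unknown attributes dispatch to the RPC connection
#   node.stop_node()
#   node.wait_until_stopped()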
class TestNodeCLIAttr:
def __init__(self, cli, command):
self.cli = cli
self.command = command
def __call__(self, *args, **kwargs):
return self.cli.send_cli(self.command, *args, **kwargs)
def get_request(self, *args, **kwargs):
return lambda: self(*args, **kwargs)
class TestNodeCLI():
"""Interface to bitcoin-cli for an individual node"""
def __init__(self, binary, datadir):
self.options = []
self.binary = binary
self.datadir = datadir
self.input = None
self.log = logging.getLogger('TestFramework.bitcoincli')
def __call__(self, *options, input=None):
# TestNodeCLI is callable with bitcoin-cli command-line options
cli = TestNodeCLI(self.binary, self.datadir)
cli.options = [str(o) for o in options]
cli.input = input
return cli
def __getattr__(self, command):
return TestNodeCLIAttr(self, command)
def batch(self, requests):
results = []
for request in requests:
try:
results.append(dict(result=request()))
except JSONRPCException as e:
results.append(dict(error=e))
return results
def send_cli(self, command=None, *args, **kwargs):
"""Run bitcoin-cli command. Deserializes returned string as python object."""
pos_args = [str(arg) for arg in args]
named_args = [str(key) + "=" + str(value) for (key, value) in kwargs.items()]
assert not (pos_args and named_args), "Cannot use positional arguments and named arguments in the same bitcoin-cli call"
p_args = [self.binary, "-datadir=" + self.datadir] + self.options
if named_args:
p_args += ["-named"]
if command is not None:
p_args += [command]
p_args += pos_args + named_args
self.log.debug("Running litecoin-cli command: %s" % command)
process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
cli_stdout, cli_stderr = process.communicate(input=self.input)
returncode = process.poll()
if returncode:
match = re.match(r'error code: ([-0-9]+)\nerror message:\n(.*)', cli_stderr)
if match:
code, message = match.groups()
raise JSONRPCException(dict(code=int(code), message=message))
# Ignore cli_stdout, raise with cli_stderr
raise subprocess.CalledProcessError(returncode, self.binary, output=cli_stderr)
try:
return json.loads(cli_stdout, parse_float=decimal.Decimal)
except JSONDecodeError:
return cli_stdout.rstrip("\n")
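# Illustrative TestNodeCLI usage (sketch; binary and datadir paths are assumed):
#
#   cli = TestNodeCLI("litecoin-cli", "/tmp/testnode0")
#   cli.getblockcount()                           # runs `litecoin-cli getblockcount`
#   cli("-rpcwallet=w1").getbalance()             # per-call extra options
#   cli.batch([cli.getblockcount.get_request()])  # batched requests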
# fluidinfo/fom | tests/test_errors.py | license: mit
import unittest
from fom.api import FluidApi
from fom.errors import (
Fluid400Error,
Fluid401Error,
Fluid404Error,
Fluid406Error,
Fluid412Error,
Fluid413Error,
Fluid500Error,
)
from _base import FakeFluidDB
class ErrorTest(unittest.TestCase):
def setUp(self):
self.db = FakeFluidDB()
self.api = FluidApi(self.db)
def test400(self):
self.db.add_resp(400, 'application/json', 'Not Found')
self.assertRaises(Fluid400Error,
self.api.namespaces['test'].delete)
def test401(self):
self.db.add_resp(401, 'text/plain', 'Unauthorized')
self.assertRaises(Fluid401Error,
self.api.namespaces['test'].delete)
def test404(self):
self.db.add_resp(404, 'text/plain', 'Not Found')
self.assertRaises(Fluid404Error,
self.api.namespaces['test'].delete)
def test406(self):
self.db.add_resp(406, 'text/plain', 'Not Acceptable')
self.assertRaises(Fluid406Error,
self.api.namespaces['test'].delete)
def test412(self):
self.db.add_resp(412, 'text/plain', 'Precondition Failed')
self.assertRaises(Fluid412Error,
self.api.namespaces['test'].delete)
def test413(self):
self.db.add_resp(413, 'text/plain', 'Request Entity Too Large')
self.assertRaises(Fluid413Error,
self.api.namespaces['test'].delete)
def test500(self):
self.db.add_resp(500, 'text/plain', 'Internal Server Error')
self.assertRaises(Fluid500Error,
self.api.namespaces['test'].delete)
def testErrorObject(self):
"""
Ensures that the exception object has the correct attributes.
"""
class FakeResponse(object):
"""
Mock class.
"""
def __init__(self, status, error, request_id):
self.status = status
self.error = error
self.request_id = request_id
fake_response = FakeResponse('500', 'Server Error', '12345')
err = Fluid500Error(fake_response)
self.assertEqual('500', err.status)
self.assertEqual('Server Error', err.fluid_error)
self.assertEqual('12345', err.request_id)
self.assertEqual(fake_response, err.response)
if __name__ == '__main__':
unittest.main()
# rueckstiess/jiratopic | onlineldavb/onlineldavb.py | license: apache-2.0
# onlineldavb.py: Package of functions for fitting Latent Dirichlet
# Allocation (LDA) with online variational Bayes (VB).
#
# Copyright (C) 2010 Matthew D. Hoffman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import numpy as n
from scipy.special import gammaln, psi
n.random.seed(100000001)
meanchangethresh = 0.001
def dirichlet_expectation(alpha):
"""
For a vector theta ~ Dir(alpha), computes E[log(theta)] given alpha.
"""
if (len(alpha.shape) == 1):
return(psi(alpha) - psi(n.sum(alpha)))
return(psi(alpha) - psi(n.sum(alpha, 1))[:, n.newaxis])
def parse_doc_list(docs, vocab):
"""
Parse a document into a list of word ids and a list of counts,
or parse a set of documents into two lists of lists of word ids
and counts.
Arguments:
docs: List of D documents. Each document must be represented as
a single string. (Word order is unimportant.) Any
words not in the vocabulary will be ignored.
vocab: Dictionary mapping from words to integer ids.
Returns a pair of lists of lists.
The first, wordids, says what vocabulary tokens are present in
each document. wordids[i][j] gives the jth unique token present in
document i. (Don't count on these tokens being in any particular
order.)
The second, wordcts, says how many times each vocabulary token is
present. wordcts[i][j] is the number of times that the token given
by wordids[i][j] appears in document i.
"""
if (type(docs).__name__ == 'str'):
temp = list()
temp.append(docs)
docs = temp
D = len(docs)
wordids = list()
wordcts = list()
for d in range(0, D):
docs[d] = docs[d].lower()
docs[d] = re.sub(r'-', ' ', docs[d])
docs[d] = re.sub(r'[^a-z ]', '', docs[d])
docs[d] = re.sub(r' +', ' ', docs[d])
        words = docs[d].split()
ddict = dict()
for word in words:
if (word in vocab):
wordtoken = vocab[word]
if (not wordtoken in ddict):
ddict[wordtoken] = 0
ddict[wordtoken] += 1
wordids.append(ddict.keys())
wordcts.append(ddict.values())
return((wordids, wordcts))
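# Illustrative call (hypothetical vocabulary; the order of the returned id and
# count lists is arbitrary, since it follows dict iteration order):
#
#   parse_doc_list(['apple banana apple'], {'apple': 0, 'banana': 1})
#   # -> ([[0, 1]], [[2, 1]]) up to ordering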
class OnlineLDA:
"""
Implements online VB for LDA as described in (Hoffman et al. 2010).
"""
def __init__(self, vocab, K, D, alpha, eta, tau0, kappa, init_lambda=None):
"""
Arguments:
K: Number of topics
vocab: A set of words to recognize. When analyzing documents, any word
not in this set will be ignored.
D: Total number of documents in the population. For a fixed corpus,
this is the size of the corpus. In the truly online setting, this
can be an estimate of the maximum number of documents that
could ever be seen.
alpha: Hyperparameter for prior on weight vectors theta
eta: Hyperparameter for prior on topics beta
tau0: A (positive) learning parameter that downweights early iterations
kappa: Learning rate: exponential decay rate---should be between
(0.5, 1.0] to guarantee asymptotic convergence.
Note that if you pass the same set of D documents in every time and
set kappa=0 this class can also be used to do batch VB.
"""
self._vocab = dict()
for word in vocab:
word = word.lower()
word = re.sub(r'[^a-z]', '', word)
self._vocab[word] = len(self._vocab)
self._K = K
self._W = len(self._vocab)
self._D = D
self._alpha = alpha
self._eta = eta
self._tau0 = tau0 + 1
self._kappa = kappa
self._updatect = 0
# Initialize the variational distribution q(beta|lambda)
        if init_lambda is not None:
self._lambda = init_lambda
else:
self._lambda = 1*n.random.gamma(100., 1./100., (self._K, self._W))
self._Elogbeta = dirichlet_expectation(self._lambda)
self._expElogbeta = n.exp(self._Elogbeta)
def do_e_step(self, docs):
"""
Given a mini-batch of documents, estimates the parameters
gamma controlling the variational distribution over the topic
weights for each document in the mini-batch.
Arguments:
docs: List of D documents. Each document must be represented
as a string. (Word order is unimportant.) Any
words not in the vocabulary will be ignored.
Returns a tuple containing the estimated values of gamma,
as well as sufficient statistics needed to update lambda.
"""
# This is to handle the case where someone just hands us a single
# document, not in a list.
        if (type(docs).__name__ == 'str'):
temp = list()
temp.append(docs)
docs = temp
(wordids, wordcts) = parse_doc_list(docs, self._vocab)
batchD = len(docs)
# Initialize the variational distribution q(theta|gamma) for
# the mini-batch
gamma = 1*n.random.gamma(100., 1./100., (batchD, self._K))
Elogtheta = dirichlet_expectation(gamma)
expElogtheta = n.exp(Elogtheta)
sstats = n.zeros(self._lambda.shape)
# Now, for each document d update that document's gamma and phi
it = 0
meanchange = 0
for d in range(0, batchD):
# These are mostly just shorthand (but might help cache locality)
ids = wordids[d]
cts = wordcts[d]
gammad = gamma[d, :]
Elogthetad = Elogtheta[d, :]
expElogthetad = expElogtheta[d, :]
expElogbetad = self._expElogbeta[:, ids]
# The optimal phi_{dwk} is proportional to
# expElogthetad_k * expElogbetad_w. phinorm is the normalizer.
phinorm = n.dot(expElogthetad, expElogbetad) + 1e-100
# Iterate between gamma and phi until convergence
for it in range(0, 100):
lastgamma = gammad
# We represent phi implicitly to save memory and time.
# Substituting the value of the optimal phi back into
# the update for gamma gives this update. Cf. Lee&Seung 2001.
gammad = self._alpha + expElogthetad * \
n.dot(cts / phinorm, expElogbetad.T)
Elogthetad = dirichlet_expectation(gammad)
expElogthetad = n.exp(Elogthetad)
phinorm = n.dot(expElogthetad, expElogbetad) + 1e-100
# If gamma hasn't changed much, we're done.
meanchange = n.mean(abs(gammad - lastgamma))
if (meanchange < meanchangethresh):
break
gamma[d, :] = gammad
# Contribution of document d to the expected sufficient
# statistics for the M step.
sstats[:, ids] += n.outer(expElogthetad.T, cts/phinorm)
# This step finishes computing the sufficient statistics for the
# M step, so that
# sstats[k, w] = \sum_d n_{dw} * phi_{dwk}
# = \sum_d n_{dw} * exp{Elogtheta_{dk} + Elogbeta_{kw}} / phinorm_{dw}.
sstats = sstats * self._expElogbeta
return((gamma, sstats))
def update_lambda(self, docs):
"""
First does an E step on the mini-batch given in wordids and
wordcts, then uses the result of that E step to update the
variational parameter matrix lambda.
Arguments:
docs: List of D documents. Each document must be represented
as a string. (Word order is unimportant.) Any
words not in the vocabulary will be ignored.
Returns gamma, the parameters to the variational distribution
over the topic weights theta for the documents analyzed in this
update.
Also returns an estimate of the variational bound for the
entire corpus for the OLD setting of lambda based on the
documents passed in. This can be used as a (possibly very
noisy) estimate of held-out likelihood.
"""
# rhot will be between 0 and 1, and says how much to weight
# the information we got from this mini-batch.
rhot = pow(self._tau0 + self._updatect, -self._kappa)
self._rhot = rhot
# Do an E step to update gamma, phi | lambda for this
# mini-batch. This also returns the information about phi that
# we need to update lambda.
(gamma, sstats) = self.do_e_step(docs)
# Estimate held-out likelihood for current values of lambda.
bound = self.approx_bound(docs, gamma)
# Update lambda based on documents.
self._lambda = self._lambda * (1-rhot) + \
rhot * (self._eta + self._D * sstats / len(docs))
self._Elogbeta = dirichlet_expectation(self._lambda)
self._expElogbeta = n.exp(self._Elogbeta)
self._updatect += 1
return(gamma, bound)
def approx_bound(self, docs, gamma):
"""
Estimates the variational bound over *all documents* using only
the documents passed in as "docs." gamma is the set of parameters
to the variational distribution q(theta) corresponding to the
set of documents passed in.
The output of this function is going to be noisy, but can be
useful for assessing convergence.
"""
# This is to handle the case where someone just hands us a single
# document, not in a list.
        if (type(docs).__name__ == 'str'):
temp = list()
temp.append(docs)
docs = temp
(wordids, wordcts) = parse_doc_list(docs, self._vocab)
batchD = len(docs)
score = 0
Elogtheta = dirichlet_expectation(gamma)
expElogtheta = n.exp(Elogtheta)
# E[log p(docs | theta, beta)]
for d in range(0, batchD):
gammad = gamma[d, :]
ids = wordids[d]
cts = n.array(wordcts[d])
phinorm = n.zeros(len(ids))
for i in range(0, len(ids)):
temp = Elogtheta[d, :] + self._Elogbeta[:, ids[i]]
tmax = max(temp)
phinorm[i] = n.log(sum(n.exp(temp - tmax))) + tmax
score += n.sum(cts * phinorm)
# oldphinorm = phinorm
# phinorm = n.dot(expElogtheta[d, :], self._expElogbeta[:, ids])
# print oldphinorm
# print n.log(phinorm)
# score += n.sum(cts * n.log(phinorm))
# E[log p(theta | alpha) - log q(theta | gamma)]
score += n.sum((self._alpha - gamma)*Elogtheta)
score += n.sum(gammaln(gamma) - gammaln(self._alpha))
score += sum(gammaln(self._alpha*self._K) - gammaln(n.sum(gamma, 1)))
# Compensate for the subsampling of the population of documents
score = score * self._D / len(docs)
# E[log p(beta | eta) - log q (beta | lambda)]
score = score + n.sum((self._eta-self._lambda)*self._Elogbeta)
score = score + n.sum(gammaln(self._lambda) - gammaln(self._eta))
score = score + n.sum(gammaln(self._eta*self._W) -
gammaln(n.sum(self._lambda, 1)))
return(score)
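# Illustrative driver loop (sketch; the vocabulary file and minibatching are
# assumed, mirroring the style of Hoffman's onlinewikipedia.py example):
#
#   vocab = open('dictnostops.txt').read().split()
#   olda = OnlineLDA(vocab, K=100, D=3.3e6, alpha=0.01, eta=0.01,
#                    tau0=1024, kappa=0.7)
#   for docset in minibatches:            # each docset is a list of strings
#       gamma, bound = olda.update_lambda(docset)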
# kfricke/micropython-esp8266uart | test_esp8266uart.py | license: mit
import esp8266uart
esp = esp8266uart.ESP8266(1, 115200)
print('Testing generic methods')
print('=======================')
print('AT startup...')
if esp.test():
print('Success!')
else:
print('Failed!')
#print('Soft-Reset...')
#if esp.reset():
# print('Success!')
#else:
# print('Failed!')
print('Another AT startup...')
if esp.test():
print('Success!')
else:
print('Failed!')
print()
print('Testing WIFI methods')
print('====================')
wifi_mode = 1
print("Testing get_mode/set_mode of value '%s'(%i)..." % (esp8266uart.WIFI_MODES[wifi_mode], wifi_mode))
esp.set_mode(wifi_mode)
if esp.get_mode() == wifi_mode:
print('Success!')
else:
print('Failed!')
print('Disconnecting from WLAN...')
if esp.disconnect():
print('Success!')
else:
print('Failed!')
print('Disconnecting from WLAN again...')
if esp.disconnect():
print('Success!')
else:
print('Failed!')
print('Checking if not connected WLAN...')
if esp.get_accesspoint() == None:
print('Success!')
else:
print('Failed!')
print('Scanning for WLANs...')
wlans = esp.list_all_accesspoints()
for wlan in wlans:
print(wlan)
print("Scanning for WLAN '%s'..." % (wlan['ssid']))
for wlan2 in esp.list_accesspoints(wlan['ssid']):
print(wlan2)
print('Setting access point mode...')
if esp.set_mode(esp8266uart.WIFI_MODES['Access Point + Station']):
print('Failed!')
else:
print('Success!')
print('Reading access point configuration')
print(esp.get_accesspoint_config())
print('Listing all stations connected to the module in access point mode...')
print(esp.list_stations())
print('Checking DHCP client and server settings...')
for mode in range(3):
print(esp.set_dhcp_config(mode, 0))
print(esp.set_dhcp_config(mode, 1))
print(esp.set_dhcp_config(mode, True))
print(esp.set_dhcp_config(mode, False))
try:
print(esp.set_dhcp_config(0, 2))
except esp8266uart.CommandError:
print('Obvious error caught!')
try:
print(esp.set_dhcp_config(4, 1))
except esp8266uart.CommandError:
print('Obvious error caught!')
print('Setting autoconnect to access point in station mode...')
esp.set_autoconnect(True)
esp.set_autoconnect(False)
esp.set_autoconnect(True)
print('Reading and setting the station IP...')
print(esp.get_station_ip())
esp.set_station_ip('192.168.1.10')
print(esp.get_station_ip())
print('Reading and setting the access point IP...')
print(esp.get_accesspoint_ip())
esp.set_accesspoint_ip('192.168.1.1')
print(esp.get_accesspoint_ip())
#!/usr/bin/env python3
# realzzt/BitCoin2013 | qa/rpc-tests/rpcbind_test.py | license: mit
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Test for -rpcbind, as well as -rpcallowip and -rpcconnect
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.netutil import *
class RPCBindTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
def setup_network(self):
pass
def setup_nodes(self):
pass
def run_bind_test(self, allow_ips, connect_to, addresses, expected):
'''
Start a node with requested rpcallowip and rpcbind parameters,
then try to connect, and check if the set of bound addresses
matches the expected set.
'''
expected = [(addr_to_hex(addr), port) for (addr, port) in expected]
base_args = ['-disablewallet', '-nolisten']
if allow_ips:
base_args += ['-rpcallowip=' + x for x in allow_ips]
binds = ['-rpcbind='+addr for addr in addresses]
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, [base_args + binds], connect_to)
try:
pid = bitcoind_processes[0].pid
assert_equal(set(get_bind_addrs(pid)), set(expected))
finally:
stop_nodes(self.nodes)
wait_bitcoinds()
def run_allowip_test(self, allow_ips, rpchost, rpcport):
'''
Start a node with rpcallow IP, and request getnetworkinfo
at a non-localhost IP.
'''
base_args = ['-disablewallet', '-nolisten'] + ['-rpcallowip='+x for x in allow_ips]
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, [base_args])
try:
# connect to node through non-loopback interface
node = get_rpc_proxy(rpc_url(0, "%s:%d" % (rpchost, rpcport)), 0)
node.getnetworkinfo()
finally:
node = None # make sure connection will be garbage collected and closed
stop_nodes(self.nodes)
wait_bitcoinds()
def run_test(self):
# due to OS-specific network stats queries, this test works only on Linux
assert(sys.platform.startswith('linux'))
# find the first non-loopback interface for testing
non_loopback_ip = None
for name,ip in all_interfaces():
if ip != '127.0.0.1':
non_loopback_ip = ip
break
if non_loopback_ip is None:
            assert False, 'This test requires at least one non-loopback IPv4 interface'
print("Using interface %s for testing" % non_loopback_ip)
defaultport = rpc_port(0)
# check default without rpcallowip (IPv4 and IPv6 localhost)
self.run_bind_test(None, '127.0.0.1', [],
[('127.0.0.1', defaultport), ('::1', defaultport)])
# check default with rpcallowip (IPv6 any)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', [],
[('::0', defaultport)])
# check only IPv4 localhost (explicit)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
[('127.0.0.1', defaultport)])
# check only IPv4 localhost (explicit) with alternative port
self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
[('127.0.0.1', 32171)])
# check only IPv4 localhost (explicit) with multiple alternative ports on same host
self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
[('127.0.0.1', 32171), ('127.0.0.1', 32172)])
# check only IPv6 localhost (explicit)
self.run_bind_test(['[::1]'], '[::1]', ['[::1]'],
[('::1', defaultport)])
# check both IPv4 and IPv6 localhost (explicit)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
[('127.0.0.1', defaultport), ('::1', defaultport)])
# check only non-loopback interface
self.run_bind_test([non_loopback_ip], non_loopback_ip, [non_loopback_ip],
[(non_loopback_ip, defaultport)])
# Check that with invalid rpcallowip, we are denied
self.run_allowip_test([non_loopback_ip], non_loopback_ip, defaultport)
try:
self.run_allowip_test(['1.1.1.1'], non_loopback_ip, defaultport)
            assert False, 'Connection not denied by rpcallowip as expected'
except JSONRPCException:
pass
if __name__ == '__main__':
RPCBindTest().main()
# Sudy/ScrapyJD | ScrapyJdAzw/pipelines.py | license: mit
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy import log
#from scrapy.core.exceptions import DropItem
from twisted.enterprise import adbapi
import time
import MySQLdb.cursors
class ScrapyjdazwPipeline(object):
def __init__(self):
# @@@ hardcoded db settings
# TODO: make settings configurable through settings
self.dbpool = adbapi.ConnectionPool('MySQLdb',
            host='192.168.1.153',
db='jddata',
user='spider',
passwd='spider1234',
cursorclass=MySQLdb.cursors.DictCursor,
charset='utf8',
use_unicode=True
)
def process_item(self, item, spider):
# run db query in thread pool
if item.has_key("pinfo"):
result = ""
for it in item["pinfo"]:
result += it.strip()
item["pinfo"] = result
query = self.dbpool.runInteraction(self._conditional_insert, item)
query.addErrback(self.handle_error)
return item
def _conditional_insert(self, tx, item):
# create record if doesn't exist.
# all this block run on it's own thread
if item.has_key("pinfo"):
tx.execute(\
"insert into product_table (pro_id, pro_info, pro_price) "
"values (%s, %s, %s)",
( item['proid'],
item['pinfo'],
item['pricejd'],
)
)
else:
tx.execute(\
"insert into comment_table (pro_id, user, time, score, comment) "
"values (%s, %s, %s, %s, %s)",
( item['proid'],
item['user'],
item['time'],
item['score'],
item['comment'],
)
)
log.msg("Item stored in db: %s" % item["proid"], level=log.INFO)
def handle_error(self, e):
        log.err(e)
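# Illustrative activation (hypothetical settings.py entry; the priority value
# 300 is an assumption, and older Scrapy versions used a plain list instead):
#
#   ITEM_PIPELINES = {'ScrapyJdAzw.pipelines.ScrapyjdazwPipeline': 300}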
#!/usr/bin/env python3
# gsauthof/utility | benchmark.py
# 2016, Georg Sauthoff <[email protected]>, GPLv3+
import argparse
import collections
import csv
import datetime
import itertools
import logging
# importing it conditionally iff svg generation is selected
# otherwise, it may fail on a system with minimal matplotlib
# install, i.e. where one of the backends loaded by default
# throws
#import matplotlib.pyplot as plt
# importing it conditionally iff csv or not quiet
#import numpy as np
import os
import subprocess
import sys
import tempfile
import time
try:
import colorlog
have_colorlog = True
except ImportError:
have_colorlog = False
def mk_arg_parser():
p = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description='run command multiple times and gather stats',
epilog='''Examples:
Run 3 programs 20 times each and write stats to stdout and the raw
data to a file:
$ benchmark --cmd ./find_memchr ./find_find --raw raw.dat -n 20 \\
./find_unroll2 3000 in
Create boxplot SVG (and nicely format the stdout and also write
the stats to a CSV file):
$ benchmark --input raw.dat --svg rss.svg --csv rss.csv \\
| column -t -s, -o ' | '
In case the benchmarked program needs some options the `--` delimiter
has its usual meaning (also explicitly specifying a tag):
$ benchmark --tags mode2 -n 1000 -- ./find_unroll2 --mode 2
'''
)
p.add_argument('argv', nargs='*', help='ARG0.. of the child')
p.add_argument('--cmd', '--cmds', nargs='+', default=[],
help='extra commands to run')
p.add_argument('--cols', nargs='+', default=[1,2,3,4],
help='columns to generate stats for')
p.add_argument('--csv', nargs='?', const='benchmark.csv',
help='also write results as csv')
p.add_argument('--debug', nargs='?', metavar='FILE',
const='benchmark.log', help='log debug messages into file')
p.add_argument('--graph-item', help='item to plot in a graph')
p.add_argument('--height', type=float, help='height of the graph (inch)')
p.add_argument('--input', '-i', metavar='FILE',
help='include raw data from a previous run')
p.add_argument('--items', nargs='+', default=['wall', 'user', 'sys', 'rss'],
help='names for the selected columns')
p.add_argument('--null-out', type=bool, default=True,
help='redirect stdout to /dev/null')
p.add_argument('--pstat', action=InitPstat,
help='set options for `perf stat` instead of GNU time')
p.add_argument('--precision', type=int, default=3,
help='precision for printing values')
p.add_argument('--quiet', '-q', action='store_true', default=False,
help='avoid printing table to stdout')
p.add_argument('--raw', nargs='?', metavar='FILE', const='data.csv',
help='write measurement results to file')
p.add_argument('--repeat', '-n', type=int, default=2,
help='number of times to repeat the measurement')
p.add_argument('--sleep', type=float, default=0.0, metavar='SECONDS',
help='sleep between runs')
p.add_argument('--svg', nargs='?', const='benchmark.svg',
help='write boxplot')
p.add_argument('--tags', nargs='+', default=[],
help='alternative names for the different commands')
p.add_argument('--time', default='/usr/bin/time',
help='measurement program (default: GNU time)')
p.add_argument('--time-args', nargs='+',
default=[ '--append', '--format', '%e,%U,%S,%M', '--output', '$<' ],
help='default arguments to measurement program')
  p.add_argument('--timeout', type=float, help='timeout for waiting on a child')
p.add_argument('--title', help='title of the graph')
p.add_argument('--width', type=float, help='width of the graph (inch)')
p.add_argument('--xlabel', default='experiment', help='x-axis label')
p.add_argument('--xrotate', type=int,
help='rotate x-labels (default: 75 degrees if more than 4 present')
p.add_argument('--ylabel', default='time (s)', help='y-axis label')
p.add_argument('--ymax', type=float,
help='set upper y-axis limit')
p.add_argument('--ymin', type=float, default=0.0,
help='set lower y-axis limit')
return p
class InitPstat(argparse.Action):
def __init__(self, option_strings, dest, **kwargs):
super(InitPstat, self).__init__(
option_strings, dest, nargs=0, **kwargs)
def __call__(self, parser, args, values, option_string=None):
args.time = 'perfstat.sh'
args.time_args = [ '-o', '$<' ]
args.cols = list(range(1,12))
args.items = [ 'nsec','cswitch','cpu_migr','page_fault','cycles','ghz','ins','ins_cyc','br','br_mis','br_mis_rate' ]
if not args.graph_item:
args.graph_item = 'ins_cyc'
args.title = 'Counter ({})'.format(args.graph_item)
args.ylabel = 'rate'
def parse_args(xs = None):
arg_parser = mk_arg_parser()
if xs or xs == []:
args = arg_parser.parse_args(xs)
else:
args = arg_parser.parse_args()
if not args.argv and not args.input:
raise ValueError('Neither cmd+args nor --input option present')
if args.debug:
setup_file_logging(args.debug)
if args.argv:
args.cmd = [ args.argv[0] ] + args.cmd
args.argv = args.argv[1:]
args.cols = [ int(x) for x in args.cols ]
  if args.tags and len(args.tags) != len(args.cmd):
raise ValueError('not enough tags specified')
if not args.tags:
args.tags = [ os.path.basename(x) for x in args.cmd ]
if not args.graph_item:
args.graph_item = args.items[0]
if not args.title:
args.title = 'Runtime ({})'.format(args.graph_item)
if args.svg:
#import matplotlib.pyplot as plt
global matplotlib
global plt
matplotlib = __import__('matplotlib.pyplot', globals(), locals())
plt = matplotlib.pyplot
if args.csv or not args.quiet or args.svg:
global np
numpy = __import__('numpy', globals(), locals())
np = numpy
#import numpy as np
return args
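# Example (illustrative, not part of the original script): parse_args() also
# accepts an explicit argument list, which is convenient for testing, e.g.
#   args = parse_args(['-n', '5', '--raw', 'raw.dat', './true'])
#   run(args)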
log_format = '%(asctime)s - %(levelname)-8s - %(message)s'
log_date_format = '%Y-%m-%d %H:%M:%S'
def mk_formatter():
f = logging.Formatter(log_format, log_date_format)
return f
def mk_logger():
log = logging.getLogger() # root logger
log.setLevel(logging.DEBUG)
#log.setLevel(logging.INFO)
if have_colorlog:
cformat = '%(log_color)s' + log_format
cf = colorlog.ColoredFormatter(cformat, log_date_format,
log_colors = { 'DEBUG': 'reset', 'INFO': 'reset',
'WARNING' : 'bold_yellow' , 'ERROR': 'bold_red',
'CRITICAL': 'bold_red'})
else:
cf = logging.Formatter(log_format, log_date_format)
ch = logging.StreamHandler()
ch.setLevel(logging.WARNING)
if os.isatty(2):
ch.setFormatter(cf)
else:
    ch.setFormatter(mk_formatter())
log.addHandler(ch)
return logging.getLogger(__name__)
log = mk_logger()
def setup_file_logging(filename):
log = logging.getLogger()
fh = logging.FileHandler(filename)
fh.setLevel(logging.DEBUG)
f = logging.Formatter(log_format + ' - [%(name)s]', log_date_format)
fh.setFormatter(f)
log.addHandler(fh)
# Reasons for using an external `time` command instead of
# calling e.g. `getrusage()`:
# - the forked child will start
# with the RSS of the python parent - thus, it will be reported
# too high if child actually uses less memory
# - same code path as for other measurement tools
# - elapsed time would have to be measured separately, otherwise
def measure(tag, cmd, args):
errors = 0
if args.null_out:
stdout = subprocess.DEVNULL
else:
stdout = None
with tempfile.NamedTemporaryFile(mode='w+', newline='') as temp_file:
time_args = args.time_args.copy()
time_args[time_args.index('$<')] = temp_file.name
a = [ args.time ] + time_args + [cmd] + args.argv
rc = -1
with subprocess.Popen(a, stdout=stdout) as p:
rc = p.wait(timeout=args.timeout)
if rc != 0:
log.error('Command {} failed with rc: {}'.format(cmd, rc))
errors = errors + 1
reader = csv.reader(temp_file)
r = [tag] + next(reader)
r.append(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
r.append(rc)
r.append(cmd)
r.append(str(args.argv))
return (r, errors)
def execute(args):
xs = []
esum = 0
for (tag, cmd) in zip(args.tags, args.cmd):
rs = []
for i in range(args.repeat):
try:
m, errors = measure(tag, cmd, args)
if args.sleep > 0:
time.sleep(args.sleep)
rs.append(m)
esum = esum + errors
except StopIteration:
esum = esum + 1
        log.error("Couldn't read measurements from temporary file"
            + ' - {} - {}'.format(tag, i))
xs.append( (tag, rs) )
return (xs, esum)
def read_raw(filename):
with open(filename, 'r', newline='') as f:
reader = csv.reader(f)
rs = []
next(reader)
xs = [ (k, list(l))
for (k, l) in itertools.groupby(reader, lambda row: row[0])]
# is equivalent to:
# prev = None
# xs = []
# l = []
# for row in reader:
# if prev != row[0]:
# l = []
# xs.append( (row[0], l) )
# l.append(row)
# prev = row[0]
return xs
def write_raw(rrs, args, filename):
with open(filename, 'a', newline='') as f:
writer = csv.writer(f)
writer.writerow(['tag'] + args.items + ['date', 'rc', 'cmd', 'args' ])
for rs in rrs:
for row in rs[1]:
writer.writerow(row)
def write_svg(ys, args, filename):
tags, items_l = zip(*ys)
xrotate = args.xrotate
if not xrotate and tags.__len__() > 4:
xrotate = 75
if args.width and args.height:
plt.figure(figsize=(args.width, args.height))
r = plt.boxplot( [ items[args.graph_item] for items in items_l ],
labels=tags )
ymax = args.ymax
if not args.ymax:
m = np.amax([np.amax(items[args.graph_item]) for items in items_l ])
ymax = np.ceil(m + (m - args.ymin) / 10)
plt.ylim(ymin=args.ymin, ymax=ymax)
plt.title(args.title)
if xrotate:
plt.xticks(rotation=xrotate) # 70 # 90
plt.xlabel(args.xlabel)
plt.ylabel(args.ylabel)
plt.tight_layout()
plt.savefig(filename)
# normally, we would just use a csv.writer() but
# we want to control the number of significant figures
def write_csv(zs, args, f):
if not zs:
return
header = ['tag'] + list(zs[0][1]._fields)
fstr = '{:1.'+str(args.precision)+'f}'
print(','.join(header), file=f)
for (tag, stat) in zs:
row = [tag] + list(stat)
srow = []
for r in row:
if type(r) is float or type(r) is np.float64:
srow.append(fstr.format(r))
else:
srow.append(str(r))
print(','.join(srow), file=f)
def get_items(rs, args):
m = np.zeros(rs.__len__(), dtype=[(x, 'float64') for x in args.items ] )
i = 0
for row in rs:
j = 0
for c in args.cols:
v = row[c]
m[i][j] = 0 if v == '' else v
j = j + 1
i = i + 1
return m
Stat = collections.namedtuple('Stat',
['n', 'min', 'Q1', 'median', 'Q3', 'max', 'mean', 'dev', 'item' ])
def gen_stats(items, args):
#for name in items.dtype.names:
name = args.graph_item
c = items[name]
ps = np.percentile(c, [25, 50, 75] )
# there is also np.median()
s = Stat(n=c.__len__(), min=np.amin(c), Q1=ps[0], median=ps[1],
Q3=ps[2], max=np.amax(c),
mean=np.mean(c), dev=np.std(c), item=name)
return s
def run(args):
xs = []
errors = 0
if args.input:
xs = xs + read_raw(args.input)
if args.cmd:
rxs, errors = execute(args)
xs = xs + rxs
if args.csv or not args.quiet or args.svg:
ys = [ (tag, get_items(rs, args)) for (tag, rs) in xs ]
if args.csv or not args.quiet:
zs = [ (tag, gen_stats(items, args)) for (tag, items) in ys ]
if args.csv:
with open(args.csv, 'w') as f:
write_csv(zs, args, f)
if not args.quiet:
write_csv(zs, args, sys.stdout)
if args.raw:
write_raw(xs, args, args.raw)
if args.svg:
write_svg(ys, args, args.svg)
return int(errors != 0)
def main():
args = parse_args()
return run(args)
if __name__ == '__main__':
sys.exit(main())
| gpl-3.0 | 8,276,299,148,018,042,000 | 30.973333 | 120 | 0.621101 | false |
EricE/evelink | evelink/api.py | 1 | 9272 | from cStringIO import StringIO
import calendar
import functools
import logging
import re
import time
from urllib import urlencode
import urllib2
from xml.etree import ElementTree
_log = logging.getLogger('evelink.api')
try:
import requests
_has_requests = True
except ImportError:
_log.info('`requests` not available, falling back to urllib2')
    _has_requests = False
def _clean(v):
"""Convert parameters into an acceptable format for the API."""
if isinstance(v, (list, set, tuple)):
return ",".join(str(i) for i in v)
else:
return str(v)
def parse_ts(v):
"""Parse a timestamp from EVE API XML into a unix-ish timestamp."""
if v == '':
return None
ts = calendar.timegm(time.strptime(v, "%Y-%m-%d %H:%M:%S"))
# Deal with EVE's nonexistent 0001-01-01 00:00:00 timestamp
return ts if ts > 0 else None
def get_named_value(elem, field):
"""Returns the string value of the named child element."""
try:
return elem.find(field).text
except AttributeError:
return None
def get_ts_value(elem, field):
"""Returns the timestamp value of the named child element."""
val = get_named_value(elem, field)
if val:
return parse_ts(val)
return None
def get_int_value(elem, field):
"""Returns the integer value of the named child element."""
val = get_named_value(elem, field)
if val:
return int(val)
return val
def get_float_value(elem, field):
"""Returns the float value of the named child element."""
val = get_named_value(elem, field)
if val:
return float(val)
return val
def get_bool_value(elem, field):
"""Returns the boolean value of the named child element."""
val = get_named_value(elem, field)
if val == 'True':
return True
elif val == 'False':
return False
return None
def elem_getters(elem):
"""Returns a tuple of (_str, _int, _float, _bool, _ts) functions.
These are getters closed around the provided element.
"""
_str = lambda key: get_named_value(elem, key)
_int = lambda key: get_int_value(elem, key)
_float = lambda key: get_float_value(elem, key)
_bool = lambda key: get_bool_value(elem, key)
_ts = lambda key: get_ts_value(elem, key)
return _str, _int, _float, _bool, _ts
def parse_keyval_data(data_string):
"""Parse 'key: value' lines from a LF-delimited string."""
keyval_pairs = data_string.strip().split('\n')
results = {}
for pair in keyval_pairs:
key, _, val = pair.strip().partition(': ')
if 'Date' in key:
val = parse_ms_date(val)
elif val == 'null':
val = None
elif re.match(r"^-?\d+$", val):
val = int(val)
elif re.match(r"-?\d+\.\d+", val):
val = float(val)
results[key] = val
return results
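# Illustrative example (the key names and values are hypothetical):
#   parse_keyval_data("skillPoints: 100\nshipName: null")
#   -> {'skillPoints': 100, 'shipName': None}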
def parse_ms_date(date_string):
"""Convert MS date format into epoch"""
    return int(date_string)/10000000 - 11644473600
class APIError(Exception):
"""Exception raised when the EVE API returns an error."""
def __init__(self, code=None, message=None):
self.code = code
self.message = message
def __repr__(self):
return "APIError(%r, %r)" % (self.code, self.message)
def __str__(self):
return "%s (code=%d)" % (self.message, int(self.code))
class APICache(object):
"""Minimal interface for caching API requests.
This very basic implementation simply stores values in
memory, with no other persistence. You can subclass it
to define a more complex/featureful/persistent cache.
"""
def __init__(self):
self.cache = {}
def get(self, key):
"""Return the value referred to by 'key' if it is cached.
key:
a result from the Python hash() function.
"""
result = self.cache.get(key)
if not result:
return None
value, expiration = result
if expiration < time.time():
del self.cache[key]
return None
return value
def put(self, key, value, duration):
"""Cache the provided value, referenced by 'key', for the given duration.
key:
a result from the Python hash() function.
value:
an xml.etree.ElementTree.Element object
duration:
a number of seconds before this cache entry should expire.
"""
expiration = time.time() + duration
self.cache[key] = (value, expiration)
class API(object):
"""A wrapper around the EVE API."""
def __init__(self, base_url="api.eveonline.com", cache=None, api_key=None):
self.base_url = base_url
cache = cache or APICache()
if not isinstance(cache, APICache):
raise ValueError("The provided cache must subclass from APICache.")
self.cache = cache
self.CACHE_VERSION = '1'
if api_key and len(api_key) != 2:
raise ValueError("The provided API key must be a tuple of (keyID, vCode).")
self.api_key = api_key
self._set_last_timestamps()
def _set_last_timestamps(self, current_time=0, cached_until=0):
self.last_timestamps = {
'current_time': current_time,
'cached_until': cached_until,
}
def _cache_key(self, path, params):
sorted_params = sorted(params.iteritems())
# Paradoxically, Shelve doesn't like integer keys.
return '%s-%s' % (self.CACHE_VERSION, hash((path, tuple(sorted_params))))
def get(self, path, params=None):
"""Request a specific path from the EVE API.
The supplied path should be a slash-separated path
frament, e.g. "corp/AssetList". (Basically, the portion
of the API url in between the root / and the .xml bit.)
"""
params = params or {}
params = dict((k, _clean(v)) for k,v in params.iteritems())
_log.debug("Calling %s with params=%r", path, params)
if self.api_key:
_log.debug("keyID and vCode added")
params['keyID'] = self.api_key[0]
params['vCode'] = self.api_key[1]
key = self._cache_key(path, params)
response = self.cache.get(key)
cached = response is not None
if not cached:
# no cached response body found, call the API for one.
params = urlencode(params)
full_path = "https://%s/%s.xml.aspx" % (self.base_url, path)
response = self.send_request(full_path, params)
else:
_log.debug("Cache hit, returning cached payload")
tree = ElementTree.parse(StringIO(response))
current_time = get_ts_value(tree, 'currentTime')
expires_time = get_ts_value(tree, 'cachedUntil')
self._set_last_timestamps(current_time, expires_time)
if not cached:
# Have to split this up from above as timestamps have to be
# extracted.
self.cache.put(key, response, expires_time - current_time)
error = tree.find('error')
if error is not None:
code = error.attrib['code']
message = error.text.strip()
exc = APIError(code, message)
_log.error("Raising API error: %r" % exc)
raise exc
result = tree.find('result')
return result
def send_request(self, full_path, params):
if _has_requests:
return self.requests_request(full_path, params)
else:
return self.urllib2_request(full_path, params)
def urllib2_request(self, full_path, params):
try:
if params:
# POST request
_log.debug("POSTing request")
r = urllib2.urlopen(full_path, params)
else:
# GET request
_log.debug("GETting request")
r = urllib2.urlopen(full_path)
result = r.read()
r.close()
return result
except urllib2.URLError as e:
# TODO: Handle this better?
raise e
def requests_request(self, full_path, params):
session = getattr(self, 'session', None)
if not session:
session = requests.Session()
self.session = session
try:
if params:
# POST request
_log.debug("POSTing request")
r = session.post(full_path, params=params)
else:
# GET request
_log.debug("GETting request")
r = session.get(full_path)
return r.content
except requests.exceptions.RequestException as e:
# TODO: Handle this better?
raise e
def auto_api(func):
"""A decorator to automatically provide an API instance.
Functions decorated with this will have the api= kwarg
automatically supplied with a default-initialized API()
object if no other API object is supplied.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
if 'api' not in kwargs:
kwargs['api'] = API()
return func(*args, **kwargs)
return wrapper
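# Illustrative usage (assumption: the decorated function takes api= as a
# keyword argument, as described in the docstring above):
#   @auto_api
#   def character_name(char_id, api=None):
#       return api.get('eve/CharacterName', {'ids': char_id})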
# vim: set ts=4 sts=4 sw=4 et:
| mit | 5,219,783,712,813,521,000 | 29.201954 | 87 | 0.584232 | false |
KonradBreitsprecher/espresso | samples/ekboundaries.py | 1 | 1441 |
from espressomd import System, shapes, electrokinetics
import sys
system = System(box_l = [10, 10, 10])
system.set_random_state_PRNG()
#system.seed = system.cell_system.get_state()['n_nodes'] * [1234]
system.cell_system.skin = 0.4
system.time_step = 0.1
ek = electrokinetics.Electrokinetics(
lb_density=1, friction=1, agrid=1, viscosity=1, T=1, prefactor=1)
pos = electrokinetics.Species(
density=0.05, D=0.1, valency=1, ext_force=[0, 0, 1.])
neg = electrokinetics.Species(
density=0.05, D=0.1, valency=-1, ext_force=[0, 0, -1.])
ek.add_species(pos)
ek.add_species(neg)
system.actors.add(ek)
print(ek.get_params())
print(pos.get_params())
print(neg.get_params())
print(pos[5, 5, 5].density)
ek_wall_left = electrokinetics.EKBoundary(
shape=shapes.Wall(dist=1, normal=[1, 0, 0]), charge_density=-0.01)
ek_wall_right = electrokinetics.EKBoundary(
shape=shapes.Wall(dist=-9, normal=[-1, 0, 0]), charge_density=0.01)
system.ekboundaries.add(ek_wall_left)
system.ekboundaries.add(ek_wall_right)
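# Added for robustness (assumption: the output directory may not exist yet);
# the VTK writers in the loop below need the "ek/" directory to be present.
import os
if not os.path.isdir("ek"):
    os.makedirs("ek")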
for i in range(1000):
system.integrator.run(100)
sys.stdout.write("\rIntegrating: %03i" % i)
sys.stdout.flush()
pos.print_vtk_density("ek/pos_dens_%i.vtk" % i)
neg.print_vtk_density("ek/neg_dens_%i.vtk" % i)
pos.print_vtk_flux("ek/pos_flux_%i.vtk" % i)
neg.print_vtk_flux("ek/neg_flux_%i.vtk" % i)
ek.print_vtk_velocity("ek/ekv_%i.vtk" % i)
ek.print_vtk_boundary("ek/ekb_%i.vtk" % i)
| gpl-3.0 | -5,257,314,025,805,871,000 | 29.659574 | 71 | 0.676613 | false |
DataDog/integrations-extras | riak_repl/tests/test_check.py | 1 | 4306 | import pytest
from datadog_checks.base.errors import CheckException
from datadog_checks.dev.utils import get_metadata_metrics
from datadog_checks.riak_repl import RiakReplCheck
from .common import INSTANCE
def test_config():
c = RiakReplCheck('riak_repl', {}, {}, None)
# Empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# Timeout
instance = {'url': 'http://foobar'}
with pytest.raises(CheckException):
c.check(instance)
# Statuscode
instance = {'url': 'https://google.com/404'}
with pytest.raises(CheckException):
c.check(instance)
# Decode Error
instance = {'url': 'https://google.com'}
with pytest.raises(CheckException):
c.check(instance)
@pytest.mark.integration
def test_check(aggregator, dd_environment):
init_config = {
'keys': [
"riak_repl.server_bytes_sent",
"riak_repl.server_bytes_recv",
"riak_repl.server_connects",
"riak_repl.server_connect_errors",
"riak_repl.server_fullsyncs",
"riak_repl.client_bytes_sent",
"riak_repl.client_bytes_recv",
"riak_repl.client_connects",
"riak_repl.client_connect_errors",
"riak_repl.client_redirect",
"riak_repl.objects_dropped_no_clients",
"riak_repl.objects_dropped_no_leader",
"riak_repl.objects_sent",
"riak_repl.objects_forwarded",
"riak_repl.elections_elected",
"riak_repl.elections_leader_changed",
"riak_repl.rt_source_errors",
"riak_repl.rt_sink_errors",
"riak_repl.rt_dirty",
"riak_repl.realtime_send_kbps",
"riak_repl.realtime_recv_kbps",
"riak_repl.fullsync_send_kbps",
"riak_repl.fullsync_recv_kbps",
"riak_repl.realtime_queue_stats.percent_bytes_used",
"riak_repl.realtime_queue_stats.bytes",
"riak_repl.realtime_queue_stats.max_bytes",
"riak_repl.realtime_queue_stats.overload_drops",
"riak_repl.realtime_queue_stats.consumers.unacked",
"riak_repl.realtime_queue_stats.consumers.errs",
"riak_repl.realtime_queue_stats.consumers.drops",
"riak_repl.realtime_queue_stats.consumers.pending",
"riak_repl.fullsync_coordinator.queued",
"riak_repl.fullsync_coordinator.in_progress",
"riak_repl.fullsync_coordinator.waiting_for_retry",
"riak_repl.fullsync_coordinator.starting",
"riak_repl.fullsync_coordinator.successful_exits",
"riak_repl.fullsync_coordinator.error_exits",
"riak_repl.fullsync_coordinator.retry_exits",
"riak_repl.fullsync_coordinator.soft_retry_exits",
"riak_repl.fullsync_coordinator.busy_nodes",
"riak_repl.fullsync_coordinator.fullsyncs_completed",
"riak_repl.fullsync_coordinator.last_fullsync_duration",
"riak_repl.realtime_source.connected.hb_rtt",
"riak_repl.realtime_source.connected.objects",
"riak_repl.realtime_sink.connected.deactivated",
"riak_repl.realtime_sink.connected.source_drops",
"riak_repl.realtime_sink.connected.pending",
]
}
c = RiakReplCheck('riak_repl', init_config, {}, None)
c.check(INSTANCE)
for key in init_config['keys']:
aggregator.assert_metric(key, tags=[], at_least=0)
# Assert coverage for this check on this instance
aggregator.assert_all_metrics_covered()
# TODO: there are metrics missing in metadata.csv
missing_metrics = [
'riak_repl.realtime_queue_stats.consumers.drops',
'riak_repl.realtime_queue_stats.consumers.errs',
'riak_repl.realtime_queue_stats.consumers.pending',
'riak_repl.realtime_queue_stats.consumers.unacked',
'riak_repl.realtime_sink.connected.deactivated',
'riak_repl.realtime_sink.connected.pending',
'riak_repl.realtime_sink.connected.source_drops',
'riak_repl.realtime_source.connected.hb_rtt',
'riak_repl.realtime_source.connected.objects',
]
aggregator.assert_metrics_using_metadata(get_metadata_metrics(), exclude=missing_metrics)
| bsd-3-clause | 8,266,446,927,148,476,000 | 38.87037 | 93 | 0.632606 | false |
btwn2thvs/skype-me | skype-search/skype-dao.py | 1 | 1424 | import sqlite3
import logging
DATABASE = '/Users/wcampbell/Library/Application Support/Skype/willcampbell_ha/main.db'
unique_participants_sql = 'SELECT DISTINCT(participants) FROM Chats'
messages_by_author_sql = 'SELECT from_dispname, body_xml FROM Messages where dialog_partner = ?'
def most_common(t):
    """Return the words in t ordered by decreasing frequency, ignoring
    empty strings and the local account name."""
    word_counter = {}
for word in t:
if word and word != "willcampbell_ha":
if word in word_counter:
word_counter[word] += 1
else:
word_counter[word] = 1
popular_words = sorted(word_counter, key = word_counter.get, reverse = True)
return popular_words
class BaseDao(object):
def __init__(self, db):
logging.info('Opening a sqlite db connection')
self.conn = sqlite3.connect(db)
self.c = self.conn.cursor()
def get_all_messages(self, *authors):
'''
Return a list of messages by authors
'''
self.c.execute(messages_by_author_sql, authors)
return self.c.fetchall()
def get_unique_participants(self):
self.c.execute(unique_participants_sql)
return self.c.fetchall()
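# Example usage (illustrative; the partner name below is hypothetical):
#   dao = BaseDao(DATABASE)
#   for sender, body in dao.get_all_messages("some.partner"):
#       print sender, body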
b = BaseDao(DATABASE)
#print b.get_all_messages("stacy.vanderworth")
p = []
for participants in b.get_unique_participants():
participant_list = participants[0]
if participant_list:
p += participant_list.split()
print most_common(p)[:3] | mit | -6,106,299,922,487,574,000 | 29.319149 | 96 | 0.632725 | false |
vgrem/Office365-REST-Python-Client | tests/sharepoint/test_publishing.py | 1 | 1894 | from office365.sharepoint.publishing.primary_city_time import PrimaryCityTime
from office365.sharepoint.publishing.site_page_metadata_collection import SitePageMetadataCollection
from office365.sharepoint.publishing.site_page_service import SitePageService
from office365.sharepoint.publishing.video_service_discoverer import VideoServiceDiscoverer
from tests.sharepoint.sharepoint_case import SPTestCase
class TestSPPublishing(SPTestCase):
@classmethod
def setUpClass(cls):
super(TestSPPublishing, cls).setUpClass()
@classmethod
def tearDownClass(cls):
pass
def test1_init_site_page_service(self):
svc = SitePageService(self.client).get().execute_query()
self.assertIsNotNone(svc.resource_path)
def test2_get_site_pages(self):
svc = SitePageService(self.client)
pages = svc.pages().get().execute_query()
self.assertIsInstance(pages, SitePageMetadataCollection)
def test3_get_time_zone(self):
time_zone = SitePageService.get_time_zone(self.client, "Moscow").execute_query()
self.assertIsInstance(time_zone, PrimaryCityTime)
self.assertEqual(time_zone.properties.get("Location"), "Moscow, Russia")
def test4_compute_file_name(self):
result = SitePageService.compute_file_name(self.client, "Test page").execute_query()
self.assertIsNotNone(result.value)
def test5_file_picker_tab_options(self):
result = SitePageService.file_picker_tab_options(self.client).execute_query()
self.assertIsNotNone(result.value)
def test6_org_assets(self):
result = SitePageService.org_assets(self.client).execute_query()
self.assertIsNotNone(result.value)
def test7_get_video_service_manager(self):
discoverer = VideoServiceDiscoverer(self.client).get().execute_query()
self.assertIsNotNone(discoverer.resource_path)
| mit | -887,073,155,801,701,000 | 40.173913 | 100 | 0.73548 | false |
google/eng-edu | ml/guides/text_classification/batch_train_sequence_model.py | 1 | 6910 | """Module to train sequence model with batches of data.
Vectorizes training and validation texts into sequences and uses that for
training a sequence model - a sepCNN model. We use sequence model for text
classification when the ratio of number of samples to number of words per
sample for the given dataset is very large (>~15K). This module is identical to
the `train_sequence_model` module except that we pass the data in batches for
training. This is required when you have a very large dataset that does not fit
into memory.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import time
import tensorflow as tf
import numpy as np
import build_model
import load_data
import vectorize_data
import explore_data
FLAGS = None
# Limit on the number of features. We use the top 20K features.
TOP_K = 20000
def _data_generator(x, y, num_features, batch_size):
"""Generates batches of vectorized texts for training/validation.
# Arguments
x: np.matrix, feature matrix.
y: np.ndarray, labels.
num_features: int, number of features.
batch_size: int, number of samples per batch.
# Returns
Yields feature and label data in batches.
"""
num_samples = x.shape[0]
num_batches = num_samples // batch_size
if num_samples % batch_size:
num_batches += 1
while 1:
for i in range(num_batches):
start_idx = i * batch_size
end_idx = (i + 1) * batch_size
if end_idx > num_samples:
end_idx = num_samples
x_batch = x[start_idx:end_idx]
y_batch = y[start_idx:end_idx]
yield x_batch, y_batch
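# Illustrative check (not part of the original module): the generator yields
# ceil(num_samples / batch_size) batches per pass and then starts over, e.g.
#   gen = _data_generator(x_train, train_labels, num_features, 128)
#   x_batch, y_batch = next(gen)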
def batch_train_sequence_model(data,
learning_rate=1e-3,
epochs=1000,
batch_size=128,
blocks=2,
filters=64,
dropout_rate=0.2,
embedding_dim=200,
kernel_size=3,
pool_size=3):
"""Trains sequence model on the given dataset.
# Arguments
data: tuples of training and test texts and labels.
learning_rate: float, learning rate for training model.
epochs: int, number of epochs.
batch_size: int, number of samples per batch.
blocks: int, number of pairs of sepCNN and pooling blocks in the model.
filters: int, output dimension of sepCNN layers in the model.
dropout_rate: float: percentage of input to drop at Dropout layers.
embedding_dim: int, dimension of the embedding vectors.
kernel_size: int, length of the convolution window.
pool_size: int, factor by which to downscale input at MaxPooling layer.
# Raises
ValueError: If validation data has label values which were not seen
in the training data.
"""
# Get the data.
(train_texts, train_labels), (val_texts, val_labels) = data
# Verify that validation labels are in the same range as training labels.
num_classes = explore_data.get_num_classes(train_labels)
unexpected_labels = [v for v in val_labels if v not in range(num_classes)]
if len(unexpected_labels):
raise ValueError('Unexpected label values found in the validation set:'
' {unexpected_labels}. Please make sure that the '
'labels in the validation set are in the same range '
'as training labels.'.format(
unexpected_labels=unexpected_labels))
# Vectorize texts.
x_train, x_val, word_index = vectorize_data.sequence_vectorize(
train_texts, val_texts)
# Number of features will be the embedding input dimension. Add 1 for the
# reserved index 0.
num_features = min(len(word_index) + 1, TOP_K)
# Create model instance.
model = build_model.sepcnn_model(blocks=blocks,
filters=filters,
kernel_size=kernel_size,
embedding_dim=embedding_dim,
dropout_rate=dropout_rate,
pool_size=pool_size,
input_shape=x_train.shape[1:],
num_classes=num_classes,
num_features=num_features)
# Compile model with learning parameters.
if num_classes == 2:
loss = 'binary_crossentropy'
else:
loss = 'sparse_categorical_crossentropy'
optimizer = tf.keras.optimizers.Adam(lr=learning_rate)
model.compile(optimizer=optimizer, loss=loss, metrics=['acc'])
# Create callback for early stopping on validation loss. If the loss does
# not decrease in two consecutive tries, stop training.
callbacks = [tf.keras.callbacks.EarlyStopping(
monitor='val_loss', patience=2)]
# Create training and validation generators.
training_generator = _data_generator(
x_train, train_labels, num_features, batch_size)
validation_generator = _data_generator(
x_val, val_labels, num_features, batch_size)
# Get number of training steps. This indicated the number of steps it takes
# to cover all samples in one epoch.
steps_per_epoch = x_train.shape[0] // batch_size
if x_train.shape[0] % batch_size:
steps_per_epoch += 1
# Get number of validation steps.
validation_steps = x_val.shape[0] // batch_size
if x_val.shape[0] % batch_size:
validation_steps += 1
# Train and validate model.
history = model.fit_generator(
generator=training_generator,
steps_per_epoch=steps_per_epoch,
validation_data=validation_generator,
validation_steps=validation_steps,
callbacks=callbacks,
epochs=epochs,
verbose=2) # Logs once per epoch.
# Print results.
history = history.history
print('Validation accuracy: {acc}, loss: {loss}'.format(
acc=history['val_acc'][-1], loss=history['val_loss'][-1]))
# Save model.
model.save('amazon_reviews_sepcnn_model.h5')
return history['val_acc'][-1], history['val_loss'][-1]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--data_dir', type=str, default='./data',
help='input data directory')
FLAGS, unparsed = parser.parse_known_args()
# Using the Amazon reviews dataset to demonstrate training of
# sequence model with batches of data.
data = load_data.load_amazon_reviews_sentiment_analysis_dataset(
FLAGS.data_dir)
batch_train_sequence_model(data)
| apache-2.0 | -3,906,409,651,647,072,000 | 37.176796 | 79 | 0.607236 | false |
IanDCarroll/xox | tests/test_tactical_cortex_slot.py | 1 | 5679 | import unittest
from Training.tactical_3x3_lobe_slot import *
class Tactical3x3LobeTestCase(unittest.TestCase):
def setUp(self):
self.lobe = TacticalLobe()
self.ana_f = [0,0,0,0,0,0,0,0]
self.ana_0 = [2,0,0,0,0,0,0,0]
self.ana_1 = [0,2,0,0,0,0,0,0]
self.ana_2 = [0,0,2,0,0,0,0,0]
self.ana_3 = [0,0,0,2,0,0,0,0]
self.ana_4 = [0,0,0,0,2,0,0,0]
self.ana_5 = [0,0,0,0,0,2,0,0]
self.ana_6 = [0,0,0,0,0,0,2,0]
self.ana_7 = [0,0,0,0,0,0,0,2]
self.opt_f = [ 0,1,2,
3,4,5,
6,7,8 ]
self.row_0 = [ 2,
3,4,5,
6,7,8 ]
self.row_1 = [ 0,1,2,
5,
6,7,8 ]
self.row_2 = [ 0,1,2,
3,4,5,
7 ]
self.col_3 = [ 1,2,
3,4,5,
7,8 ]
self.col_4 = [ 0,1,2,
3, 5,
6, 8 ]
self.col_5 = [ 0,1,
3,4,
6,7,8 ]
self.diag6 = [ 0,1,2,
3, 5,
6,7, ]
self.diag7 = [ 0,1,
3, 5,
6,7,8 ]
self.expected = [False, 2,5,7, 3,1,8, 0,6]
self.empty_board = { 'board': [0,0,0, 0,0,0, 0,0,0],
'analysis': [0,0,0, 0,0,0, 0,0] }
self.fork_with_6a = { 'board': [1,10,0, 0,1,0, 0,0,10] }
self.fork_with_6b = { 'board': [10,0,0, 0,1,10, 0,0,1] }
self.fork_with_8a = { 'board': [0,10,1, 0,1,0, 10,0,0] }
self.fork_with_8b = { 'board': [0,0,10, 10,1,0, 1,0,0] }
self.avoid_fork_1 = { 'analysis': [1,10,1, 1,10,1, 12,10] }
self.avoid_fork_2 = { 'analysis': [1,10,1, 1,10,1, 10,12] }
def test_take_win_chance(self):
dic_f = { 'analysis': self.ana_f,'options': self.opt_f,
'marker_code': 1 }
dic_0 = { 'analysis': self.ana_0,'options': self.row_0,
'marker_code': 1 }
dic_1 = { 'analysis': self.ana_1,'options': self.row_1,
'marker_code': 1 }
dic_2 = { 'analysis': self.ana_2,'options': self.row_2,
'marker_code': 1 }
dic_3 = { 'analysis': self.ana_3,'options': self.col_3,
'marker_code': 1 }
dic_4 = { 'analysis': self.ana_4,'options': self.col_4,
'marker_code': 1 }
dic_5 = { 'analysis': self.ana_5,'options': self.col_5,
'marker_code': 1 }
dic_6 = { 'analysis': self.ana_6,'options': self.diag6,
'marker_code': 1 }
dic_7 = { 'analysis': self.ana_7,'options': self.diag7,
'marker_code': 1 }
tf = self.lobe.take_win_chance(dic_f)
t0 = self.lobe.take_win_chance(dic_0)
t1 = self.lobe.take_win_chance(dic_1)
t2 = self.lobe.take_win_chance(dic_2)
t3 = self.lobe.take_win_chance(dic_3)
t4 = self.lobe.take_win_chance(dic_4)
t5 = self.lobe.take_win_chance(dic_5)
t6 = self.lobe.take_win_chance(dic_6)
t7 = self.lobe.take_win_chance(dic_7)
test_yields = [tf, t0,t1,t2, t3,t4,t5, t6,t7]
self.assertEqual(test_yields, self.expected)
def test_avoid_losing(self):
dic_f = { 'analysis': self.ana_f,'options': self.opt_f,
'enemy_code': 1 }
dic_0 = { 'analysis': self.ana_0,'options': self.row_0,
'enemy_code': 1 }
dic_1 = { 'analysis': self.ana_1,'options': self.row_1,
'enemy_code': 1 }
dic_2 = { 'analysis': self.ana_2,'options': self.row_2,
'enemy_code': 1 }
dic_3 = { 'analysis': self.ana_3,'options': self.col_3,
'enemy_code': 1 }
dic_4 = { 'analysis': self.ana_4,'options': self.col_4,
'enemy_code': 1 }
dic_5 = { 'analysis': self.ana_5,'options': self.col_5,
'enemy_code': 1 }
dic_6 = { 'analysis': self.ana_6,'options': self.diag6,
'enemy_code': 1 }
dic_7 = { 'analysis': self.ana_7,'options': self.diag7,
'enemy_code': 1 }
tf = self.lobe.avoid_losing(dic_f)
t0 = self.lobe.avoid_losing(dic_0)
t1 = self.lobe.avoid_losing(dic_1)
t2 = self.lobe.avoid_losing(dic_2)
t3 = self.lobe.avoid_losing(dic_3)
t4 = self.lobe.avoid_losing(dic_4)
t5 = self.lobe.avoid_losing(dic_5)
t6 = self.lobe.avoid_losing(dic_6)
t7 = self.lobe.avoid_losing(dic_7)
test_yields = [tf, t0,t1,t2, t3,t4,t5, t6,t7]
self.assertEqual(test_yields, self.expected)
def test_take_fork_chance(self):
test_f = self.lobe.take_fork_chance(self.empty_board)
test_6a = self.lobe.take_fork_chance(self.fork_with_6a)
test_6b = self.lobe.take_fork_chance(self.fork_with_6b)
test_8a = self.lobe.take_fork_chance(self.fork_with_8a)
test_8b = self.lobe.take_fork_chance(self.fork_with_8b)
self.assertEqual(test_f, False)
self.assertEqual(test_6a, 6)
self.assertEqual(test_6b, 6)
self.assertEqual(test_8a, 8)
self.assertEqual(test_8b, 8)
def test_avoid_fork(self):
test_f = self.lobe.avoid_fork(self.empty_board)
test_1 = self.lobe.avoid_fork(self.avoid_fork_1)
test_2 = self.lobe.avoid_fork(self.avoid_fork_2)
self.assertEqual(test_f, False)
self.assertEqual(test_1, 1)
self.assertEqual(test_2, 1)
| mit | -2,480,244,237,276,248,600 | 38.165517 | 67 | 0.47121 | false |
TelematicaUSM/EduRT | src/exceptions.py | 2 | 2179 | # -*- coding: UTF-8 -*-
# COPYRIGHT (c) 2016 Cristóbal Ganter
#
# GNU AFFERO GENERAL PUBLIC LICENSE
# Version 3, 19 November 2007
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class NotDictError(TypeError):
"""Raise when an object is not an instance of dict.
.. automethod:: __init__
"""
def __init__(self, name, *args):
"""Initialize a new NotDictError.
:param str name:
Name of the object that is not a dictionary.
"""
super().__init__(
'{} is not a dictionary.'.format(name),
*args
)
class NotStringError(TypeError):
"""Raise when an object is not an instance of str.
.. automethod:: __init__
"""
def __init__(self, name, *args):
"""Initialize a new NotStringError.
:param str name:
Name of the object that is not a string.
"""
super().__init__(
'{} is not a string.'.format(name),
*args
)
class MissingFieldError(ValueError):
"""Raise when a dict doesn't have a required field.
.. automethod:: __init__
"""
def __init__(self, dict_name, field_name, *args):
"""Initialize a new MissingFieldError.
:param str dict_name:
The name of the dictionary.
:param str field_name:
The name of the field that is missing in the
dictionary.
"""
super().__init__(
"{} should have a '{}' field.".format(
dict_name, field_name),
*args
)
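# Illustrative usage (added; the names are hypothetical):
#   if not isinstance(config, dict):
#       raise NotDictError('config')
#   if 'user' not in config:
#       raise MissingFieldError('config', 'user')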
| agpl-3.0 | -3,886,322,217,009,293,300 | 26.923077 | 74 | 0.599633 | false |
jchampio/apache-websocket | test/present.py | 1 | 3920 | #! /usr/bin/env python
#
# Presents the results of an Autobahn TestSuite run in TAP format.
#
# Copyright 2015 Jacob Champion
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
from distutils.version import StrictVersion
import json
import os.path
import sys
import textwrap
import yamlish
def filter_report(report):
"""Filters a test report dict down to only the interesting keys."""
INTERESTING_KEYS = [
'behavior',
'behaviorClose',
'expected',
'received',
'expectedClose',
'remoteCloseCode'
]
return { key: report[key] for key in INTERESTING_KEYS }
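# Illustrative example (any report keys beyond the interesting ones are
# hypothetical): filter_report({'behavior': 'OK', 'behaviorClose': 'OK',
# 'expected': [], 'received': [], 'expectedClose': [], 'remoteCloseCode': 1000,
# 'duration': 7}) drops 'duration' and keeps the six keys listed above.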
def prepare_description(report):
"""Constructs a description from a test report."""
raw = report['description']
# Wrap to at most 80 characters.
wrapped = textwrap.wrap(raw, 80)
description = wrapped[0]
if len(wrapped) > 1:
# If the text is longer than one line, add an ellipsis.
description += '...'
return description
#
# MAIN
#
# Read the index.
results_dir = 'test-results'
with open(os.path.join(results_dir, 'index.json'), 'r') as index_file:
index = json.load(index_file)['AutobahnPython']
# Sort the tests by numeric ID so we print them in a sane order.
test_ids = index.keys()
test_ids.sort(key=StrictVersion)
# Print the TAP header.
print('TAP version 13')
print('1..{0!s}'.format(len(test_ids)))
count = 0
skipped_count = 0
failed_count = 0
for test_id in test_ids:
count += 1
passed = True
skipped = False
report = None
result = index[test_id]
# Try to get additional information from this test's report file.
try:
path = os.path.join(results_dir, result['reportfile'])
with open(path, 'r') as f:
report = json.load(f)
description = prepare_description(report)
except Exception as e:
description = '[could not load report file: {0!s}]'.format(e)
test_result = result['behavior']
close_result = result['behaviorClose']
# Interpret the result for this test.
if test_result != 'OK' and test_result != 'INFORMATIONAL':
if test_result == 'UNIMPLEMENTED':
skipped = True
else:
passed = False
elif close_result != 'OK' and close_result != 'INFORMATIONAL':
passed = False
# Print the TAP result.
print(u'{0} {1} - [{2}] {3}{4}'.format('ok' if passed else 'not ok',
count,
test_id,
description,
' # SKIP unimplemented' if skipped
else ''))
# Print a YAMLish diagnostic for failed tests.
if report and not passed:
output = filter_report(report)
diagnostic = yamlish.dumps(output)
for line in diagnostic.splitlines():
print(' ' + line)
if not passed:
failed_count += 1
if skipped:
skipped_count += 1
# Print a final result.
print('# Autobahn|TestSuite {0}'.format('PASSED' if not failed_count else 'FAILED'))
print('# total {0}'.format(count))
print('# passed {0}'.format(count - failed_count - skipped_count))
print('# skipped {0}'.format(skipped_count))
print('# failed {0}'.format(failed_count))
exit(0 if not failed_count else 1)
| apache-2.0 | -2,418,566,364,146,643,000 | 27.823529 | 84 | 0.616071 | false |
jwmatthews/cloud_forms_and_rhev_scripts | list_ips_of_vms.py | 1 | 1405 | #! /usr/bin/env python
import os
import sys
try:
from ovirtsdk.api import API
from ovirtsdk.xml import params
except:
print "Please re-run after you have installed 'ovirt-engine-sdk-python'"
print "Example: easy_install ovirt-engine-sdk-python"
sys.exit()
ENV_IP = "OVIRT_IP"
ENV_USERNAME = "OVIRT_USERNAME"
ENV_PASSWORD = "OVIRT_PASSWORD"
def get_all_vms(api):
return api.vms.list()
def print_all_vms(api):
vms = get_all_vms(api)
for vm in vms:
print "Name: %s, IP: %s" % (vm.name, get_guest_ip(vm))
def get_guest_ip(vm):
    info = vm.get_guest_info()
    if info is None:
        return None
    ips = info.get_ips().get_ip()
    if not ips:
        # guest agent reported no addresses
        return None
    return ips[0].get_address()
if __name__ == "__main__":
for env_var in [ENV_IP, ENV_USERNAME, ENV_PASSWORD]:
if env_var not in os.environ:
print "Please re-run after you have set an environment variable for '%s'" % (env_var)
sys.exit()
ip = os.environ[ENV_IP]
password = os.environ[ENV_PASSWORD]
username = os.environ[ENV_USERNAME]
url = "https://%s" % (ip)
api = API(url=url, username=username, password=password, insecure=True)
if not api:
print "Failed to connect to '%s'" % (url)
sys.exit()
print_all_vms(api)
vms2 = api.vms.list(query='name=CloudForms_JWM')
if vms2:
vm = vms2[0]
print vm.name
print get_guest_ip(vm)
| gpl-2.0 | 3,157,357,523,736,450,000 | 24.545455 | 97 | 0.6121 | false |
ppp2006/runbot_number0 | neo_questions/iSmarthomeCtrl.py | 1 | 2532 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
#Copyright (C) 2012-2013 Thecorpora Inc.
#
#This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import rospy
import os
import subprocess
# jieba is needed by smarthomefenci() for word segmentation (assumption: it
# was previously expected to arrive indirectly via the star imports below)
import jieba
from questions import *
from qbo_smart_home_services.srv import *
def smarthomefenci(sentence,language):
    # Segment the sentence and look up the location/device/action keywords
    # (location_list, device_list and action_list come from questions).
    param_location = param_device = param_action = ""
    seg_list=[]
    seg_list1=jieba.cut(sentence)
    for seg in seg_list1:
        seg_list.append(seg)
    for location in location_list:
        if location in seg_list:
            param_location = location
            print param_location
    for device in device_list:
        if device in seg_list:
            param_device = device
            print param_device
    for action in action_list:
        if action in seg_list:
            param_action = action
            print param_action
    if param_location!="" and param_device!="" and param_action!="":
        print "param OK"
    return param_location, param_device, param_action
def smarthomectrl(sentence,language):
    # connect to the ROS smart-home services used below (set host / single control)
rospy.wait_for_service("/smart_home_set_host")
client_sethost = rospy.ServiceProxy("/smart_home_set_host", SetHost)
rospy.wait_for_service("/smart_home_single_ctrl")
client_singlectrl = rospy.ServiceProxy("/smart_home_single_ctrl", SingleCtrl)
# print "sentence:%s"%sentence
# print "language:%s"%language
# sentencelist=sentence.split(' ',2)
# print sentencelist[1]
# txtname=sentencelist[1]
    param_location, param_device, param_action = smarthomefenci(sentence,language)
    client_sethost("192.168.0.134")
# client_singlectrl("客厅", "吊灯左", "开")
    client_singlectrl(param_location, param_device, param_action)
# client_speak("客厅吊灯开")
# rospy.wait_for_service("/qbo_face_recognition/train");
# service_facetrain = rospy.ServiceProxy('/qbo_face_recognition/train', Train)
# res = service_facetrain()
# rospy.loginfo(res.taught)
# return "你好%s,我已经认识你了"%personname
| lgpl-2.1 | 4,922,438,022,231,389,000 | 41.965517 | 240 | 0.696228 | false |
jiherrero4/spark | app_local.py | 1 | 13545 | #!/usr/bin/env python
# encoding: utf-8
import urllib
import json
import os
import requests
import sys
import webbrowser
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from flask import Flask
from flask import request
from flask import make_response
from flask_restful import Resource, Api
from flaskext.mysql import MySQL
# Flask app should start in global layout
# Flask is a web framework, so we can program specific actions based
# on the kind of web request that reaches our application.
#
#
app = Flask(__name__)
# We use labels to store the Spark room identifier for each session.
# If we do not know it yet, we go and look it up.
labels = [["f0b38c60-9a87-11e6-9343-85f91990429b",
"Y2lzY29zcGFyazovL3VzL1JPT00vM2I5OGI5NTMtMGQyNC0zZDY5LWIyNTMtNzkxNzljOWZkNTVj"]]
# Now define what to do when our application receives a POST webhook.
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print("PASO1: Request recibido de api.ai:")
    # indent adds spaces to the output format so that it reads better,
    # instead of being plain flat text.
print(json.dumps(req, indent=4))
res = processRequest(req)
    # Convert res to an indented JSON string.
    res = json.dumps(res, indent=4)
    # print(res)
    # The response has to be of type application/json.
    # The make_response function belongs to the Flask library.
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
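# Illustrative test call (assumption: the service is running locally on the
# default Flask port 5000):
#   curl -X POST http://localhost:5000/webhook \
#        -H 'Content-Type: application/json' \
#        -d '{"result": {"action": "Ayuda"}, "sessionId": "test"}'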
# In this function we process the message received in the webhook (POST).
# The first thing we look for is the action to perform.
#
#
def processRequest(req):
dato = ""
    # Bot access data: the bot token
    bot_token = "MDc0OWJkYjgtZWM4Yy00MzgyLThmNDAtNzQ2ZDliMmE1Y2VkMmE5ODM3OWQtMDQ1"
    # Access data for a moderator (myself by default). This is useful because
    # the bot has certain data-access limitations (set by Cisco for security).
moderator_token = "YjI2NDhkMTYtYjkxMS00ZGYwLWIxNjQtYzQyYTIwOTVhNWI3NDU0YmY2OTYtZjYx"
if req.get("result").get("action") == "creaSala":
creaSalaSpark(moderator_token)
elif req.get("result").get("action") == "creaGrupo":
creaGrupoSpark()
elif req.get("result").get("action") == "llama":
llamaSala()
elif req.get("result").get("action") == "gestionado":
dato = leeExcel(req)
elif req.get("result").get("action") == "Inventario":
dato = leeInventario(req)
elif req.get("result").get("action") == "Ayuda":
dato = get_room_sessions_id(req, bot_token, moderator_token)
texto = help_definition()
status = post_message_markDown(dato, bot_token,texto)
dato = proporcionaAyuda(req)
elif req.get("result").get("action") == "InformacionSala":
dato = get_room_sessions_id(req,bot_token,moderator_token)
status = post_message(dato, bot_token, "probando")
print (status)
else:
return {}
res = makeWebhookResult(dato)
return res
######################################################################################################################
# Actions triggered by client requests
# - Create a room.
# - Get information from a database.
# - Show the assistant's options.
# - ...
######################################################################################################################
def creaSalaSpark(myToken):
print("funcion creaSalaSpark iniciado")
roomTitle = "PruebaCreacionSala"
headers = {"Authorization": "Bearer " + myToken, "Content-type": "application/json"}
# Define the action to be taken in the HTTP request
roomInfo = {"title": roomTitle}
# Execute HTTP POST request to create the Spark Room
r = requests.post("https://api.ciscospark.com/v1/rooms", headers=headers, json=roomInfo)
print("funcion creaSalaSpark completado")
room = r.json()
def creaGrupoSpark():
print("funcion creaGrupoSpark iniciado")
myToken = "YjI2NDhkMTYtYjkxMS00ZGYwLWIxNjQtYzQyYTIwOTVhNWI3NDU0YmY2OTYtZjYx"
# emailFile = userlist.txt
roomTitle = "Ojete" # second argument
# Read the email file and save the emails in an list
# emails = [line.strip() for line in open(emailFile)]
emails = ["[email protected]", "[email protected]", "[email protected]", "[email protected]",
"[email protected]"]
print("funcion creaGrupoSpark, paso2")
# Define header used for authentication
headers = {"Authorization": "Bearer " + myToken,
"Content-type": "application/json"}
# Define the action to be taken in the HTTP request
roomInfo = {"title": roomTitle}
# Execute HTTP POST request to create the Spark Room
r = requests.post("https://api.ciscospark.com/v1/rooms", headers=headers, json=roomInfo)
room = r.json()
# Print the result of the HTTP POST request
print(room)
for email in emails:
        # if it's a blank line, don't add it:
if email == "": continue
# Set the HTTP request payload (action)
membershipInfo = {"roomId": room["id"],
"personEmail": email}
# Execute HTTP POST request to create the Spark Room
r = requests.post("https://api.ciscospark.com/v1/memberships",
headers=headers, json=membershipInfo)
membership = r.json()
print(membership)
print()
def llamaSala():
new = 2 # open in a new tab, if possible
# open a public URL, in this case, the webbrowser docs
# url = "http://expansion.es"
url = "https://pxdemo.ttrends.es/webapp/#/[email protected]"
webbrowser.open(url, new=new)
# Reads information from an Excel spreadsheet
def leeExcel(req):
# print ("vamos a leer el excel")
valorBuscado = ""
result = req.get("result")
parameters = result.get("parameters")
nombreCliente = parameters.get("Clientes")
tipoInformacion = parameters.get("detalle_de_servicios_gestionados")
scope = ['https://spreadsheets.google.com/feeds']
credentials = ServiceAccountCredentials.from_json_keyfile_name('My Project-e08df21666bc.json', scope)
gc = gspread.authorize(credentials)
wks = gc.open("prueba1")
worksheet = wks.worksheet("gestionados")
cliente = worksheet.find(nombreCliente)
servicio = worksheet.find(tipoInformacion)
column = cliente.col
row = servicio.row
# print("row: ",row, "column: ",column)
valorBuscado = worksheet.cell(row, column).value
print("valor Buscado: ", valorBuscado)
return valorBuscado
def leeInventario(req):
    parameters = req.get("result").get("parameters")
    datos_inventario = parameters.get("datos_inventario")
    return datos_inventario
######################################################################################################################
# Functions for Spark rooms
# - Get room identifiers
# - Read messages from the rooms
# - ...
######################################################################################################################
# The goal of this function is to associate the session id that api.ai sends
# with the Spark room identifier (which api.ai does not send).
# By mapping the session id to the room id, sending messages to the room
# can be direct and more efficient.
def get_room_sessions_id(req,bot_token,moderator_token):
sessionId = req.get("sessionId")
    for c in range(len(labels)):
        if (labels[c][0] == sessionId):
            print("ya dispongo del identificador de la sala, lo envio...")
            return labels[c][1]
    # No stored session matched: look the room id up once and cache it.
    roomId = informacionSala(req,bot_token,moderator_token)
    labels.append([sessionId,roomId])
    print("Anadiendo un nuevo identificador de sesion: ", sessionId, "-> con roomId: ",roomId)
    return roomId
def informacionSala(req,bot_token,moderator_token):
identificador_sala = get_bot_room_id(req,bot_token,moderator_token)
print ("el identificador de esta sala es: ", identificador_sala)
return identificador_sala
def proporcionaAyuda(req):
ayuda = "Esto es una \n prueba"
return ayuda
def get_bot_room_id(req,bot_token,moderator_token):
result = req.get("result")
ultima_peticion= result.get("resolvedQuery")
identificador_sala = get_rooms(ultima_peticion,bot_token,moderator_token)
return identificador_sala
def get_rooms(ultima_peticion,bot_token,moderator_token):
header = {'Authorization': "Bearer "+ bot_token, 'content-type': 'application/json'}
result = requests.get(url='https://api.ciscospark.com/v1/rooms', headers=header)
JSONresponse = result.json()
roomlist_array = []
for EachRoom in JSONresponse['items']:
roomlist_array.append(EachRoom.get('title') + ' ** ' + EachRoom.get('id'))
last_message = get_last_message(EachRoom.get('id'),bot_token,moderator_token)
print("Last Message:", last_message)
if (last_message.__contains__(ultima_peticion)):
return EachRoom.get('id')
return "sala no encontrada"
#print("Rooms:", roomlist_array)
def get_last_message(roomid,bot_token,moderator_token):
num_mensajes = 2
header = {'Authorization': "Bearer "+ bot_token, 'content-type': 'application/json'}
payload = {'roomId': roomid, 'max': num_mensajes}
result = requests.get(url='https://api.ciscospark.com/v1/messages', headers=header,params=payload)
    # If fetching the last message fails, it is a group room and the bot lacks
    # permission to get the messages; a moderator (not a bot) present in the
    # group room will have to fetch them instead.
if result.status_code != 200:
header = {'Authorization': "Bearer " + moderator_token , 'content-type': 'application/json'}
payload = {'roomId': roomid, 'max': num_mensajes}
result = requests.get(url='https://api.ciscospark.com/v1/messages', headers=header, params=payload)
    # If it fails again, we cannot get the information, and therefore not the room id either...
if result.status_code != 200:
return ""
JSONresponse = result.json()
messagelist_array = []
#print (JSONresponse)
for EachMessage in JSONresponse['items']:
messagelist_array.append(EachMessage.get('text'))
#print("Messages:",messagelist_array)
return messagelist_array[0]
def get_session_id(req):
session_id = req.get("sessionId")
return session_id
def post_message(roomid,bot_token,text):
header = {'Authorization': "Bearer " + bot_token, 'content-type': 'application/json'}
payload = {'roomId': roomid, 'text': text}
print("RoomId:", roomid)
print("Bottoken: ", bot_token)
result = requests.post(url='https://api.ciscospark.com/v1/messages', headers=header, json=payload)
    # If posting the message fails, it is a group room and the bot lacks the
    # required permissions; a moderator (not a bot) present in the group room
    # would have to post instead.
    if result.status_code != 200:
        print ("RoomId:",roomid)
        print ("Bottoken: ", bot_token)
        return result.json()
    else:
        return "mensaje enviado correctamente..."
def post_message_markDown(roomid,bot_token,markdown):
header = {'Authorization': "Bearer " + bot_token, 'content-type': 'application/json'}
payload = {'roomId': roomid, 'markdown': markdown}
print("RoomId:", roomid)
print("Bottoken: ", bot_token)
result = requests.post(url='https://api.ciscospark.com/v1/messages', headers=header, json=payload)
    # If posting the message fails, it is a group room and the bot lacks the
    # required permissions; a moderator (not a bot) present in the group room
    # would have to post instead.
    if result.status_code != 200:
        print ("RoomId:",roomid)
        print ("Bottoken: ", bot_token)
        return result.json()
    else:
        return "mensaje enviado correctamente..."
######################################################################################################################
# Definition of options and dialogues with clients
# - Help message
# - Default message when no answer is found.
######################################################################################################################
# Definition of the help options.
def help_definition():
text = "Hola, soy Andy! \nEstos son los temas sobre los que te puedo ayudar: \n 1. **Informes de estadisticas.**\n 2. **Informacion de inventario** \n 3. **Actas de reuniones**\n 4. **Soporte Techno Trends**"
return text
def makeWebhookResult(data):
# print ("preparando el mensaje de vuelta")
if data is None or data == "":
speech = "no he encontrado lo que me pides, por favor especifica mas tu peticion..."
else:
speech = data
print("Response:")
print(speech)
return {
"speech": speech,
"displayText": speech,
"source": "from spark"
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print("Starting app on port %d" % port)
app.run(debug=False, port=port, host='0.0.0.0')
| apache-2.0 | 2,362,968,916,008,415,000 | 33.771208 | 212 | 0.640766 | false |
wxgeo/geophar | wxgeometrie/modules/cryptographie/__init__.py | 1 | 12975 | # -*- coding: utf-8 -*-
##--------------------------------------#######
# Cryptographie #
##--------------------------------------#######
# WxGeometrie
# Dynamic geometry, graph plotter, and more for french mathematic teachers.
# Copyright (C) 2005-2013 Nicolas Pourcelot
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from string import ascii_uppercase as majuscules
from functools import partial
from random import shuffle
import re
from PyQt5.QtWidgets import QVBoxLayout, QInputDialog, QPushButton,\
QTextEdit, QGridLayout, QLabel, QLineEdit, QSpacerItem
from PyQt5.QtCore import Qt, QTimer
from ...GUI.menu import MenuBar
from ...GUI.panel import Panel_simple
from ...pylib import print_error
#~ from ... import param
dict_accents = {
"é": "E",
"É": "E",
"ê": "E",
"Ê": "E",
"è": "E",
"È": "E",
"à": "A",
"À": "A",
"â": "A",
"Â": "A",
"ô": "O",
"Ô": "O",
"î": "I",
"Î": "I",
"ù": "U",
"Ù": "U",
"û": "U",
"Û": "U",
"ç": "C",
"Ç": "C",
}
class CaseLettre(QLineEdit):
def __init__(self, parent):
self.parent = parent
QLineEdit.__init__(self, parent)
self.setAlignment(Qt.AlignCenter)
def keyPressEvent(self, evt):
self.parent.message('')
n = evt.key()
if 65 <= n <= 90 or 97 <= n <= 122:
c = chr(n).upper()
for case in self.parent.cases.values():
if case.text() == c:
self.parent.message('La lettre %s est déjà utilisée !' %c)
return
self.setText(c)
elif n in (Qt.Key_Backspace, Qt.Key_Delete):
self.clear()
##QLineEdit.keyPressEvent(self, evt)
class CryptographieMenuBar(MenuBar):
def __init__(self, panel):
MenuBar.__init__(self, panel)
self.ajouter("Fichier", ["quitter"])
self.ajouter("Affichage", ["onglet"], ["plein_ecran"])
self.ajouter("Outils",
["Coder un message", "Code le message par substitution mono-alphabétique.",
"Ctrl+K", panel.coder],
["Coder avec espaces", "Code le message en conservant les espaces (substitution mono-alphabétique).",
"Ctrl+Shift+K", partial(panel.coder, espaces=True)],
["Générer une nouvelle clé", "Générer une nouvelle permutation de l'alphabet.", None, panel.generer_cle],
["Modifier la clé", "Générer une nouvelle permutation de l'alphabet.", None, panel.DlgModifierCle],
None,
["Coder avec Vigenère", "Codage par la méthode de Vigenère (substitution poly-alphabétique).",
None, partial(panel.coder_vigenere, ask=True)],
None,
["options"])
self.ajouter("avance2")
self.ajouter("?")
class Cryptographie(Panel_simple):
titre = "Cryptographie" # Donner un titre à chaque module
def __init__(self, *args, **kw):
Panel_simple.__init__(self, *args, **kw)
self._freeze = False
self.widget_modifie = None
        # The key is the permutation of the alphabet currently used
        # for mono-alphabetic substitution encoding.
self.generer_cle()
        # The encryption key for poly-alphabetic substitution encoding
        # (also known as the Vigenère cipher).
self.cle_vigenere = 'EXEMPLE'
        # Symbol marking a character that has not been decoded yet
self.symbole = '-' # '.'
self.sizer = QVBoxLayout()
self.textes = QGridLayout()
self.textes.setSpacing(5)
size = (400, 300)
txt_clair = QLabel("<b>Texte en clair</b>")
self.clair = QTextEdit()
self.clair.setMinimumSize(*size)
formater_clair = partial(self.formater, widget=self.clair)
self.clair.textChanged.connect(formater_clair)
self.clair.cursorPositionChanged.connect(formater_clair)
self.copier_clair = QPushButton('Copier le texte en clair')
self.copier_clair.clicked.connect(partial(self.copier, widget=self.clair))
txt_code = QLabel("<b>Texte codé</b>")
self.code = QTextEdit()
self.code.setMinimumSize(*size)
self.code.textChanged.connect(self.code_modifie)
self.code.cursorPositionChanged.connect(partial(self.formater, widget=self.code))
self.copier_code = QPushButton('Copier le texte codé')
self.copier_code.clicked.connect(partial(self.copier, widget=self.code))
self.textes.addWidget(txt_clair, 0, 0)
self.textes.addItem(QSpacerItem(50, 1), 0, 1)
self.textes.addWidget(txt_code, 0, 2)
self.textes.addWidget(self.clair, 1, 0)
self.textes.addWidget(self.code, 1, 2)
self.textes.addWidget(self.copier_code, 2, 2)
self.textes.addWidget(self.copier_clair, 2, 0)
self.table = QGridLayout()
self.table.setSpacing(3)
self.cases = {}
self.table.addWidget(QLabel("Codé : ", self), 0, 0)
self.table.addWidget(QLabel("Clair : ", self), 1, 0)
##self.table.setColumnStretch(0, 100)
for i, l in enumerate(majuscules):
lettre = QLineEdit(l, self)
lettre.setAlignment(Qt.AlignCenter)
lettre.setReadOnly(True)
lettre.setEnabled(False)
self.table.addWidget(lettre, 0, i + 1)
##self.table.setColumnStretch(i + 1, 1)
for i, l in enumerate(majuscules):
c = self.cases[l] = CaseLettre(self)
c.setMaxLength(1)
self.table.addWidget(c, 1, i + 1)
c.textChanged.connect(self.decoder)
self.sizer.addLayout(self.textes)
self.sizer.addLayout(self.table)
self.setLayout(self.sizer)
##self.adjustSize()
self.couleur1 = "5A28BE" # sky blue
self.couleur2 = "C86400" # Lime Green
self.couleur_position = "FFCDB3"
self.reg = re.compile("([-A-Za-z]|<##>|</##>)+")
##couleur_position = wx.Color(255, 205, 179) # FFCDB3
##couleur1 = wx.Color(90, 40, 190) # 5A28BE
##couleur2 = wx.Color(200, 100, 0) # C86400
##black = wx.Color(0, 0, 0) # 000000
##white = wx.Color(255, 255, 255) # FFFFFF
##self.special = wx.TextAttr(wx.NullColour, couleur_position)
##self.fond = wx.TextAttr(couleur1, wx.NullColour) #"sky blue"
##self.fond2 = wx.TextAttr(couleur2, wx.NullColour) # "Lime Green"
##self.defaut = wx.TextAttr(black, white)
##
##self.Bind(wx.EVT_IDLE, self.OnIdle)
timer = QTimer(self)
timer.timeout.connect(self.OnIdle)
timer.start(100)
# DEBUG:
##self.code.setPlainText('WR IRAMXPZRHRDZ IK HRYYOVR AL IRYYBKY RYZ NOALWLZR POM WR NOLZ FKR W BD O VOMIR WRY YLVDRY IR PBDAZKOZLBD RZ WRY RYPOARY RDZMR WRY HBZY OWBMY FKR I QOELZKIR BD VMBKPR WRY WRZZMRY ALDF POM ALDF')
def copier(self, evt=None, widget=None):
self.vers_presse_papier(widget.toPlainText())
def DlgModifierCle(self, evt=None):
while True:
text, ok = QInputDialog.getText(self, "Modifier la clé",
"La clé doit être une permutation de l'alphabet,\n"
"ou un chiffre qui indique de combien l'alphabet est décalé.",
text=str(self.cle))
if ok:
try:
self.modifier_cle(text)
except:
print_error()
continue
break
def generer_cle(self):
l = list(majuscules)
shuffle(l)
self.cle = ''.join(l)
def modifier_cle(self, cle):
cle = cle.strip().upper()
if cle.isdigit():
n = int(cle)
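            # a purely numeric key means a Caesar shift: rotate the alphabet by n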
cle = majuscules[n:] + majuscules[:n]
        # Check that the key is indeed a permutation of the alphabet:
assert ''.join(sorted(cle)) == majuscules
self.cle = cle
def coder(self, evt=None, cle=None, espaces=False):
cle = (self.cle if cle is None else cle)
clair = self.clair.toPlainText().upper()
for key, val in dict_accents.items():
clair = clair.replace(key, val)
d = dict(zip(majuscules, cle))
code = ''.join(d.get(s, ' ') for s in clair)
code = re.sub(' +', ' ', code)
if not espaces:
code = code.replace(' ', '')
self.code.setPlainText(code)
return code
@staticmethod
def _vigenere(l1, l2):
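        # 'A' is chr(65), so subtracting 2*65 = 130 maps both letters to the
        # range 0-25; their sum modulo 26 is then shifted back into 'A'-'Z'.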
return chr((ord(l1) + ord(l2) - 130)%26 + 65)
def coder_vigenere(self, evt=None, msg=None, cle=None, ask=False):
def gen():
length = len(cle)
n = 0
for car in clair:
if car.isalpha():
yield self._vigenere(cle[n%length], car)
n += 1
else:
yield car
if ask:
self.DlgModifierCleVigenere()
if cle is None:
cle = self.cle_vigenere
if msg is None:
msg = self.clair.toPlainText()
msg = msg.upper()
        # For now, spaces are not supported
        clair = msg.replace(' ', '')
for key, val in dict_accents.items():
clair = clair.replace(key, val)
code = ''.join(gen())
self.code.setPlainText(code)
return code
def DlgModifierCleVigenere(self, evt=None):
while True:
text, ok = QInputDialog.getText(self, "Modifier la clé pour Vigenère",
"La clé doit contenir uniquement des lettres.",
text=self.cle_vigenere)
if ok:
text = text.strip()
if not text.isalpha():
continue
self.cle_vigenere = text.upper()
break
def decoder(self, txt=None):
code = self.code.toPlainText().upper()
def f(s):
if s in majuscules:
return self.cases[s].text() or self.symbole
return s
clair = ''.join(f(s) for s in code)
self.clair.setPlainText(clair)
def code_modifie(self, txt=None):
self.decoder(txt)
self.formater(txt, widget=self.code)
def formater(self, evt=None, widget=None):
##evt.Skip()
if self._freeze:
return
self.widget_modifie = widget
def _formater(self, widget_modifie):
        # The two texts cannot be formatted identically if they are not
        # the same length.
        # This should only happen temporarily (e.g. while the user is
        # pasting a new text).
if len(self.code.toPlainText()) != len(self.clair.toPlainText()):
if self.code.toPlainText() and self.clair.toPlainText():
print('Warning: le message codé et le message en clair ne sont '
'pas de même longueur.')
return
def colorier(m, col1=[self.couleur1], col2=[self.couleur2]):
s = m.group(0)
s = "<font color='#%s'>%s</font>" % (col1[0], s)
col1[0], col2[0] = col2[0], col1[0]
return s
self._freeze = True
pos = widget_modifie.textCursor().position()
for w in (self.code, self.clair):
txt = w.toPlainText()
if pos != len(txt):
txt = txt[:pos] + '<##>' + txt[pos] + '</##>' + txt[pos + 1:]
new_txt = re.sub(self.reg, colorier, txt)
new_txt = new_txt.replace("<##>",
"<font style='background-color: #%s;'>" % self.couleur_position)
new_txt = new_txt.replace("</##>", "</font>")
w.setHtml(new_txt)
cursor = widget_modifie.textCursor()
cursor.setPosition(pos)
widget_modifie.setTextCursor(cursor)
self._freeze = False
self.widget_modifie = None
def OnIdle(self, evt=None):
if self.widget_modifie is not None and not self.parent.parent.closing:
self._formater(self.widget_modifie)
| gpl-2.0 | 2,154,801,898,003,441,000 | 35.061453 | 228 | 0.562974 | false |
redtoad/python-amazon-product-api | tests/test_utils.py | 1 | 3255 | import os
import pytest
try:
from types import TypeType
except ImportError:
TypeType = type
from amazonproduct import utils
from amazonproduct.processors import etree, minidom
def test_load_global_file_config(configfiles):
configfiles.add_file('''
[Credentials]
access_key = ABCDEFGH12345
secret_key = zhgsdds8''', path='/etc/amazon-product-api.cfg')
cfg = utils.load_file_config()
assert cfg['access_key'] == 'ABCDEFGH12345'
assert cfg['secret_key'] == 'zhgsdds8'
assert len(cfg) == 2
def test_load_local_file_config(configfiles):
configfiles.add_file('''
[Credentials]
access_key = ABCDEFGH12345
secret_key = zhgsdds8''', path='~/.amazon-product-api')
cfg = utils.load_file_config()
assert cfg['access_key'] == 'ABCDEFGH12345'
assert cfg['secret_key'] == 'zhgsdds8'
assert len(cfg) == 2
def test_load_environment_config(monkeypatch):
monkeypatch.setenv('AWS_ACCESS_KEY', 'ABCDEFGH12345')
monkeypatch.setenv('AWS_SECRET_ACCESS_KEY', 'zhgsdds8')
monkeypatch.setenv('AWS_LOCALE', 'uk')
cfg = utils.load_environment_config()
assert cfg['access_key'] == 'ABCDEFGH12345'
assert cfg['secret_key'] == 'zhgsdds8'
assert cfg['locale'] == 'uk'
assert 'associate_tag' not in cfg
DUMMY_CONFIG = """
# file: /etc/amazon-product-api.cfg
[Credentials]
access_key = global cfg value
secret_key = global cfg value
# file: ~/.amazon-product-api
[Credentials]
secret_key = local cfg value
locale = de
# file: ~/my-config
[Credentials]
secret_key = CUSTOM CONFIG OVERRIDES ALL!
"""
def test_load_config(configfiles, monkeypatch):
configfiles.load_from_string(DUMMY_CONFIG)
for key in ['AWS_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY', 'AWS_ASSOCIATE_TAG', 'AWS_LOCALE']:
monkeypatch.delenv(key, raising=False)
monkeypatch.setenv('AWS_LOCALE', 'OS VARIABLE')
cfg = utils.load_config()
assert set(cfg.keys()) == set([
'access_key', 'secret_key', 'associate_tag', 'locale'])
assert cfg['access_key'] == 'global cfg value'
assert cfg['secret_key'] == 'local cfg value'
assert cfg['associate_tag'] is None
assert cfg['locale'] == 'OS VARIABLE'
def test_specific_config_file_overrides_all_but_os_variables(configfiles, monkeypatch):
configfiles.load_from_string(DUMMY_CONFIG)
for key in ['AWS_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY', 'AWS_ASSOCIATE_TAG', 'AWS_LOCALE']:
monkeypatch.delenv(key, raising=False)
monkeypatch.setenv('AWS_LOCALE', 'OS VARIABLE')
path = configfiles.tmpdir.join(os.path.expanduser('~/my-config')).strpath
cfg = utils.load_config(path)
assert set(cfg.keys()) == set([
'access_key', 'secret_key', 'associate_tag', 'locale'])
assert cfg['secret_key'] == 'CUSTOM CONFIG OVERRIDES ALL!'
assert cfg['access_key'] is None
assert cfg['associate_tag'] is None
assert cfg['locale'] == 'OS VARIABLE'
@pytest.mark.parametrize(('txt', 'cls'), [
('amazonproduct.processors.etree.Processor', etree.Processor),
('amazonproduct.processors.minidom.Processor', minidom.Processor),
])
def test_load_class(txt, cls):
loaded = utils.load_class(txt)
assert isinstance(loaded, TypeType)
assert loaded == cls | bsd-3-clause | -4,264,656,500,175,639,600 | 29.716981 | 94 | 0.671889 | false |
mabuchilab/QNET | tests/algebra/test_pauli_matrics.py | 1 | 2399 | """Test for PauliX, PauliY, PauliZ"""
from sympy import I
import pytest
from qnet import (
PauliX, PauliY, PauliZ, LocalSigma, LocalSpace, LocalProjector, SpinSpace)
def test_fock_pauli_matrices():
"""Test correctness of Pauli matrices on a Fock space"""
assert PauliX(1) == LocalSigma(0, 1, hs=1) + LocalSigma(1, 0, hs=1)
assert PauliX(1) == PauliX('1') == PauliX(LocalSpace('1'))
assert PauliY(1).expand() == (
-I * LocalSigma(0, 1, hs=1) + I * LocalSigma(1, 0, hs=1))
assert PauliY(1) == PauliY('1') == PauliY(LocalSpace('1'))
assert PauliZ(1) == LocalProjector(0, hs=1) - LocalProjector(1, hs=1)
assert PauliZ(1) == PauliZ('1') == PauliZ(LocalSpace('1'))
assert PauliX(1, states=(0, 2)) == (
LocalSigma(0, 2, hs=1) + LocalSigma(2, 0, hs=1))
hs = LocalSpace("1", basis=('g', 'e', 'r'))
assert PauliX(hs) == LocalSigma(0, 1, hs=hs) + LocalSigma(1, 0, hs=hs)
assert PauliX(hs) == PauliX(hs, states=('g', 'e'))
assert PauliY(hs).expand() == (
-I * LocalSigma(0, 1, hs=hs) + I * LocalSigma(1, 0, hs=hs))
assert PauliY(hs) == PauliY(hs, states=('g', 'e'))
assert PauliZ(hs) == LocalProjector(0, hs=hs) - LocalProjector(1, hs=hs)
assert PauliZ(hs) == PauliZ(hs, states=('g', 'e'))
assert PauliX(hs, states=(0, 2)) == (
LocalSigma('g', 'r', hs=hs) + LocalSigma('r', 'g', hs=hs))
assert PauliX(hs, states=(0, 2)) == PauliX(hs, states=('g', 'r'))
def test_spin_pauli_matrices():
"""Test correctness of Pauli matrices on a spin space"""
hs = SpinSpace("s", spin='1/2', basis=('down', 'up'))
assert PauliX(hs) == (
LocalSigma('down', 'up', hs=hs) + LocalSigma('up', 'down', hs=hs))
assert PauliX(hs) == PauliX(hs, states=('down', 'up'))
assert PauliY(hs).expand() == (
-I * LocalSigma('down', 'up', hs=hs) +
I * LocalSigma('up', 'down', hs=hs))
assert PauliY(hs) == PauliY(hs, states=('down', 'up'))
assert PauliZ(hs) == (
LocalProjector('down', hs=hs) - LocalProjector('up', hs=hs))
assert PauliZ(hs) == PauliZ(hs, states=('down', 'up'))
hs = SpinSpace("s", spin=1, basis=('-', '0', '+'))
with pytest.raises(TypeError):
PauliX(hs, states=(0, 2))
assert PauliX(hs, states=('-', '+')) == (
LocalSigma('-', '+', hs=hs) + LocalSigma('+', '-', hs=hs))
assert PauliX(hs) == PauliX(hs, states=('-', '0'))
| mit | -3,286,054,655,234,274,000 | 43.425926 | 78 | 0.56857 | false |
ingadhoc/odoo-kinesis-athletics | kinesis_athletics_x/evaluation_detail.py | 1 | 8124 | # -*- coding: utf-8 -*-
from openerp import models, fields, api, _
from openerp.exceptions import Warning
class evaluation_detail(models.Model):
""""""
_inherit = 'kinesis_athletics.evaluation_detail'
@api.multi
def get_test_description(self):
return {
'type': 'ir.actions.act_window',
'res_model': 'kinesis_athletics.test',
'view_mode': 'form',
'res_id': self.test_id.id,
'target': 'new'
}
    # Taken out of the dependency list: there is no need for this to update
    # in real time, and the store was dropped for performance reasons
@api.depends(
'result',
# 'test_id',
# 'test_id.type',
# 'test_id.test_range_ids',
# 'test_id.test_range_ids.from_age',
# 'test_id.test_range_ids.to_age',
# 'test_id.test_range_ids.sex',
# 'test_id.test_range_ids.val_max',
# 'test_id.test_range_ids.val_min',
# 'test_id.test_range_ids.extreme_minimum',
# 'test_id.test_range_ids.extreme_maximum',
# 'evaluation_id',
# 'evaluation_id.is_template',
# 'evaluation_id.partner_id',
)
@api.one
def _get_state(self):
test = self.test_id
evaluation = self.evaluation_id
partner = self.evaluation_id.partner_id
result = self.result
state = False
if not evaluation.is_template and partner:
ref_min, ref_max, ref_ext_max, ref_ext_min = test._get_min_max(
test.id, partner.id)
if result > ref_max:
state = self.test_id.rating_over_maximum
if result < ref_min:
state = self.test_id.rating_below_minimum
if result >= ref_min and result <= ref_max:
state = 'ideal'
if ref_ext_min == ref_min and ref_max == ref_ext_max:
state = 'none'
self.state = state
    # Taken out of the dependency list: there is no need for this to update
    # in real time, and the store was dropped for performance reasons
# @api.depends(
# 'test_id',
# 'test_id.type',
# 'test_id.test_range_ids',
# 'test_id.test_range_ids.from_age',
# 'test_id.test_range_ids.to_age',
# 'test_id.test_range_ids.sex',
# 'test_id.test_range_ids.val_max',
# 'test_id.test_range_ids.val_min',
# 'test_id.test_range_ids.extreme_minimum',
# 'test_id.test_range_ids.extreme_maximum',
# 'evaluation_id',
# 'evaluation_id.is_template',
# 'evaluation_id.partner_id',
# )
@api.one
def _get_age_avg(self):
test = self.test_id
evaluation = self.evaluation_id
partner = self.evaluation_id.partner_id
age_avg = False
if not evaluation.is_template and partner:
age_range = (partner.age, partner.age)
age_results = test._get_results(
test.id, sex=partner.sex, age_range=age_range)
age_avg = False
if age_results:
age_avg = sum(age_results) / len(age_results)
self.age_avg = age_avg
    # Taken out of the dependency list: there is no need for this to update
    # in real time, and the store was dropped for performance reasons
# @api.depends(
# 'test_id',
# 'test_id.type',
# 'test_id.test_range_ids',
# 'test_id.test_range_ids.from_age',
# 'test_id.test_range_ids.to_age',
# 'test_id.test_range_ids.sex',
# 'test_id.test_range_ids.val_max',
# 'test_id.test_range_ids.val_min',
# 'test_id.test_range_ids.extreme_minimum',
# 'test_id.test_range_ids.extreme_maximum',
# 'evaluation_id',
# 'evaluation_id.partner_id',
# )
@api.one
def _get_plotbands_values(self):
test_ranges = self.env['kinesis_athletics.test_range']
test = self.test_id
partner = self.evaluation_id.partner_id
plotband_ext_min = False
plotband_val_min = False
plotband_val_max = False
plotband_ext_max = False
test_ranges = test_ranges.search(
[('test_id', '=', test.id)])
if test_ranges and partner:
plotband_val_min, plotband_val_max, plotband_ext_max, plotband_ext_min = test._get_min_max(
test.id, partner.id)
self.plotband_val_min = format(plotband_val_min, '.2f')
self.plotband_val_max = format(plotband_val_max, '.2f')
self.plotband_ext_max = format(plotband_ext_max, '.2f')
self.plotband_ext_min = format(plotband_ext_min, '.2f')
partner_id = fields.Many2one(
'res.partner',
'Partner',
related='evaluation_id.partner_id',
copy=False,
readonly=True,
store=True)
uom_id = fields.Many2one(
'product.uom',
'Unit',
related='test_id.uom_id',
copy=False,
readonly=True)
age_avg = fields.Float(
compute='_get_age_avg',
string='Age Average',)
plotband_ext_min = fields.Float(
compute='_get_plotbands_values',
# store=True,
string='ext_min',)
plotband_val_min = fields.Float(
compute='_get_plotbands_values',
# store=True,
string="val_min",)
plotband_val_max = fields.Float(
compute='_get_plotbands_values',
# store=True,
string="val_max",)
plotband_ext_max = fields.Float(
compute='_get_plotbands_values',
# store=True,
string="ext_max",)
rating_below_minimum = fields.Selection(
related='test_id.rating_below_minimum',
copy=False,
readonly=True,
string='rating_below_minimum')
rating_between = fields.Selection(
related='test_id.rating_between',
copy=False,
readonly=True,
string='rating_between')
rating_over_maximum = fields.Selection(
related='test_id.rating_over_maximum',
readonly=True,
string='rating_over_maximum')
state = fields.Selection(
[('alert', 'Alert'), ('ideal', 'Ideal'),
('superior', 'Superior'), ('none', 'None')],
compute='_get_state',
string='State',
store=True,)
test_type = fields.Selection(
related='test_id.type',
copy=False,
string="Test Type",
readonly=True)
test_description = fields.Char(
related='test_id.description',
copy=False,
string="Test Description",
readonly=True)
first_parent_id = fields.Many2one(
'kinesis_athletics.test_category',
related='test_id.test_category_id.first_parent_id',
copy=False,
string='Test Class',
readonly=True,
store=True)
group_id = fields.Many2one(
'kinesis_athletics.group',
related='evaluation_id.group_id',
string="Group",
copy=False,
readonly=True,
store=True)
date = fields.Date(
related='evaluation_id.date',
string="Date",
copy=False,
readonly=True,
store=True)
company_id = fields.Many2one(
'res.company',
related='evaluation_id.company_id',
string="Company",
copy=False,
readonly=True,
store=True)
@api.one
@api.constrains('test_id', 'evaluation_id')
def _check_duplicate_test(self):
tests = self.search([
('test_id', '=', self.test_id.id),
('evaluation_id', '=', self.evaluation_id.id)])
if len(tests) > 1:
raise Warning(_('Already loaded the test'))
@api.one
@api.constrains(
'evaluation_id', 'plotband_ext_min', 'plotband_ext_max', 'result')
@api.onchange(
'evaluation_id', 'plotband_ext_min', 'plotband_ext_max', 'result')
def _check_result(self):
if not self.evaluation_id.is_template:
if self.plotband_ext_min and self.plotband_ext_max:
if self.result != 0:
if self.result < self.plotband_ext_min or self.result > self.plotband_ext_max:
raise Warning(_('Result out of range'))
| agpl-3.0 | 2,112,223,470,113,195,500 | 32.570248 | 103 | 0.558469 | false |
hujc91/uw-ffpg | pivTools/loglay_fit.py | 1 | 1749 | #---------- Forematters---------------------------------------------
import numpy as np
#-------------------------------------------------------------------
def loglay_fit(up, yp, ypthresL, ypthresH):
'''
Curve fit for velocity profiles in the log-law layer of a wall-bounded shear flow
u+ = a*log(y+) + b (eq.1)
Inputs:
up - dimensionless velocity scaled by inner-scaling velocity scale (u+)
    yp - dimensionless coordinates scaled by inner-scaling length scale (y+)
    ypthresL - lower bound of the log-law range (typical value range: [20,35])
    ypthresH - upper bound of the log-law range (typical value range: [50,80])
    Outputs:
    u_grwrt - curve fit coefficient (a) in eq.1
    u_intcp - curve fit intercept (b) in eq.1
    Note:
    For fully developed turbulent flow over a flat surface:
    a ~= 2.43
    b ~= 5.2
'''
    # indices of the y+ values inside the log-law range
    idxloglay = np.where((yp>=ypthresL)&(yp<=ypthresH))
    # Extract u and take the natural logarithm of y
ufit = up[idxloglay]
yfit = np.log(yp[idxloglay])
# Estimate the slope for least square regression
    idxlsq0 = int(np.argmax(ufit))
    idxlsq1 = int(np.argmin(ufit))
    idxlsq2 = int(np.size(ufit)/2)
du = ufit[idxlsq0]-ufit[idxlsq1]
dy = yfit[idxlsq0]-yfit[idxlsq1]
dudy = du/dy
A = np.vstack([yfit, dudy*np.ones(len(yfit))]).T
u_grwrt, u_intcp = np.linalg.lstsq(A, ufit)[0]
    # Correct the intercept using the offsets at three reference points
u_offset0 = ufit[idxlsq0] - (u_grwrt*yfit[idxlsq0]+u_intcp)
u_offset1 = ufit[idxlsq1] - (u_grwrt*yfit[idxlsq1]+u_intcp)
u_offset2 = ufit[idxlsq2] - (u_grwrt*yfit[idxlsq2]+u_intcp)
u_intcp = u_intcp + (u_offset0 + u_offset1 + u_offset2)/3
return u_grwrt,u_intcp
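if __name__ == '__main__':
    # Minimal self-check (illustrative sketch, not part of the original
    # module): a synthetic log-law profile built with a = 2.43 and b = 5.2
    # should be recovered by the fit.
    yp_demo = np.logspace(1, 2, 200)          # y+ from 10 to 100
    up_demo = 2.43*np.log(yp_demo) + 5.2      # u+ = a*log(y+) + b
    a_fit, b_fit = loglay_fit(up_demo, yp_demo, 30, 80)
    print('a = %.3f, b = %.3f' % (a_fit, b_fit))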
| mit | 8,867,636,776,251,121,000 | 32.634615 | 85 | 0.600343 | false |
nathanbjenx/cairis | cairis/bin/add_cairis_user.py | 1 | 3248 | #!/usr/bin/python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import argparse
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_security import Security, SQLAlchemyUserDatastore, UserMixin, RoleMixin, login_required
from flask_cors import CORS
from cairis.core.Borg import Borg
from cairis.core.MySQLDatabaseProxy import createDatabaseAccount,createDatabaseAndPrivileges,createDatabaseSchema
import cairis.core.BorgFactory
__author__ = 'Shamal Faily'
cairis.core.BorgFactory.dInitialise()
app = Flask(__name__)
app.config['DEBUG'] = True
b = Borg()
app.config['SECRET_KEY'] = b.secretKey
app.config['SECURITY_PASSWORD_HASH'] = b.passwordHash
app.config['SECURITY_PASSWORD_SALT'] = b.passwordSalt
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://root:' + b.rPasswd + '@' + b.dbHost + '/cairis_user'
db = SQLAlchemy(app)
cors = CORS(app)
roles_users = db.Table('roles_users', db.Column('user_id', db.Integer(), db.ForeignKey('auth_user.id')), db.Column('role_id', db.Integer(), db.ForeignKey('auth_role.id')))
class Role(db.Model, RoleMixin):
__tablename__ = 'auth_role'
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
__tablename__ = 'auth_user'
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
name = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users, backref=db.backref('users', lazy='dynamic'))
user_datastore = SQLAlchemyUserDatastore(db,User, Role)
security = Security(app, user_datastore)
def main():
parser = argparse.ArgumentParser(description='Computer Aided Integration of Requirements and Information Security - Add CAIRIS user')
parser.add_argument('user',help='Email address')
parser.add_argument('password',help='password')
parser.add_argument('name',help='Full name')
args = parser.parse_args()
createDatabaseAccount(b.rPasswd,b.dbHost,b.dbPort,args.user,'')
createDatabaseAndPrivileges(b.rPasswd,b.dbHost,b.dbPort,args.user,'',args.user + '_default')
createDatabaseSchema(b.cairisRoot,b.dbHost,b.dbPort,args.user,'',args.user + '_default')
db.create_all()
user_datastore.create_user(email=args.user, password=args.password, name=args.name)
db.session.commit()
if __name__ == '__main__':
main()
| apache-2.0 | 7,796,071,425,858,259,000 | 40.113924 | 171 | 0.740148 | false |
maciejkula/scipy | scipy/interpolate/polyint.py | 1 | 32302 | from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from scipy.special import factorial
from scipy.lib.six import xrange
__all__ = ["KroghInterpolator", "krogh_interpolate", "BarycentricInterpolator",
"barycentric_interpolate", "PiecewisePolynomial",
"piecewise_polynomial_interpolate", "approximate_taylor_polynomial"]
def _isscalar(x):
"""Check whether x is if a scalar type, or 0-dim"""
return np.isscalar(x) or hasattr(x, 'shape') and x.shape == ()
class _Interpolator1D(object):
"""
Common features in univariate interpolation
Deal with input data type and interpolation axis rolling. The
actual interpolator can assume the y-data is of shape (n, r) where
`n` is the number of x-points, and `r` the number of variables,
and use self.dtype as the y-data type.
Attributes
----------
_y_axis
Axis along which the interpolation goes in the original array
_y_extra_shape
Additional trailing shape of the input arrays, excluding
the interpolation axis.
dtype
Dtype of the y-data arrays. Can be set via set_dtype, which
forces it to be float or complex.
Methods
-------
__call__
_prepare_x
_finish_y
_reshape_yi
_set_yi
_set_dtype
_evaluate
"""
__slots__ = ('_y_axis', '_y_extra_shape', 'dtype')
def __init__(self, xi=None, yi=None, axis=None):
self._y_axis = axis
self._y_extra_shape = None
self.dtype = None
if yi is not None:
self._set_yi(yi, xi=xi, axis=axis)
def __call__(self, x):
"""
Evaluate the interpolant
Parameters
----------
x : array-like
Points to evaluate the interpolant at.
Returns
-------
y : array-like
Interpolated values. Shape is determined by replacing
the interpolation axis in the original array with the shape of x.
"""
x, x_shape = self._prepare_x(x)
y = self._evaluate(x)
return self._finish_y(y, x_shape)
def _evaluate(self, x):
"""
Actually evaluate the value of the interpolator.
"""
raise NotImplementedError()
def _prepare_x(self, x):
"""Reshape input x array to 1-D"""
x = np.asarray(x)
if not np.issubdtype(x.dtype, np.inexact):
# Cast integers etc to floats
x = x.astype(float)
x_shape = x.shape
return x.ravel(), x_shape
def _finish_y(self, y, x_shape):
"""Reshape interpolated y back to n-d array similar to initial y"""
y = y.reshape(x_shape + self._y_extra_shape)
if self._y_axis != 0 and x_shape != ():
nx = len(x_shape)
ny = len(self._y_extra_shape)
s = (list(range(nx, nx + self._y_axis))
+ list(range(nx)) + list(range(nx+self._y_axis, nx+ny)))
y = y.transpose(s)
return y
def _reshape_yi(self, yi, check=False):
yi = np.rollaxis(np.asarray(yi), self._y_axis)
if check and yi.shape[1:] != self._y_extra_shape:
ok_shape = "%r + (N,) + %r" % (self._y_extra_shape[-self._y_axis:],
self._y_extra_shape[:-self._y_axis])
raise ValueError("Data must be of shape %s" % ok_shape)
return yi.reshape((yi.shape[0], -1))
def _set_yi(self, yi, xi=None, axis=None):
if axis is None:
axis = self._y_axis
if axis is None:
raise ValueError("no interpolation axis specified")
yi = np.asarray(yi)
shape = yi.shape
if shape == ():
shape = (1,)
if xi is not None and shape[axis] != len(xi):
raise ValueError("x and y arrays must be equal in length along "
"interpolation axis.")
self._y_axis = (axis % yi.ndim)
self._y_extra_shape = yi.shape[:self._y_axis]+yi.shape[self._y_axis+1:]
self.dtype = None
self._set_dtype(yi.dtype)
def _set_dtype(self, dtype, union=False):
if np.issubdtype(dtype, np.complexfloating) \
or np.issubdtype(self.dtype, np.complexfloating):
self.dtype = np.complex_
else:
if not union or self.dtype != np.complex_:
self.dtype = np.float_
class _Interpolator1DWithDerivatives(_Interpolator1D):
def derivatives(self, x, der=None):
"""
Evaluate many derivatives of the polynomial at the point x
Produce an array of all derivative values at the point x.
Parameters
----------
x : array-like
Point or points at which to evaluate the derivatives
der : None or integer
How many derivatives to extract; None for all potentially
nonzero derivatives (that is a number equal to the number
of points). This number includes the function value as 0th
derivative.
Returns
-------
d : ndarray
Array with derivatives; d[j] contains the j-th derivative.
Shape of d[j] is determined by replacing the interpolation
axis in the original array with the shape of x.
Examples
--------
>>> KroghInterpolator([0,0,0],[1,2,3]).derivatives(0)
array([1.0,2.0,3.0])
>>> KroghInterpolator([0,0,0],[1,2,3]).derivatives([0,0])
array([[1.0,1.0],
[2.0,2.0],
[3.0,3.0]])
"""
x, x_shape = self._prepare_x(x)
y = self._evaluate_derivatives(x, der)
y = y.reshape((y.shape[0],) + x_shape + self._y_extra_shape)
if self._y_axis != 0 and x_shape != ():
nx = len(x_shape)
ny = len(self._y_extra_shape)
s = ([0] + list(range(nx+1, nx + self._y_axis+1))
+ list(range(1,nx+1)) +
list(range(nx+1+self._y_axis, nx+ny+1)))
y = y.transpose(s)
return y
def derivative(self, x, der=1):
"""
Evaluate one derivative of the polynomial at the point x
Parameters
----------
x : array-like
Point or points at which to evaluate the derivatives
der : integer, optional
Which derivative to extract. This number includes the
function value as 0th derivative.
Returns
-------
d : ndarray
Derivative interpolated at the x-points. Shape of d is
determined by replacing the interpolation axis in the
original array with the shape of x.
Notes
-----
This is computed by evaluating all derivatives up to the desired
one (using self.derivatives()) and then discarding the rest.
"""
x, x_shape = self._prepare_x(x)
y = self._evaluate_derivatives(x, der+1)
return self._finish_y(y[der], x_shape)
class KroghInterpolator(_Interpolator1DWithDerivatives):
"""
Interpolating polynomial for a set of points.
The polynomial passes through all the pairs (xi,yi). One may
additionally specify a number of derivatives at each point xi;
this is done by repeating the value xi and specifying the
derivatives as successive yi values.
Allows evaluation of the polynomial and all its derivatives.
For reasons of numerical stability, this function does not compute
the coefficients of the polynomial, although they can be obtained
by evaluating all the derivatives.
Parameters
----------
xi : array-like, length N
Known x-coordinates. Must be sorted in increasing order.
yi : array-like
Known y-coordinates. When an xi occurs two or more times in
a row, the corresponding yi's represent derivative values.
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
Notes
-----
Be aware that the algorithms implemented here are not necessarily
the most numerically stable known. Moreover, even in a world of
exact computation, unless the x coordinates are chosen very
carefully - Chebyshev zeros (e.g. cos(i*pi/n)) are a good choice -
polynomial interpolation itself is a very ill-conditioned process
due to the Runge phenomenon. In general, even with well-chosen
x values, degrees higher than about thirty cause problems with
numerical instability in this code.
Based on [1]_.
References
----------
.. [1] Krogh, "Efficient Algorithms for Polynomial Interpolation
and Numerical Differentiation", 1970.
Examples
--------
To produce a polynomial that is zero at 0 and 1 and has
derivative 2 at 0, call
>>> KroghInterpolator([0,0,1],[0,2,0])
This constructs the quadratic 2*X**2-2*X. The derivative condition
is indicated by the repeated zero in the xi array; the corresponding
yi values are 0, the function value, and 2, the derivative value.
For another example, given xi, yi, and a derivative ypi for each
point, appropriate arrays can be constructed as:
>>> xi_k, yi_k = np.repeat(xi, 2), np.ravel(np.dstack((yi,ypi)))
>>> KroghInterpolator(xi_k, yi_k)
To produce a vector-valued polynomial, supply a higher-dimensional
array for yi:
>>> KroghInterpolator([0,1],[[2,3],[4,5]])
This constructs a linear polynomial giving (2,3) at 0 and (4,5) at 1.
"""
def __init__(self, xi, yi, axis=0):
_Interpolator1DWithDerivatives.__init__(self, xi, yi, axis)
self.xi = np.asarray(xi)
self.yi = self._reshape_yi(yi)
self.n, self.r = self.yi.shape
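        # c[k] are Newton-form coefficients obtained from generalized
        # (Hermite) divided differences; a value repeated s times supplies
        # derivatives, hence the division by s! below.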
c = np.zeros((self.n+1, self.r), dtype=self.dtype)
c[0] = self.yi[0]
Vk = np.zeros((self.n, self.r), dtype=self.dtype)
for k in xrange(1,self.n):
s = 0
while s <= k and xi[k-s] == xi[k]:
s += 1
s -= 1
Vk[0] = self.yi[k]/float(factorial(s))
for i in xrange(k-s):
if xi[i] == xi[k]:
raise ValueError("Elements if `xi` can't be equal.")
if s == 0:
Vk[i+1] = (c[i]-Vk[i])/(xi[i]-xi[k])
else:
Vk[i+1] = (Vk[i+1]-Vk[i])/(xi[i]-xi[k])
c[k] = Vk[k-s]
self.c = c
def _evaluate(self, x):
pi = 1
p = np.zeros((len(x), self.r), dtype=self.dtype)
p += self.c[0,np.newaxis,:]
for k in range(1, self.n):
w = x - self.xi[k-1]
pi = w*pi
p += pi[:,np.newaxis] * self.c[k]
return p
def _evaluate_derivatives(self, x, der=None):
n = self.n
r = self.r
if der is None:
der = self.n
pi = np.zeros((n, len(x)))
w = np.zeros((n, len(x)))
pi[0] = 1
p = np.zeros((len(x), self.r))
p += self.c[0,np.newaxis,:]
for k in xrange(1,n):
w[k-1] = x - self.xi[k-1]
pi[k] = w[k-1]*pi[k-1]
p += pi[k,:,np.newaxis]*self.c[k]
cn = np.zeros((max(der,n+1), len(x), r), dtype=self.dtype)
cn[:n+1,:,:] += self.c[:n+1,np.newaxis,:]
cn[0] = p
for k in xrange(1,n):
for i in xrange(1,n-k+1):
pi[i] = w[k+i-1]*pi[i-1]+pi[i]
cn[k] = cn[k]+pi[i,:,np.newaxis]*cn[k+i]
cn[k] *= factorial(k)
cn[n,:,:] = 0
return cn[:der]
def krogh_interpolate(xi,yi,x,der=0,axis=0):
"""
Convenience function for polynomial interpolation.
See `KroghInterpolator` for more details.
Parameters
----------
xi : array_like
Known x-coordinates.
yi : array_like
Known y-coordinates, of shape ``(xi.size, R)``. Interpreted as
vectors of length R, or scalars if R=1.
x : array_like
Point or points at which to evaluate the derivatives.
der : int or list
How many derivatives to extract; None for all potentially
nonzero derivatives (that is a number equal to the number
of points), or a list of derivatives to extract. This number
includes the function value as 0th derivative.
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
Returns
-------
d : ndarray
If the interpolator's values are R-dimensional then the
returned array will be the number of derivatives by N by R.
If `x` is a scalar, the middle dimension will be dropped; if
the `yi` are scalars then the last dimension will be dropped.
See Also
--------
KroghInterpolator
Notes
-----
Construction of the interpolating polynomial is a relatively expensive
process. If you want to evaluate it repeatedly consider using the class
KroghInterpolator (which is what this function uses).
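
    Examples
    --------
    Illustrative sketch (not part of the original docstring): the unique
    quadratic through ``(0, 1), (1, 3), (2, 2)`` evaluated at ``x = 0.5``:

    >>> krogh_interpolate([0, 1, 2], [1, 3, 2], 0.5)
    array(2.375)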
"""
P = KroghInterpolator(xi, yi, axis=axis)
if der == 0:
return P(x)
elif _isscalar(der):
return P.derivative(x,der=der)
else:
return P.derivatives(x,der=np.amax(der)+1)[der]
def approximate_taylor_polynomial(f,x,degree,scale,order=None):
"""
Estimate the Taylor polynomial of f at x by polynomial fitting.
Parameters
----------
f : callable
The function whose Taylor polynomial is sought. Should accept
a vector of `x` values.
x : scalar
The point at which the polynomial is to be evaluated.
degree : int
The degree of the Taylor polynomial
scale : scalar
The width of the interval to use to evaluate the Taylor polynomial.
Function values spread over a range this wide are used to fit the
polynomial. Must be chosen carefully.
order : int or None, optional
The order of the polynomial to be used in the fitting; `f` will be
evaluated ``order+1`` times. If None, use `degree`.
Returns
-------
p : poly1d instance
The Taylor polynomial (translated to the origin, so that
for example p(0)=f(x)).
Notes
-----
The appropriate choice of "scale" is a trade-off; too large and the
function differs from its Taylor polynomial too much to get a good
answer, too small and round-off errors overwhelm the higher-order terms.
The algorithm used becomes numerically unstable around order 30 even
under ideal circumstances.
Choosing order somewhat larger than degree may improve the higher-order
terms.
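
    Examples
    --------
    Illustrative sketch (not part of the original docstring):

    >>> p = approximate_taylor_polynomial(np.exp, 0, degree=2, scale=0.5)
    >>> # p is a poly1d; its coefficients approximate [1/2, 1, 1]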
"""
if order is None:
order = degree
n = order+1
# Choose n points that cluster near the endpoints of the interval in
# a way that avoids the Runge phenomenon. Ensure, by including the
# endpoint or not as appropriate, that one point always falls at x
# exactly.
xs = scale*np.cos(np.linspace(0,np.pi,n,endpoint=n % 1)) + x
P = KroghInterpolator(xs, f(xs))
d = P.derivatives(x,der=degree+1)
return np.poly1d((d/factorial(np.arange(degree+1)))[::-1])
class BarycentricInterpolator(_Interpolator1D):
"""The interpolating polynomial for a set of points
Constructs a polynomial that passes through a given set of points.
Allows evaluation of the polynomial, efficient changing of the y
values to be interpolated, and updating by adding more x values.
For reasons of numerical stability, this function does not compute
the coefficients of the polynomial.
The values yi need to be provided before the function is
evaluated, but none of the preprocessing depends on them, so rapid
updates are possible.
Parameters
----------
xi : array-like
1-d array of x coordinates of the points the polynomial
should pass through
yi : array-like
The y coordinates of the points the polynomial should pass through.
If None, the y values will be supplied later via the `set_y` method.
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
Notes
-----
This class uses a "barycentric interpolation" method that treats
the problem as a special case of rational function interpolation.
This algorithm is quite stable, numerically, but even in a world of
exact computation, unless the x coordinates are chosen very
carefully - Chebyshev zeros (e.g. cos(i*pi/n)) are a good choice -
polynomial interpolation itself is a very ill-conditioned process
due to the Runge phenomenon.
Based on Berrut and Trefethen 2004, "Barycentric Lagrange Interpolation".
"""
def __init__(self, xi, yi=None, axis=0):
_Interpolator1D.__init__(self, xi, yi, axis)
self.xi = np.asarray(xi)
self.set_yi(yi)
self.n = len(self.xi)
self.wi = np.zeros(self.n)
self.wi[0] = 1
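        # Incrementally build prod_{k != j} (xi_k - xi_j) for each j; the
        # barycentric weights are the inverses, taken in one step at the end.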
for j in xrange(1,self.n):
self.wi[:j] *= (self.xi[j]-self.xi[:j])
self.wi[j] = np.multiply.reduce(self.xi[:j]-self.xi[j])
self.wi **= -1
def set_yi(self, yi, axis=None):
"""
Update the y values to be interpolated
The barycentric interpolation algorithm requires the calculation
of weights, but these depend only on the xi. The yi can be changed
at any time.
Parameters
----------
yi : array_like
The y coordinates of the points the polynomial should pass through.
If None, the y values will be supplied later.
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
"""
if yi is None:
self.yi = None
return
self._set_yi(yi, xi=self.xi, axis=axis)
self.yi = self._reshape_yi(yi)
self.n, self.r = self.yi.shape
def add_xi(self, xi, yi=None):
"""
Add more x values to the set to be interpolated
The barycentric interpolation algorithm allows easy updating by
adding more points for the polynomial to pass through.
Parameters
----------
xi : array_like
The x coordinates of the points that the polynomial should pass
through.
yi : array_like, optional
The y coordinates of the points the polynomial should pass through.
Should have shape ``(xi.size, R)``; if R > 1 then the polynomial is
vector-valued.
If `yi` is not given, the y values will be supplied later. `yi` should
be given if and only if the interpolator has y values specified.
"""
if yi is not None:
if self.yi is None:
raise ValueError("No previous yi value to update!")
yi = self._reshape_yi(yi, check=True)
self.yi = np.vstack((self.yi,yi))
else:
if self.yi is not None:
raise ValueError("No update to yi provided!")
old_n = self.n
self.xi = np.concatenate((self.xi,xi))
self.n = len(self.xi)
self.wi **= -1
old_wi = self.wi
self.wi = np.zeros(self.n)
self.wi[:old_n] = old_wi
for j in xrange(old_n,self.n):
self.wi[:j] *= (self.xi[j]-self.xi[:j])
self.wi[j] = np.multiply.reduce(self.xi[:j]-self.xi[j])
self.wi **= -1
def __call__(self, x):
"""Evaluate the interpolating polynomial at the points x
Parameters
----------
x : array-like
Points to evaluate the interpolant at.
Returns
-------
y : array-like
Interpolated values. Shape is determined by replacing
the interpolation axis in the original array with the shape of x.
Notes
-----
Currently the code computes an outer product between x and the
weights, that is, it constructs an intermediate array of size
N by len(x), where N is the degree of the polynomial.
"""
return _Interpolator1D.__call__(self, x)
def _evaluate(self, x):
if x.size == 0:
p = np.zeros((0, self.r), dtype=self.dtype)
else:
c = x[...,np.newaxis]-self.xi
z = c == 0
c[z] = 1
c = self.wi/c
p = np.dot(c,self.yi)/np.sum(c,axis=-1)[...,np.newaxis]
# Now fix where x==some xi
r = np.nonzero(z)
if len(r) == 1: # evaluation at a scalar
if len(r[0]) > 0: # equals one of the points
p = self.yi[r[0][0]]
else:
p[r[:-1]] = self.yi[r[-1]]
return p
def barycentric_interpolate(xi, yi, x, axis=0):
"""
Convenience function for polynomial interpolation.
Constructs a polynomial that passes through a given set of points,
then evaluates the polynomial. For reasons of numerical stability,
this function does not compute the coefficients of the polynomial.
This function uses a "barycentric interpolation" method that treats
the problem as a special case of rational function interpolation.
This algorithm is quite stable, numerically, but even in a world of
exact computation, unless the `x` coordinates are chosen very
carefully - Chebyshev zeros (e.g. cos(i*pi/n)) are a good choice -
polynomial interpolation itself is a very ill-conditioned process
due to the Runge phenomenon.
Parameters
----------
xi : array_like
1-d array of x coordinates of the points the polynomial should
pass through
yi : array_like
The y coordinates of the points the polynomial should pass through.
x : scalar or array_like
Points to evaluate the interpolator at.
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
Returns
-------
y : scalar or array_like
Interpolated values. Shape is determined by replacing
the interpolation axis in the original array with the shape of x.
See Also
--------
BarycentricInterpolator
Notes
-----
Construction of the interpolation weights is a relatively slow process.
If you want to call this many times with the same xi (but possibly
varying yi or x) you should use the class `BarycentricInterpolator`.
This is what this function uses internally.
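
    Examples
    --------
    Illustrative sketch (not part of the original docstring): the points
    below lie on ``y = x**2``, which the interpolant reproduces exactly:

    >>> barycentric_interpolate([0, 1, 2], [0, 1, 4], 1.5)
    array(2.25)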
"""
return BarycentricInterpolator(xi, yi, axis=axis)(x)
class PiecewisePolynomial(_Interpolator1DWithDerivatives):
"""Piecewise polynomial curve specified by points and derivatives
This class represents a curve that is a piecewise polynomial. It
passes through a list of points and has specified derivatives at
each point. The degree of the polynomial may vary from segment to
segment, as may the number of derivatives available. The degree
should not exceed about thirty.
Appending points to the end of the curve is efficient.
Parameters
----------
xi : array-like
a sorted 1-d array of x-coordinates
yi : array-like or list of array-likes
yi[i][j] is the j-th derivative known at xi[i] (for axis=0)
orders : list of integers, or integer
a list of polynomial orders, or a single universal order
direction : {None, 1, -1}
indicates whether the xi are increasing or decreasing
+1 indicates increasing
-1 indicates decreasing
None indicates that it should be deduced from the first two xi
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
Notes
-----
If orders is None, or orders[i] is None, then the degree of the
polynomial segment is exactly the degree required to match all i
available derivatives at both endpoints. If orders[i] is not None,
then some derivatives will be ignored. The code will try to use an
equal number of derivatives from each end; if the total number of
derivatives needed is odd, it will prefer the rightmost endpoint. If
not enough derivatives are available, an exception is raised.
"""
def __init__(self, xi, yi, orders=None, direction=None, axis=0):
_Interpolator1DWithDerivatives.__init__(self, axis=axis)
warnings.warn('PiecewisePolynomial is deprecated in scipy 0.14. '
'Use BPoly.from_derivatives instead.',
category=DeprecationWarning)
if axis != 0:
try:
yi = np.asarray(yi)
except ValueError:
raise ValueError("If yi is a list, then axis must be 0")
preslice = ((slice(None,None,None),) * (axis % yi.ndim))
slice0 = preslice + (0,)
slice1 = preslice + (slice(1, None, None),)
else:
slice0 = 0
slice1 = slice(1, None, None)
yi0 = np.asarray(yi[slice0])
self._set_yi(yi0)
self.xi = [xi[0]]
self.yi = [self._reshape_yi(yi0)]
self.n = 1
self.r = np.prod(self._y_extra_shape, dtype=np.int64)
self.direction = direction
self.orders = []
self.polynomials = []
self.extend(xi[1:],yi[slice1],orders)
def _make_polynomial(self,x1,y1,x2,y2,order,direction):
"""Construct the interpolating polynomial object
Deduces the number of derivatives to match at each end
from order and the number of derivatives available. If
possible it uses the same number of derivatives from
each end; if the number is odd it tries to take the
extra one from y2. In any case if not enough derivatives
are available at one end or another it draws enough to
make up the total from the other end.
"""
n = order+1
n1 = min(n//2,len(y1))
n2 = min(n-n1,len(y2))
n1 = min(n-n2,len(y1))
if n1+n2 != n:
raise ValueError("Point %g has %d derivatives, point %g has %d derivatives, but order %d requested" % (x1, len(y1), x2, len(y2), order))
if not (n1 <= len(y1) and n2 <= len(y2)):
raise ValueError("`order` input incompatible with length y1 or y2.")
xi = np.zeros(n)
yi = np.zeros((n, self.r), dtype=self.dtype)
xi[:n1] = x1
yi[:n1] = y1[:n1].reshape((n1, self.r))
xi[n1:] = x2
yi[n1:] = y2[:n2].reshape((n2, self.r))
return KroghInterpolator(xi,yi,axis=0)
def append(self, xi, yi, order=None):
"""
Append a single point with derivatives to the PiecewisePolynomial
Parameters
----------
xi : float
Input
yi : array_like
`yi` is the list of derivatives known at `xi`
order : integer or None
a polynomial order, or instructions to use the highest
possible order
"""
yi = self._reshape_yi(yi, check=True)
self._set_dtype(yi.dtype, union=True)
if self.direction is None:
self.direction = np.sign(xi-self.xi[-1])
elif (xi-self.xi[-1])*self.direction < 0:
raise ValueError("x coordinates must be in the %d direction: %s" % (self.direction, self.xi))
self.xi.append(xi)
self.yi.append(yi)
if order is None:
n1 = len(self.yi[-2])
n2 = len(self.yi[-1])
n = n1+n2
order = n-1
self.orders.append(order)
self.polynomials.append(self._make_polynomial(
self.xi[-2], self.yi[-2],
self.xi[-1], self.yi[-1],
order, self.direction))
self.n += 1
def extend(self, xi, yi, orders=None):
"""
Extend the PiecewisePolynomial by a list of points
Parameters
----------
xi : array_like
A sorted list of x-coordinates.
yi : list of lists of length N1
``yi[i]`` (if ``axis == 0``) is the list of derivatives known
at ``xi[i]``.
orders : int or list of ints
A list of polynomial orders, or a single universal order.
direction : {None, 1, -1}
Indicates whether the `xi` are increasing or decreasing.
+1 indicates increasing
-1 indicates decreasing
None indicates that it should be deduced from the first two `xi`.
"""
if self._y_axis == 0:
# allow yi to be a ragged list
for i in xrange(len(xi)):
if orders is None or _isscalar(orders):
self.append(xi[i],yi[i],orders)
else:
self.append(xi[i],yi[i],orders[i])
else:
preslice = (slice(None,None,None),) * self._y_axis
for i in xrange(len(xi)):
if orders is None or _isscalar(orders):
self.append(xi[i],yi[preslice + (i,)],orders)
else:
self.append(xi[i],yi[preslice + (i,)],orders[i])
def _evaluate(self, x):
if _isscalar(x):
pos = np.clip(np.searchsorted(self.xi, x) - 1, 0, self.n-2)
y = self.polynomials[pos](x)
else:
m = len(x)
pos = np.clip(np.searchsorted(self.xi, x) - 1, 0, self.n-2)
y = np.zeros((m, self.r), dtype=self.dtype)
if y.size > 0:
for i in xrange(self.n-1):
c = pos == i
y[c] = self.polynomials[i](x[c])
return y
def _evaluate_derivatives(self, x, der=None):
if der is None and self.polynomials:
der = self.polynomials[0].n
if _isscalar(x):
pos = np.clip(np.searchsorted(self.xi, x) - 1, 0, self.n-2)
y = self.polynomials[pos].derivatives(x,der=der)
else:
m = len(x)
pos = np.clip(np.searchsorted(self.xi, x) - 1, 0, self.n-2)
y = np.zeros((der,m,self.r), dtype=self.dtype)
if y.size > 0:
for i in xrange(self.n-1):
c = pos == i
y[:,c] = self.polynomials[i].derivatives(x[c],der=der)
return y
def piecewise_polynomial_interpolate(xi,yi,x,orders=None,der=0,axis=0):
"""
Convenience function for piecewise polynomial interpolation.
Parameters
----------
xi : array_like
A sorted list of x-coordinates.
yi : list of lists
``yi[i]`` is the list of derivatives known at ``xi[i]``.
x : scalar or array_like
        Coordinates at which to evaluate the polynomial.
orders : int or list of ints, optional
A list of polynomial orders, or a single universal order.
der : int or list
How many derivatives to extract; None for all potentially
nonzero derivatives (that is a number equal to the number
of points), or a list of derivatives to extract. This number
includes the function value as 0th derivative.
axis : int, optional
Axis in the `yi` array corresponding to the x-coordinate values.
Returns
-------
y : ndarray
Interpolated values or derivatives. If multiple derivatives
were requested, these are given along the first axis.
See Also
--------
PiecewisePolynomial
Notes
-----
If `orders` is None, or ``orders[i]`` is None, then the degree of the
polynomial segment is exactly the degree required to match all i
available derivatives at both endpoints. If ``orders[i]`` is not None,
then some derivatives will be ignored. The code will try to use an
equal number of derivatives from each end; if the total number of
derivatives needed is odd, it will prefer the rightmost endpoint. If
not enough derivatives are available, an exception is raised.
Construction of these piecewise polynomials can be an expensive process;
if you repeatedly evaluate the same polynomial, consider using the class
PiecewisePolynomial (which is what this function does).
"""
P = PiecewisePolynomial(xi, yi, orders, axis=axis)
if der == 0:
return P(x)
elif _isscalar(der):
return P.derivative(x,der=der)
else:
return P.derivatives(x,der=np.amax(der)+1)[der]
| bsd-3-clause | -1,840,114,695,129,729,800 | 33.621651 | 148 | 0.588354 | false |
dvstter/SelfPythonSmallTools | ftp_server/client.py | 1 | 1463 | import socket
import struct
import math
class client:
def init_client(self, address, port):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
self.sock.connect((address.strip(), int(port)))
except socket.error, e:
print 'error:%s' % e
def main_loop(self):
while True:
cmd = raw_input('>')
if cmd == 'quit':
self.sock.send('quit')
self.sock.close()
break
elif cmd == 'list':
self.sock.send(cmd)
result = self.sock.recv(1024)
if result != '':
print result,
elif cmd[:5] == 'fetch':
self.sock.send(cmd)
self.get_file()
else:
print 'command did not recognised'
def get_file(self):
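		# Wire format assumed from the code below (the matching server is not
		# shown here): a 5-byte status marker, an 8-byte struct '2i' of
		# (file_size, filename_size), the file name, then the raw file bytes.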
res = self.sock.recv(5)
if res == 'error':
print 'error occured...'
return
num_info = self.sock.recv(8)
# recieve file size and file name size
file_size, filename_size = struct.unpack('2i', num_info)
# recieve file name to create new file
filename = self.sock.recv(filename_size)
		print 'fetching file - destination: %s' % filename
# open file to write
fid = open(filename, 'wb')
		# recv() may return fewer than 1024 bytes, so track how many bytes
		# are still expected instead of assuming full chunks
		remaining = file_size
		while remaining > 0:
			chunk = self.sock.recv(min(1024, remaining))
			if not chunk:
				# connection closed before the whole file arrived
				break
			fid.write(chunk)
			remaining -= len(chunk)
fid.close()
print 'file transmitted over...'
if __name__ == '__main__':
c = client()
#address = raw_input('ftp server ip address:')
# port = raw_input('ftp server port number:')
address = 'localhost'
port = '8080'
c.init_client(address=address, port=port)
c.main_loop()
| gpl-2.0 | -1,645,092,799,523,094,300 | 24.224138 | 63 | 0.640465 | false |
proofchains/python-smartcolors | smartcolors/db.py | 1 | 8363 | # Copyright (C) 2014 Peter Todd <[email protected]>
#
# This file is part of python-smartcolors.
#
# It is subject to the license terms in the LICENSE file found in the top-level
# directory of this distribution.
#
# No part of python-smartcolors, including this file, may be copied, modified,
# propagated, or distributed except according to the terms contained in the
# LICENSE file.
import os
import tempfile
from bitcoin.core import b2x, b2lx, lx, x
import bitcoin.core
import bitcoin.core.script
import smartcolors.core.db
import smartcolors.io
class PersistentSet:
"""File-backed set"""
def __init__(self, *, root_dir_path):
self.root_dir_path = os.path.abspath(root_dir_path)
def _get_elem_filename(self, elem):
raise NotImplementedError
def _serialize_elem(self, elem):
raise NotImplementedError
def _deserialize_elem(self, fd):
raise NotImplementedError
def add(self, elem):
# No effect if element is already present
if elem in self:
return
elem_filename = self._get_elem_filename(elem)
os.makedirs(self.root_dir_path, exist_ok=True)
# Write the element to disk as a new temporary file in the directory
with tempfile.NamedTemporaryFile(dir=self.root_dir_path, prefix=elem_filename + '-tmp-') as fd:
self._serialize_elem(elem, fd)
fd.flush()
            # Hardlink the file to its correct name, which atomically makes it
# available to readers. The temporary name will be unlinked for us
# by NamedTemporaryFile.
try:
os.link(fd.name, os.path.join(self.root_dir_path, elem_filename))
except FileExistsError as exp:
# FIXME: actually handle this!
raise exp
def __iter__(self):
try:
elem_filenames = os.listdir(self.root_dir_path)
        except FileNotFoundError:
return
for elem_filename in elem_filenames:
with open(os.path.join(self.root_dir_path, elem_filename), 'rb') as fd:
yield self._deserialize_elem(fd)
def __contains__(self, elem):
elem_filename = self._get_elem_filename(elem)
return os.path.exists(os.path.join(self.root_dir_path, elem_filename))
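

# Editor's illustrative sketch (not part of the original module): a minimal
# concrete PersistentSet storing plain strings, showing the three hooks that
# subclasses are expected to implement.
class _ExampleStringSet(PersistentSet):
    def _get_elem_filename(self, elem):
        return elem + '.txt'

    def _serialize_elem(self, elem, fd):
        fd.write(elem.encode('utf-8'))

    def _deserialize_elem(self, fd):
        return fd.read().decode('utf-8')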
class PersistentDict:
"""File-backed set"""
def __init__(self, *, root_dir_path):
self.root_dir_path = os.path.abspath(root_dir_path)
def _key_to_filename(self, key):
raise NotImplementedError
def _filename_to_key(self, filename):
raise NotImplementedError
def _get_item(self, key_abspath):
raise NotImplementedError
def _key_to_abspath(self, key):
return os.path.join(self.root_dir_path, self._key_to_filename(key))
def __contains__(self, key):
return os.path.exists(self._key_to_abspath(key))
def __getitem__(self, key):
key_abspath = self._key_to_abspath(key)
if not os.path.exists(key_abspath):
raise KeyError(key)
else:
return self._get_item(key_abspath)
def get(self, key, default_value=None):
try:
return self[key]
except KeyError:
return default_value
def __setitem__(self, key, value):
raise NotImplementedError
def setdefault(self, key, default_value=None):
try:
return self[key]
except KeyError:
pass
return default_value
def __iter__(self):
try:
key_filenames = os.listdir(self.root_dir_path)
        except FileNotFoundError:
return
for key_filename in key_filenames:
yield self._filename_to_key(key_filename)
def keys(self):
yield from self.__iter__()
def values(self):
yield from [self[key] for key in self.keys()]
def items(self):
for key in self:
yield (key, self[key])
class PersistentColorDefSet(PersistentSet):
def _get_elem_filename(self, colordef):
return b2x(colordef.hash) + '.scdef'
def _serialize_elem(self, colordef, fd):
smartcolors.io.ColorDefFileSerializer.stream_serialize(colordef, fd)
def _deserialize_elem(self, fd):
return smartcolors.io.ColorDefFileSerializer.stream_deserialize(fd)
class PersistentColorProofSet(PersistentSet):
def _get_elem_filename(self, colorproof):
return b2x(colorproof.hash) + '.scproof'
def _serialize_elem(self, colorproof, fd):
smartcolors.io.ColorProofFileSerializer.stream_serialize(colorproof, fd)
def _deserialize_elem(self, fd):
return smartcolors.io.ColorProofFileSerializer.stream_deserialize(fd)
class PersistentGenesisOutPointsDict(PersistentDict):
def _key_to_filename(self, outpoint):
return '%s:%d' % (b2lx(outpoint.hash), outpoint.n)
def _filename_to_key(self, filename):
hex_hash, str_n = filename.split(':')
return bitcoin.core.COutPoint(lx(hex_hash), int(str_n))
def _get_item(self, key_abspath):
return PersistentColorDefSet(root_dir_path=key_abspath)
def setdefault(self, key, default_value=None):
assert default_value == set()
default_value = PersistentColorDefSet(root_dir_path=self._key_to_abspath(key))
return super().setdefault(key, default_value=default_value)
class PersistentGenesisScriptPubKeysDict(PersistentDict):
def _key_to_filename(self, scriptPubKey):
if scriptPubKey:
return b2x(scriptPubKey)
else:
# gotta handle the empty case!
return '_'
def _filename_to_key(self, filename):
if filename == '_':
return bitcoin.core.script.CScript()
else:
return bitcoin.core.script.CScript(x(filename))
def _get_item(self, key_abspath):
return PersistentColorDefSet(root_dir_path=key_abspath)
def setdefault(self, key, default_value=None):
assert default_value == set()
default_value = PersistentColorDefSet(root_dir_path=self._key_to_abspath(key))
return super().setdefault(key, default_value=default_value)
class PersistentColorProofsByColorDefDict(PersistentDict):
def _key_to_filename(self, colordef):
return b2x(colordef.hash)
def _filename_to_key(self, filename):
# Bit of a hack to say the least...
colordef_filename = os.path.join(self.root_dir_path, '..', '..', 'colordefs', filename + '.scdef')
with open(colordef_filename, 'rb') as fd:
return smartcolors.io.ColorDefFileSerializer.stream_deserialize(fd)
def _get_item(self, key_abspath):
return PersistentColorProofSet(root_dir_path=key_abspath)
def setdefault(self, key, default_value=None):
assert default_value == set()
default_value = PersistentColorProofSet(root_dir_path=self._key_to_abspath(key))
return super().setdefault(key, default_value=default_value)
class PersistentColoredOutPointsDict(PersistentDict):
def _key_to_filename(self, outpoint):
return '%s:%d' % (b2lx(outpoint.hash), outpoint.n)
def _filename_to_key(self, filename):
hex_hash, str_n = filename.split(':')
return bitcoin.core.COutPoint(lx(hex_hash), int(str_n))
def _get_item(self, key_abspath):
return PersistentColorProofsByColorDefDict(root_dir_path=key_abspath)
def setdefault(self, key, default_value=None):
assert default_value == {}
default_value = PersistentColorProofsByColorDefDict(root_dir_path=self._key_to_abspath(key))
return super().setdefault(key, default_value=default_value)
class PersistentColorProofDb(smartcolors.core.db.ColorProofDb):
def __init__(self, root_dir_path):
self.root_dir_path = os.path.abspath(root_dir_path)
self.colordefs = PersistentColorDefSet(root_dir_path=os.path.join(self.root_dir_path, 'colordefs'))
self.genesis_outpoints = PersistentGenesisOutPointsDict(root_dir_path=os.path.join(self.root_dir_path, 'genesis_outpoints'))
self.genesis_scriptPubKeys = PersistentGenesisScriptPubKeysDict(root_dir_path=os.path.join(self.root_dir_path, 'genesis_scriptPubKeys'))
self.colored_outpoints = PersistentColoredOutPointsDict(root_dir_path=os.path.join(self.root_dir_path, 'colored_outpoints'))
| gpl-3.0 | 4,823,416,512,252,157,000 | 32.8583 | 144 | 0.656343 | false |
hetica/webeni | static/lib/cisco_clt.py | 1 | 3492 | #!/usr/bin/python
# -*- coding: utf-8 -*-
__appname__ = 'pytacad-clt'
__version__ = "0.1"
__author__ = "Benoit Guibert <[email protected]>"
__licence__ = ""
import os, sys
import unicodedata
from django.utils.encoding import smart_unicode
server = os.path.dirname(sys.argv[0]) + '/pytacad-server'
cwd = '/var/local/pytacad/'
dirClasses = cwd + 'classes'
os.chdir(cwd)
def find_user(search_str=None):
""" Chercher un utilisateur """
f = open('liste_stagiaires')
	c = f.readlines() # c: file contents as a list of lines
nb = 0
list_stag = []
for a in c:
if unicodedata.normalize("NFKD", smart_unicode(search_str.lower(), 'utf-8')).encode('ascii', 'ignore') in unicodedata.normalize("NFKD", smart_unicode(a.lower(), 'utf-8')).encode('ascii', 'ignore'):
list_stag.append(a)
nb +=1
return (nb, list_stag)
def afficher_stags(stags):
""" mettre en forme l'affichage """
result = ""
for stag in stags:
s = stag.split(';')
		result += 'Trainee:\t{0} {1} ({2})\n'.format(s[1], s[0], s[3])
for i,a in enumerate(stag.split(';')[5].split(',')):
			if i == 0: result += 'Classes:\t' + a + '\n'
			else: result += ('\t\t{0}\n'.format(a))
return result
def find_classe(search_str=None):
"""Chercher une ou des classes"""
l = os.listdir(dirClasses)
classes_found = ""
allclasses = ""
nb = 0
for i, a in enumerate(l):
# allclasses += a.split('.')[0].split('classe_')[1] + "\n"
allclasses += a.split('.')[0] + "\n"
if search_str.lower() in a.lower():
classe = a
# classes_found += a.split('.')[0].split('classe_')[1] + "\n"
classes_found += a.split('.')[0] + "\n"
nb += 1
if nb == 0:
		# if no class was found, list them all
		mesg = "No class was found\n"
		mesg += "List of the academy's classes\n\n"
mesg += allclasses
return mesg
if nb == 1:
		# if exactly one class was found, show the trainees in it
fic = dirClasses + "/" + classe
f = open(fic, 'r')
mesg = f.read()
f.close()
return mesg
if nb > 1:
		# if several classes were found, show only the matches
		mesg = str(nb) + " classes found\n"
		mesg += "Refine your search\n\n"
mesg += classes_found
return mesg
"""
def infos():
os.system('clear')
print("\n INFOS GENERALES\n")
f = open('liste_stagiaires').readlines()
print(" Nombre de stagiaires : {0}".format(len(f)))
classes = os.listdir(dirClasses)
print(" Nombre de classes : {0}".format(len(classes)))
c = raw_input("\n Tapez sur une touche pour revenir au menu,\n ou 'c' pour afficher les noms des classes... ")
if c == "c":
os.system('clear')
for a in classes:
fclasse = open("./classes/" + a)
print(fclasse.readlines()[1].split(": ")[1].rstrip())
raw_input("\n Tapez sur une touche pour revenir au menu")
"""
"""
def maj_bd():
os.system('clear')
print("\n MISE A JOUR DE LA BASE DE DONNEES")
print(" ---------------------------------\n")
print(' La base de données est mise à jour 2 fois par jour, à 8H30 et 13H30.')
print(' Il est cependant possible de forcer une mise à jour ponctuelle en cas de besoin.')
print(" Celle-ci peut durer plusieurs minutes car il faut télécharger des pages Web sur Internet")
c = raw_input("\n Voulez-vous mettre la base de donnée à jour (taper 'y' pour accepter) ? ")
if c == "y":
print(" Merci de patienter...\n")
os.system(server)
print("\n La mise à jour est terminée")
raw_input("\n Tapez sur une touche pour revenir au menu... ")
"""
if __name__ == "__main__" :
menu()
| lgpl-3.0 | 5,053,427,114,607,111,000 | 30.572727 | 199 | 0.624532 | false |
okamstudio/godot | modules/mono/build_scripts/make_cs_compressed_header.py | 3 | 3430 |
def generate_header(src, dst, version_dst):
from compat import byte_to_str
with open(dst, 'w') as header:
header.write('/* THIS FILE IS GENERATED DO NOT EDIT */\n')
header.write('#ifndef CS_COMPRESSED_H\n')
header.write('#define CS_COMPRESSED_H\n\n')
header.write('#ifdef TOOLS_ENABLED\n\n')
header.write('#include "core/map.h"\n')
header.write('#include "core/ustring.h"\n')
inserted_files = ''
import os
latest_mtime = 0
cs_file_count = 0
for root, _, files in os.walk(src):
files = [f for f in files if f.endswith('.cs')]
for file in files:
cs_file_count += 1
filepath = os.path.join(root, file)
filepath_src_rel = os.path.relpath(filepath, src)
mtime = os.path.getmtime(filepath)
latest_mtime = mtime if mtime > latest_mtime else latest_mtime
with open(filepath, 'rb') as f:
buf = f.read()
decomp_size = len(buf)
import zlib
buf = zlib.compress(buf)
name = str(cs_file_count)
header.write('\n')
header.write('// ' + filepath_src_rel + '\n')
header.write('static const int _cs_' + name + '_compressed_size = ' + str(len(buf)) + ';\n')
header.write('static const int _cs_' + name + '_uncompressed_size = ' + str(decomp_size) + ';\n')
header.write('static const unsigned char _cs_' + name + '_compressed[] = { ')
                for i in range(len(buf)):
                    if i > 0:
                        header.write(', ')
                    header.write(byte_to_str(buf[i]))
inserted_files += '\tr_files.insert("' + filepath_src_rel.replace('\\', '\\\\') + '", ' \
'CompressedFile(_cs_' + name + '_compressed_size, ' \
'_cs_' + name + '_uncompressed_size, ' \
'_cs_' + name + '_compressed));\n'
header.write(' };\n')
header.write('\nstruct CompressedFile\n' '{\n'
'\tint compressed_size;\n' '\tint uncompressed_size;\n' '\tconst unsigned char* data;\n'
'\n\tCompressedFile(int p_comp_size, int p_uncomp_size, const unsigned char* p_data)\n'
'\t{\n' '\t\tcompressed_size = p_comp_size;\n' '\t\tuncompressed_size = p_uncomp_size;\n'
'\t\tdata = p_data;\n' '\t}\n' '\n\tCompressedFile() {}\n' '};\n'
'\nvoid get_compressed_files(Map<String, CompressedFile>& r_files)\n' '{\n' + inserted_files + '}\n'
)
header.write('\n#endif // TOOLS_ENABLED\n')
header.write('\n#endif // CS_COMPRESSED_H\n')
glue_version = int(latest_mtime) # The latest modified time will do for now
with open(version_dst, 'w') as version_header:
version_header.write('/* THIS FILE IS GENERATED DO NOT EDIT */\n')
version_header.write('#ifndef CS_GLUE_VERSION_H\n')
version_header.write('#define CS_GLUE_VERSION_H\n\n')
version_header.write('#define CS_GLUE_VERSION UINT32_C(' + str(glue_version) + ')\n')
version_header.write('\n#endif // CS_GLUE_VERSION_H\n')
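

if __name__ == "__main__":
    # Editor's illustrative invocation (not part of the original script);
    # the paths below are assumptions for demonstration only.
    generate_header("glue/Managed/Files", "cs_compressed.gen.h", "cs_glue_version.gen.h")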
| mit | 6,835,633,651,705,477,000 | 55.229508 | 117 | 0.505248 | false |
KanoComputing/kano-apps | kano_apps/MainWindow.py | 1 | 4372 | # MainWindow.py
#
# Copyright (C) 2014-2018 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU GPLv2
#
# The MainWindow class
from gi import require_version
require_version('Gtk', '3.0')
from gi.repository import Gtk, Gdk
from kano_apps import Media
from kano_apps.UIElements import Contents
from kano_apps.AppGrid import Apps
from kano_apps.AppData import get_applications, refresh_package_list
from kano_apps.AppInstaller import AppInstaller
from kano.gtk3.top_bar import TopBar
from kano.gtk3.application_window import ApplicationWindow
from kano.gtk3.kano_dialog import KanoDialog
try:
from kano_profile.tracker import Tracker
kanotracker = Tracker()
except Exception:
    # Tracking is optional; carry on without it if kano_profile is unavailable.
    pass
class MainWindow(ApplicationWindow):
def __init__(self, install=None, icon_only=False, tutorial=False):
ApplicationWindow.__init__(self, 'Apps', 755, 588)
self._install = install
self._tutorial = tutorial
self._icon_only = icon_only
self._last_page = 0
self.connect("show", self._app_loaded)
        # Quit the GTK main loop when the window is closed
        self.connect('delete-event', Gtk.main_quit)
self.set_icon_from_file("/usr/share/kano-desktop/icons/apps.png")
# Styling
screen = Gdk.Screen.get_default()
specific_css_provider = Gtk.CssProvider()
specific_css_provider.load_from_path(Media.media_dir() +
'css/style.css')
specific_style_context = Gtk.StyleContext()
specific_style_context.add_provider_for_screen(
screen,
specific_css_provider,
Gtk.STYLE_PROVIDER_PRIORITY_USER
)
style = self.get_style_context()
style.add_class('main_window')
# Setup widgets
self.set_decorated(True)
self._top_bar = TopBar(_("Apps"), self._win_width, False)
self._top_bar.set_close_callback(Gtk.main_quit)
self.set_titlebar(self._top_bar)
self._contents = Contents(self)
self.set_main_widget(self._contents)
self.show_apps_view()
def get_main_area(self):
return self._contents
def get_last_page(self):
return self._last_page
def set_last_page(self, last_page_num):
self._last_page = last_page_num
def show_apps_view(self, button=None, event=None):
self._top_bar.disable_prev()
self._apps = apps = Apps(get_applications(), self)
self.get_main_area().set_contents(apps)
def refresh(self, category=None):
for app in get_applications():
if self._apps.has_app(app):
self._apps.update_app(app)
else:
self._apps.add_app(app)
def _app_loaded(self, widget):
if self._install is not None:
self._install_apps()
elif self._tutorial:
self._show_icon_tutorial()
def _show_icon_tutorial(self):
try:
from kano_profile.apps import save_app_state_variable, load_app_state_variable
if load_app_state_variable('kano-apps', 'icon-tutorial-shown'):
return
else:
save_app_state_variable('kano-apps', 'icon-tutorial-shown', True)
except ImportError:
# ignore problems importing kano_profile, as we don't want it to
# be a dependency
pass
kdialog = KanoDialog(
_("Add more apps to the desktop"),
_(
"Click the '+' button to the right of the app name to "
"make it appear on the desktop. You can remove it again "
"by clicking on 'x'."
),
{
_("OK, GOT IT"): {
"return_value": 0,
"color": "green"
}
},
parent_window=self
)
kdialog.set_action_background("grey")
kdialog.title.description.set_max_width_chars(40)
kdialog.run()
def _install_apps(self):
pw = None
for app in self._install:
inst = AppInstaller(app, self._apps, pw, self)
inst.set_check_if_installed(True)
inst.set_icon_only(self._icon_only)
inst.install()
pw = inst.get_sudo_pw()
self.set_last_page(0)
refresh_package_list()
self.refresh()
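
# Editor's note (not part of the original file): MainWindow is normally
# constructed by kano-apps' launcher script. A minimal manual launch would
# look roughly like this (sketch; exact arguments are assumptions):
#
#     win = MainWindow()
#     win.show_all()
#     Gtk.main()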
| gpl-2.0 | -7,431,524,287,024,962,000 | 30.007092 | 90 | 0.583486 | false |
przemyslawjanpietrzak/pyMonet | pymonet/semigroups.py | 1 | 5042 | class Semigroup:
"""
In mathematics, a semigroup is an algebraic structure
consisting of a set together with an associative binary operation.
A semigroup generalizes a monoid in that there might not exist an identity element.
It also (originally) generalized a group (a monoid with all inverses)
    to a type where every element did not have to have an inverse, hence the name semigroup.
"""
def __init__(self, value):
self.value = value
def __eq__(self, other) -> bool:
return self.value == other.value
def fold(self, fn):
return fn(self.value)
@classmethod
def neutral(cls):
return cls(cls.neutral_element)
class Sum(Semigroup):
"""
Sum is a Monoid that will combine 2 numbers under addition.
"""
neutral_element = 0
def __str__(self) -> str: # pragma: no cover
return 'Sum[value={}]'.format(self.value)
def concat(self, semigroup: 'Sum') -> 'Sum':
"""
:param semigroup: other semigroup to concat
:type semigroup: Sum[B]
:returns: new Sum with sum of concat semigroups values
:rtype: Sum[A]
"""
return Sum(self.value + semigroup.value)
class All(Semigroup):
"""
All is a Monoid that will combine 2 values of any type using logical conjunction on their coerced Boolean values.
"""
neutral_element = True
def __str__(self) -> str: # pragma: no cover
return 'All[value={}]'.format(self.value)
def concat(self, semigroup: 'All') -> 'All':
"""
:param semigroup: other semigroup to concat
:type semigroup: All[B]
:returns: new All with last truly value or first falsy
:rtype: All[A | B]
"""
return All(self.value and semigroup.value)
class One(Semigroup):
"""
One is a Monoid that will combine 2 values of any type using logical disjunction OR on their coerced Boolean values.
"""
neutral_element = False
def __str__(self) -> str: # pragma: no cover
return 'One[value={}]'.format(self.value)
def concat(self, semigroup):
"""
:param semigroup: other semigroup to concat
:type semigroup: One[B]
:returns: new One with first truly value or last falsy
:rtype: One[A | B]
"""
return One(self.value or semigroup.value)
class First(Semigroup):
"""
    First is a Monoid that will always return the first value when 2 First instances are combined.
"""
def __str__(self) -> str: # pragma: no cover
return 'Fist[value={}]'.format(self.value)
def concat(self, semigroup):
"""
:param semigroup: other semigroup to concat
:type semigroup: First[B]
:returns: new First with first value
:rtype: First[A]
"""
return First(self.value)
class Last(Semigroup):
"""
    Last is a Monoid that will always return the latest value when 2 Last instances are combined.
"""
def __str__(self) -> str: # pragma: no cover
return 'Last[value={}]'.format(self.value)
def concat(self, semigroup):
"""
:param semigroup: other semigroup to concat
:type semigroup: Last[B]
:returns: new Last with last value
:rtype: Last[A]
"""
return Last(semigroup.value)
class Map(Semigroup):
"""
    Map is a Semigroup that concatenates, key by key, the semigroup values stored in its dict value.
"""
def __str__(self) -> str: # pragma: no cover
return 'Map[value={}]'.format(self.value)
def concat(self, semigroup):
"""
:param semigroup: other semigroup to concat
:type semigroup: Map[B]
        :returns: new Map with the values concatenated key by key
:rtype: Map[A]
"""
return Map(
{key: value.concat(semigroup.value[key]) for key, value in self.value.items()}
)
class Max(Semigroup):
"""
    Max is a Monoid that combines 2 numbers, resulting in the largest of the two.
"""
neutral_element = -float("inf")
def __str__(self) -> str: # pragma: no cover
return 'Max[value={}]'.format(self.value)
def concat(self, semigroup):
"""
:param semigroup: other semigroup to concat
:type semigroup: Max[B]
:returns: new Max with largest value
:rtype: Max[A | B]
"""
return Max(self.value if self.value > semigroup.value else semigroup.value)
class Min(Semigroup):
"""
    Min is a Monoid that combines 2 numbers, resulting in the smallest of the two.
"""
neutral_element = float("inf")
def __str__(self) -> str: # pragma: no cover
return 'Min[value={}]'.format(self.value)
def concat(self, semigroup):
"""
:param semigroup: other semigroup to concat
:type semigroup: Min[B]
:returns: new Min with smallest value
:rtype: Min[A | B]
"""
return Min(self.value if self.value <= semigroup.value else semigroup.value)
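

if __name__ == "__main__":
    # Editor's illustrative checks (not part of the original module):
    assert Sum(2).concat(Sum(3)) == Sum(5)
    assert All(True).concat(All(False)) == All(False)
    assert One(False).concat(One(True)) == One(True)
    assert First(1).concat(First(2)) == First(1)
    assert Last(1).concat(Last(2)) == Last(2)
    assert Max.neutral().concat(Max(7)) == Max(7)
    assert Min(3).concat(Min(9)) == Min(3)
    assert Map({'a': Sum(1)}).concat(Map({'a': Sum(2)})) == Map({'a': Sum(3)})
    print("all semigroup examples passed")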
| mit | -2,265,434,369,184,133,000 | 27.485876 | 120 | 0.601349 | false |
celiao/django-rest-authemail | authemail/admin.py | 1 | 3085 | from django.contrib import admin
from django.contrib.auth import get_user_model
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext_lazy as _
from authemail.forms import EmailUserCreationForm, EmailUserChangeForm
from authemail.models import SignupCode, PasswordResetCode, EmailChangeCode
class SignupCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'user', 'ipaddr', 'created_at')
ordering = ('-created_at',)
readonly_fields = ('user', 'code', 'ipaddr')
def has_add_permission(self, request):
return False
class SignupCodeInline(admin.TabularInline):
model = SignupCode
fieldsets = (
(None, {
'fields': ('code', 'ipaddr', 'created_at')
}),
)
readonly_fields = ('code', 'ipaddr', 'created_at')
def has_add_permission(self, request):
return False
class PasswordResetCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'user', 'created_at')
ordering = ('-created_at',)
readonly_fields = ('user', 'code')
def has_add_permission(self, request):
return False
class PasswordResetCodeInline(admin.TabularInline):
model = PasswordResetCode
fieldsets = (
(None, {
'fields': ('code', 'created_at')
}),
)
readonly_fields = ('code', 'created_at')
def has_add_permission(self, request):
return False
class EmailChangeCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'user', 'email', 'created_at')
ordering = ('-created_at',)
readonly_fields = ('user', 'code', 'email')
def has_add_permission(self, request):
return False
class EmailChangeCodeInline(admin.TabularInline):
model = EmailChangeCode
fieldsets = (
(None, {
'fields': ('code', 'email', 'created_at')
}),
)
readonly_fields = ('code', 'email', 'created_at')
def has_add_permission(self, request):
return False
class EmailUserAdmin(UserAdmin):
fieldsets = (
(None, {'fields': ('email', 'password')}),
(_('Personal Info'), {'fields': ('first_name', 'last_name')}),
(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
'groups', 'user_permissions')}),
(_('Important dates'), {'fields': ('last_login', 'date_joined')}),
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', 'password1', 'password2'),
}),
)
form = EmailUserChangeForm
add_form = EmailUserCreationForm
inlines = [SignupCodeInline, EmailChangeCodeInline, PasswordResetCodeInline]
list_display = ('email', 'is_verified', 'first_name', 'last_name',
'is_staff')
search_fields = ('first_name', 'last_name', 'email')
ordering = ('email',)
admin.site.register(get_user_model(), EmailUserAdmin)
admin.site.register(SignupCode, SignupCodeAdmin)
admin.site.register(PasswordResetCode, PasswordResetCodeAdmin)
admin.site.register(EmailChangeCode, EmailChangeCodeAdmin)
| gpl-3.0 | -7,126,561,763,274,769,000 | 29.245098 | 80 | 0.621718 | false |
simodalla/django-caronte | setup.py | 1 | 1497 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import caronte
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = caronte.__version__
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
print("You probably want to also tag the version now:")
print(" git tag -a %s -m 'version %s'" % (version, version))
print(" git push --tags")
sys.exit()
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
setup(
name='django-caronte',
version=version,
description="""Your project description goes here""",
long_description=readme + '\n\n' + history,
author='Simone Dalla',
author_email='[email protected]',
url='https://github.com/simodalla/django-caronte',
packages=[
'caronte',
],
include_package_data=True,
install_requires=[
],
license="BSD",
zip_safe=False,
keywords='django-caronte',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
],
)
| bsd-3-clause | 5,328,639,089,583,490,000 | 26.218182 | 66 | 0.607882 | false |
llvmpy/llvmpy | test/operands.py | 1 | 2339 | #!/usr/bin/env python
# Tests accessing of instruction operands.
import sys
import logging
import unittest
from llvm.core import *
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
m = None
#===----------------------------------------------------------------------===
# implement a test function
test_module = """
define i32 @prod(i32, i32) {
entry:
%2 = mul i32 %0, %1
ret i32 %2
}
define i32 @test_func(i32, i32, i32) {
entry:
%tmp1 = call i32 @prod(i32 %0, i32 %1)
%tmp2 = add i32 %tmp1, %2
%tmp3 = add i32 %tmp2, 1
ret i32 %tmp3
}
"""
class TestOperands(unittest.TestCase):
def test_operands(self):
m = Module.from_assembly(StringIO(test_module))
logging.debug("-"*60)
logging.debug(m)
logging.debug("-"*60)
test_func = m.get_function_named("test_func")
prod = m.get_function_named("prod")
#===-----------------------------------------------------------===
# test operands
i1 = test_func.basic_blocks[0].instructions[0]
i2 = test_func.basic_blocks[0].instructions[1]
logging.debug("Testing User.operand_count ..")
self.assertEqual(i1.operand_count, 3)
self.assertEqual(i2.operand_count, 2)
logging.debug("Testing User.operands ..")
self.assert_(i1.operands[-1] is prod)
self.assert_(i1.operands[0] is test_func.args[0])
self.assert_(i1.operands[1] is test_func.args[1])
self.assert_(i2.operands[0] is i1)
self.assert_(i2.operands[1] is test_func.args[2])
self.assertEqual(len(i1.operands), 3)
self.assertEqual(len(i2.operands), 2)
#===-----------------------------------------------------------===
# show test_function
logging.debug("Examining test_function `test_test_func':")
idx = 1
for inst in test_func.basic_blocks[0].instructions:
logging.debug("Instruction #%d:", idx)
logging.debug(" operand_count = %d", inst.operand_count)
logging.debug(" operands:")
oidx = 1
for op in inst.operands:
logging.debug(" %d: %s", oidx, repr(op))
oidx += 1
idx += 1
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | -1,771,277,602,085,556,700 | 26.197674 | 77 | 0.525011 | false |
OpenDrift/opendrift | examples/example_plast.py | 1 | 1520 | #!/usr/bin/env python
"""
Plastic
==================================
"""
from datetime import timedelta
from opendrift.readers import reader_netCDF_CF_generic
from opendrift.models.plastdrift import PlastDrift
o = PlastDrift(loglevel=20)
o.list_configspec() # to see available configuration options
# Arome atmospheric model
reader_arome = reader_netCDF_CF_generic.Reader(o.test_data_folder() + '16Nov2015_NorKyst_z_surface/arome_subset_16Nov2015.nc')
# Norkyst ocean model
reader_norkyst = reader_netCDF_CF_generic.Reader(o.test_data_folder() + '16Nov2015_NorKyst_z_surface/norkyst800_subset_16Nov2015.nc')
o.add_reader([reader_norkyst, reader_arome])
start_time = reader_arome.start_time
end_time = reader_arome.start_time + timedelta(hours=5)
end_time = reader_arome.end_time  # overrides the 5-hour window above with the full reader span
time = [start_time, start_time + timedelta(hours=5)]
#%%
# Seeding some particles
lon = 4.6; lat = 60.0; # Outside Bergen
o.seed_elements(lon, lat, radius=50, number=3000, time=time)
o.run(end_time=end_time, time_step=1800, time_step_output=3600)
#%%
# Second run, without wind/Stokes drift
o2 = PlastDrift(loglevel=20)
o2.add_reader([reader_norkyst])
o2.seed_elements(lon, lat, radius=50, number=3000, time=time)
o2.run(end_time=end_time, time_step=1800, time_step_output=3600)
#%%
# Print and plot results
print(o)
o.animation(compare=o2, fast=True,
legend=['Current + wind/Stokes drift', 'Current only'])
#o.animation(color='depth')
#o.plot_property('depth')
#%%
# .. image:: /gallery/animations/example_plast_0.gif
| gpl-2.0 | 396,845,777,678,186,000 | 30.666667 | 133 | 0.721711 | false |
rh-marketingops/dwm | dwm/test/test_val_g_lookup.py | 1 | 1702 | """ test generic validation lookup function """
import mongomock
#from mock import patch
#from nose.tools import raises
from dwm import Dwm
# Setup mongomock db
DB = mongomock.MongoClient().db
DB.genericLookup.insert({"find": "BADVALUE"})
# Setup Dwm instance
FIELDS = {
'field1': {
'lookup': ['genericLookup'],
'derive': []
},
'field2': {
'lookup': ['genericLookup'],
'derive': []
}
}
DWM = Dwm(name='test', mongo=DB, fields=FIELDS)
# Let the testing begin
def test_dwm_vg_lup_bad():
""" Ensure generic lookup occurs """
rec = {'field1': 'BADVALUE'}
rec_out, _ = DWM._val_g_lookup(rec, {}) #pylint: disable=W0212
assert rec_out == {'field1': ''}
def test_dwm_vg_lup_good():
""" Ensure good value not cleared """
rec = {'field1': 'GOODVALUE'}
rec_out, _ = DWM._val_g_lookup(rec, {}) #pylint: disable=W0212
assert rec_out == rec
def test_dwm_vg_lup_badcln():
""" Ensure basic lookup occurs and cleans value before """
rec = {'field1': ' badvalue\r\n '}
rec_out, _ = DWM._val_g_lookup(rec, {}) #pylint: disable=W0212
assert rec_out == {'field1': ''}
def test_dwm_vg_lup_badmulti():
""" Ensure lookup occurs on every field in config """
rec = {'field1': 'BADVALUE', 'field2': 'BADVALUE'}
rec_out, _ = DWM._val_g_lookup(rec, {}) #pylint: disable=W0212
assert rec_out == {'field1': '', 'field2': ''}
def test_dwm_vg_lup_leave():
""" Ensure lookup does not occur on field not in config """
rec = {'field1': 'BADVALUE', 'field3': 'BADVALUE'}
rec_out, _ = DWM._val_g_lookup(rec, {}) #pylint: disable=W0212
assert rec_out == {'field1': '', 'field3': 'BADVALUE'}
| gpl-3.0 | 8,576,870,995,289,725,000 | 25.184615 | 66 | 0.596357 | false |
mikesname/ehri-collections | ehriportal/portal/forms.py | 1 | 6151 | """Portal search forms."""
import string
from django import forms
from django.contrib.admin import widgets
from django.utils import translation
from django.utils.translation import ugettext_lazy as _
from django.forms.models import modelformset_factory, inlineformset_factory
from jsonfield.forms import JSONFormField
from haystack.forms import EmptySearchQuerySet
from portal import models, data, utils
class PortalSearchForm(forms.Form):
ENTITIES = [models.Repository, models.Collection, models.Authority]
SORTFIELDS = (
("", _("Relevance")),
("name", _("Title/Name")),
("publication_date", _("Publication Date")),
("django_ct", _("Type")),
)
q = forms.CharField(required=False, label=_('Search'))
sort = forms.ChoiceField(required=False, choices=SORTFIELDS, label=_("Order By"))
def filter(self, sqs):
"""Filter a search queryset."""
self.sqs = sqs
if self.cleaned_data["sort"]:
self.sqs = self.sqs.order_by(self.cleaned_data["sort"])
if not self.cleaned_data["q"]:
return self.no_query_found()
return sqs.auto_query(self.cleaned_data["q"])
def no_query_found(self):
return self.sqs
class PortalAllSearchForm(PortalSearchForm):
"""Form representing the whole collection."""
# def no_query_found(self):
# return EmptySearchQuerySet()
class LanguageSelectWidget(forms.SelectMultiple):
choices = utils.language_choices()
def __init__(self, *args, **kwargs):
super(LanguageSelectWidget, self).__init__(*args, **kwargs)
class ScriptSelectWidget(forms.SelectMultiple):
choices = utils.script_choices()
class MapSearchForm(PortalSearchForm):
type = forms.ChoiceField(label=_('Type'), choices=(("Repository", "Repository"),
("Collection", "Collection")))
ne = forms.CharField(required=False, label=_('North East'),
widget=forms.HiddenInput())
sw = forms.CharField(required=False, label=_('South West'),
widget=forms.HiddenInput())
def no_query_found(self):
"""Show no results for a map search."""
return EmptySearchQuerySet()
def filter(self, sqs):
"""Filter a search set with geo-bounds."""
model = getattr(models, self.cleaned_data["type"])
sqs = sqs.models(model)
return super(MapSearchForm, self).filter(sqs)
class FacetListSearchForm(PortalSearchForm):
"""Extension of the search form with another field for
the order in which facets are sorted. Since we can't do
this natively with Haystack, we have to hack it ourselves.
"""
sort = forms.ChoiceField(required=False,
choices=(("count",_("Count")), ("name", _("Name"))))
class LanguageSelectFormField(JSONFormField):
def __init__(self, *args, **kwargs):
super(LanguageSelectFormField, self).__init__(*args, **kwargs)
self.widget = forms.SelectMultiple(choices=utils.language_choices())
class ScriptSelectFormField(JSONFormField):
def __init__(self, *args, **kwargs):
super(ScriptSelectFormField, self).__init__(*args, **kwargs)
self.widget = forms.SelectMultiple(choices=utils.script_choices())
class FuzzyDateForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
attrs={
'class':'input-small',
'placeholder': 'Start Date'
}
super(FuzzyDateForm, self).__init__(*args, **kwargs)
self.fields["start_date"].widget = widgets.AdminDateWidget(attrs=attrs)
self.fields["end_date"].widget = widgets.AdminDateWidget(attrs=attrs)
class Meta:
model = models.FuzzyDate
fields = ("start_date", "end_date",)
class OtherNameForm(forms.ModelForm):
class Meta:
fields = ("name",)
widgets = {
"name": forms.TextInput(attrs={'placeholder': _("Type another name here...")}),
}
class PortalEntityForm(forms.ModelForm):
# extra (non-model) field for revision comment
revision_comment = forms.CharField(required=False, widget=forms.TextInput(attrs={
"placeholder": _("Summary of changes (optional)"),
}))
class ContactForm(forms.ModelForm):
class Meta:
model = models.Contact
class CollectionEditForm(PortalEntityForm):
languages = LanguageSelectFormField()
languages_of_description = LanguageSelectFormField()
scripts = ScriptSelectFormField()
scripts_of_description = ScriptSelectFormField()
class Meta:
model = models.Collection
class RepositoryEditForm(PortalEntityForm):
languages = LanguageSelectFormField()
scripts = ScriptSelectFormField()
class Meta:
model = models.Repository
class AuthorityEditForm(PortalEntityForm):
languages = LanguageSelectFormField()
scripts = ScriptSelectFormField()
class Meta:
model = models.Authority
class RestoreRevisionForm(forms.Form):
"""Restore a revision of an object."""
def propertyformset_factory(topclass, propname):
propcls = models.propertyproxy_factory(propname)
return inlineformset_factory(
topclass, propcls, fields=("value",), extra=1)
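
# Editor's illustrative usage (the property name is hypothetical, not from the original):
#   LanguageFormSet = propertyformset_factory(models.Collection, 'language')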
# FIXME: !!! The OtherName formsets below are created using the Collection
# as the primary model, but they're also used in the repository and
# authority forms. This doesn't seem to matter, because when they're
# constructed the primary model seems to be overridden by the instance
# argument given, but it's obviously still wrong and bug-prone.
# The alternative is lots of ugly duplication or another exceedingly
# meta 'factory' function, neither of which are nice options.
DateFormSet = inlineformset_factory(models.Collection, models.FuzzyDate,
form=FuzzyDateForm, extra=1)
OtherNameFormSet = inlineformset_factory(models.Collection, models.OtherFormOfName,
form=OtherNameForm, extra=1)
ParallelNameFormSet = inlineformset_factory(models.Collection, models.ParallelFormOfName,
form=OtherNameForm, extra=1)
ContactFormSet = inlineformset_factory(models.Repository, models.Contact,
form=ContactForm, extra=1)
| mit | -8,758,907,469,438,605,000 | 32.612022 | 95 | 0.679402 | false |
franklingu/leetcode-solutions | questions/longest-common-subsequence/Solution.py | 1 | 1711 | """
Given two strings text1 and text2, return the length of their longest common subsequence.
A subsequence of a string is a new string generated from the original string with some characters(can be none) deleted without changing the relative order of the remaining characters. (eg, "ace" is a subsequence of "abcde" while "aec" is not). A common subsequence of two strings is a subsequence that is common to both strings.
If there is no common subsequence, return 0.
Example 1:
Input: text1 = "abcde", text2 = "ace"
Output: 3
Explanation: The longest common subsequence is "ace" and its length is 3.
Example 2:
Input: text1 = "abc", text2 = "abc"
Output: 3
Explanation: The longest common subsequence is "abc" and its length is 3.
Example 3:
Input: text1 = "abc", text2 = "def"
Output: 0
Explanation: There is no such common subsequence, so the result is 0.
Constraints:
1 <= text1.length <= 1000
1 <= text2.length <= 1000
The input strings consist of lowercase English characters only.
"""
class Solution:
def longestCommonSubsequence(self, text1: str, text2: str) -> int:
if not text1 or not text2:
return 0
dp = [[0] * len(text2) for _ in text1]
for i, c1 in enumerate(text1):
for j, c2 in enumerate(text2):
if c1 == c2:
dp[i][j] = 1 if i < 1 or j < 1 else dp[i - 1][j - 1] + 1
continue
if i < 1:
n1 = 0
else:
n1 = dp[i - 1][j]
if j < 1:
n2 = 0
else:
n2 = dp[i][j - 1]
dp[i][j] = max(n1, n2)
return dp[-1][-1]
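

if __name__ == "__main__":
    # Editor's quick checks (not part of the original solution):
    s = Solution()
    assert s.longestCommonSubsequence("abcde", "ace") == 3
    assert s.longestCommonSubsequence("abc", "abc") == 3
    assert s.longestCommonSubsequence("abc", "def") == 0
    print("all examples passed")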
| mit | -3,796,396,246,717,548,000 | 29.553571 | 328 | 0.5827 | false |
iwinulose/eve | eve/objects.py | 1 | 2549 | # Copyright (c) 2014, Charles Duyk
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
class Entity(object):
def __init__(self, name, theId):
self._name = name
self._id = int(theId)
def __repr__(self):
return "%s(\"%s\", %d)" % (self.__class__.__name__, self.getName(), self.getID())
def __str__(self):
return "%s (id: %d)" % (self.getName(), self.getID())
def __eq__(self, other):
if isinstance(other, Entity):
return self.getID() == other.getID()
return NotImplemented
def __ne__(self, other):
isEqual = self.__eq__(other)
if isEqual is NotImplemented:
return isEqual
return not isEqual
def getName(self):
return self._name
def getID(self):
return self._id
	def valueByVolume(self, pricePerUnit, volume=1.0):
		# Relies on getSize(), which subclasses such as Item are expected to define.
		volumeFloat = volume + 0.0
unitVolume = self.getSize()
pricePerMeter = pricePerUnit/unitVolume
value = pricePerMeter * volumeFloat
return value
class Item(Entity):
def __init__(self, name, marketID, size):
super(Item, self).__init__(name, marketID)
self._size = size + 0.0
def __repr__(self):
return "Item(\"%s\", %d, %f)" % (self.getName(), self.getID(), self.getSize())
def __str__(self):
return "%s: id %d size %f" % (self.getName(), self.getID(), self.getSize())
def getSize(self):
return self._size
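

if __name__ == "__main__":
	# Editor's illustrative example (not part of the original module).
	# The type id and unit volume below are assumptions for demonstration only.
	tritanium = Item("Tritanium", 34, 0.01)
	print(tritanium)
	# Value of 100 cubic meters of this item at 5.0 ISK per unit:
	print(tritanium.valueByVolume(5.0, volume=100.0))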
| bsd-2-clause | -414,263,850,727,675,300 | 31.679487 | 83 | 0.708905 | false |
apache/bloodhound | bloodhound_dashboard/bhdashboard/wiki.py | 2 | 3618 | # -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
r"""Project dashboard for Apache(TM) Bloodhound
WikiMacros and WikiProcessors related to the dashboard system.
"""
from ast import literal_eval
from genshi.builder import tag
from trac.web.chrome import Chrome
from trac.wiki.api import parse_args
from trac.wiki.macros import WikiMacroBase
from bhdashboard.web_ui import DashboardChrome, DashboardModule
GUIDE_NAME = 'Guide'
RENAME_MAP = {'TracGuide': GUIDE_NAME + '/Index',}
def new_name(name, force=False):
if name.startswith('Trac'):
return RENAME_MAP.get(name, GUIDE_NAME + '/' + name[4:])
else:
return name
class WidgetMacro(WikiMacroBase):
"""Embed Bloodhound widgets using WikiFormatting.
"""
#: A gettext domain to translate the macro description
_domain = None
#: A macro description
_description = """Embed Bloodhound widgets using WikiFormatting."""
def expand_macro(self, formatter, name, content):
"""Render widget contents by re-using wiki markup implementation
"""
if self.env[DashboardModule] is None:
return DashboardModule(self.env).alert_disabled()
largs, kwargs = parse_args(content, strict=True)
try:
(widget_name ,) = largs
except ValueError:
template = 'widget_alert.html'
data = {
'msgtype' : 'error',
'msglabel' : 'Error',
'msgbody' : tag('Expected ', tag.code(1),
' positional argument (i.e. widget name), but got ',
tag.code(len(largs)), ' instead'),
'msgdetails' : [
('Macro name', tag.code('WidgetMacro')),
('Arguments', ', '.join(largs) if largs \
else tag.span('None', class_='label')),
],
}
else:
widget_name = widget_name.strip()
wopts = {} ; wargs = {}
def parse_literal(value):
try:
return literal_eval(value)
except (SyntaxError, ValueError):
return value
for argnm, value in kwargs.iteritems():
if argnm.startswith('wo_'):
wopts[argnm[3:]] = value
else :
wargs[argnm] = parse_literal(value)
template = 'widget.html'
data = {
'args' : wargs,
'bhdb' : DashboardChrome(self.env),
'id' : None,
'opts' : wopts,
'widget' : widget_name
}
return Chrome(self.env).render_template(
formatter.req, template, data, fragment=True)
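
# Editor's note (not part of the original file): in wiki text this macro is
# invoked like the following (the widget name and arguments are hypothetical):
#
#     [[WidgetMacro(TicketQuery, wo_max=10, title=My tickets)]]
#
# The single positional argument is the widget name; 'wo_'-prefixed keyword
# arguments are passed as widget options, everything else as widget arguments.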
| apache-2.0 | 3,236,449,236,041,884,000 | 35.18 | 80 | 0.572692 | false |
bskari/sparkfun-avc | analysis/process_gps.py | 1 | 1997 | """Formats GPS log messages into a path KMZ file that Google Earth can read."""
#!/bin/env python
import collections
import json
import sys
from plot_points import get_kml
def main():
"""Main function."""
if len(sys.argv) <= 1:
        print('Usage: {} <log file> [out kml file]'.format(sys.argv[0]))
return
in_file_name = sys.argv[1]
name = in_file_name[:in_file_name.rfind('.')]
out_file_name = sys.argv[2] if len(sys.argv) > 2 else 'out.kml'
with open(in_file_name) as in_stream:
lines = in_stream.readlines()
runs = process_lines(iter(lines))
with open(out_file_name, 'w') as out_stream:
out_stream.write(get_kml(runs, name))
def process_lines(in_stream):
"""I don't know."""
run_count = 1
runs = []
for line in in_stream:
if 'Received run command' in line or 'Button pressed' in line:
print('Starting run {}'.format(run_count))
runs.append(process_run(in_stream, run_count))
run_count += 1
return runs
def process_run(in_stream, run_count):
"""Returns the points in a run."""
points = collections.defaultdict(lambda: [])
for line in in_stream:
if 'Received stop command' in line or 'No waypoints, stopping' in line:
break
elif '"device_id"' in line:
parts = json.loads(line[line.find('{'):line.rfind('}') + 1])
if 'latitude_d' not in parts:
# Probably an accelerometer message
continue
latitude = parts['latitude_d']
longitude = parts['longitude_d']
# Ignore early bad estimates
if latitude > 1:
points[parts['device_id']].append((latitude, longitude))
else:
print('Ignoring {},{}'.format(latitude, longitude))
print(
'Ending run {} with {} paths'.format(
run_count,
len(points)
)
)
return points
if __name__ == '__main__':
main()
| mit | -5,713,281,913,200,312,000 | 28.367647 | 79 | 0.564347 | false |
chilltemp/raspberrypi-lighting | src/app.py | 1 | 17994 | # -*- coding: utf-8 -*-
from __future__ import with_statement
import logging
import time
import json
import Queue
import argparse
import os
import ConfigParser
from logging.handlers import RotatingFileHandler
from random import randint
from threading import Thread
from datetime import datetime, timedelta
from flask import Flask, request, session, url_for, redirect, render_template, abort, g, flash, jsonify
from sys import exit
from apscheduler.scheduler import Scheduler
SECRET_KEY = 'nkjfsnkgbkfnge347r28fherg8fskgsd2r3fjkenwkg33f3s'
CONFIGURATION_PATH = "circadian.conf"
led_chain = None
auto_resume_offset = None # Now set in config file.
auto_resume_job = None
# create our little application
app = Flask(__name__)
app.config.from_object(__name__)
app.debug = True # !!! Set this to False for production use !!!
time_format = "%H:%M:%S"
# Event times should be in the form of %H:%M:%S
# Event states should be in the form of [Red,Green,Blue]
# Event names should be unique, as they are used for last run information
auto_state_events = [{'event_name' : 'Night Phase', 'event_start_time' : '00:00:00', 'event_end_time' : '07:00:00' , 'event_state' : [0,0,0], 'transition_duration': 1000},
{'event_name' : 'Sunrise Phase', 'event_start_time' : '07:30:00', 'event_end_time' : '08:59:00' , 'event_state' : [255,109,0], 'transition_duration': 10000},
{'event_name' : 'At Work', 'event_start_time' : '09:00:00', 'event_end_time' : '18:59:00' , 'event_state' : [0,0,0], 'transition_duration': 5000},
{'event_name' : 'Alert Phase', 'event_start_time' : '19:00:00', 'event_end_time' : '21:59:00' , 'event_state' : [0,78,103], 'transition_duration': 5000},
{'event_name' : 'Relaxation Phase', 'event_start_time' : '22:00:00', 'event_end_time' : '23:59:00' , 'event_state' : [255,27,14], 'transition_duration': 3000},]
# need to work on further transition modes.
valid_transition_modes = ['fade']
# Currently supported led drivers
valid_led_drivers = ['ws2801','lpd6803','lpd8806']
# Stolen from http://stackoverflow.com/questions/4296249/how-do-i-convert-a-hex-triplet-to-an-rgb-tuple-and-back
HEX = '0123456789abcdef'
HEX2 = dict((a+b, HEX.index(a)*16 + HEX.index(b)) for a in HEX for b in HEX)
def rgb(triplet):
triplet = triplet.lower()
return { 'R' : HEX2[triplet[0:2]], 'G' : HEX2[triplet[2:4]], 'B' : HEX2[triplet[4:6]]}
def triplet(rgb):
return format((rgb[0]<<16)|(rgb[1]<<8)|rgb[2], '06x')
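
# Editor's illustrative examples (not part of the original file):
#   rgb('ff6d00')          -> {'R': 255, 'G': 109, 'B': 0}
#   triplet([255, 109, 0]) -> 'ff6d00'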
class Chain_Communicator:
def __init__(self, driver_type, chain_length):
if driver_type == 'ws2801':
from pigredients.ics import ws2801 as ws2801
self.led_chain = ws2801.WS2801_Chain(ics_in_chain=chain_length)
elif driver_type == 'lpd6803':
from pigredients.ics import lpd6803 as lpd6803
self.led_chain = lpd6803.LPD6803_Chain(ics_in_chain=chain_length)
elif driver_type == 'lpd8806':
from pigredients.ics import lpd8806 as lpd8806
self.led_chain = lpd8806.LPD8806_Chain(ics_in_chain=chain_length)
self.auto_resume_job = None
self.queue = Queue.Queue()
self.mode_jobs = []
self.state = 'autonomous'
self.led_state = [0,0,0]
# use a running flag for our while loop
self.run = True
app.logger.debug("Chain_Communicator starting main_loop.")
self.loop_instance = Thread(target=self.main_loop)
self.loop_instance.start()
app.logger.info("Running resume auto, in case were in an auto event.")
self.resume_auto()
app.logger.debug("Chain_Communicator init complete.")
def main_loop(self):
try:
app.logger.debug("main_loop - processing queue ...")
while self.run :
# Grab the next lighting event, block until there is one.
lighting_event = self.queue.get(block=True)
# set our chain state
self.led_chain.set_rgb(lighting_event)
# write out the previously set state.
self.led_chain.write()
# store our state for later comparisons.
self.led_state = lighting_event
except KeyboardInterrupt:
self.run = False
app.logger.warning("Caught keyboard interupt in main_loop. Shutting down ...")
def auto_transition(self, *args, **kwargs):
# accepts all events from scheduler, checks if in auto mode, if not throws them away.
if self.state is 'autonomous':
self.transition(*args, **kwargs)
def transition(self, state, transition_duration=500, transition_mode='fade'):
# States must be in the format of a list containing Red, Green and Blue element values in order.
# example. White = [255,255,255] Red = [255,0,0] Blue = [0,0,255] etc.
        # a duration is represented in an incredibly imprecise unit, known as ticks. Ticks are executed as fast as the queue can be processed.
if transition_mode not in valid_transition_modes:
raise Exception("Invalid transition mode : %s , valid modes are : %s" % (transition_mode, valid_transition_modes))
with self.queue.mutex:
self.queue.queue.clear()
app.logger.info("Current state is : %s , destination state is : %s , transitioning via %s in : %d ticks" % (self.led_state, state, transition_mode, transition_duration))
if transition_mode is 'fade':
# Using a modified version of http://stackoverflow.com/questions/6455372/smooth-transition-between-two-states-dynamically-generated for smooth transitions between states.
for transition_count in range(transition_duration - 1):
event_state = []
for component in range(3):
event_state.append(self.led_state[component] + (state[component] - self.led_state[component]) * transition_count / transition_duration)
self.queue.put(event_state)
# last event is always fixed to the destination state to ensure we get there, regardless of any rounding errors. May need to rethink this mechanism, as I suspect small transitions will be prone to rounding errors resulting in a large final jump.
self.queue.put(state)
def clear_mode(self):
app.logger.debug("Removing any mode jobs from queue")
for job in self.mode_jobs:
app.logger.debug("Removing existing mode job")
sched.unschedule_job(job)
self.mode_jobs = []
def resume_auto(self):
if self.state is not 'manual':
self.clear_mode()
# returns system state to autonomous, to be triggered via the scheduler, or via a request hook from the web ui.
self.state = 'autonomous'
app.logger.debug("Resume auto called, system state is now : %s" % self.state)
app.logger.info("Looking to see if current time falls within any events.")
current_time = datetime.time(datetime.now())
for event in auto_state_events:
start_time = datetime.time(datetime.strptime(event['event_start_time'],time_format))
end_time = datetime.time(datetime.strptime(event['event_end_time'],time_format))
if current_time > start_time and current_time < end_time:
app.logger.info("Event : '%s' falls within the current time, executing state." % event['event_name'])
self.auto_transition(state=event['event_state'])
break
def shutdown(self):
self.run = False
# send final state to avoid blocking on queue.
self.queue.put([0,0,0])
self.loop_instance.join()
def format_datetime(timestamp):
"""Format a timestamp for display."""
return datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%d @ %H:%M')
def manual_set(hex_val):
if led_chain.state is not 'manual':
led_chain.clear_mode()
return_object = {'output' : None , 'error' : None, 'success' : False}
rgb_val = rgb(hex_val)
app.logger.debug("Given colour_val : %s, converted it to %s" % (hex_val, rgb_val))
led_chain.transition([rgb_val['R'],rgb_val['G'],rgb_val['B']])
app.logger.info("Set chain to manual colour state : %s" % [rgb_val['R'],rgb_val['G'],rgb_val['B']])
# Set our schedulars auto resume time.
# !!! change this to minutes when finished debugging !!!
# large gracetime as we want to make sure it fires, regardless of how late it is.
for job in sched.get_jobs():
if job.name == 'autoresume':
app.logger.debug("Removing existing autoresume job, and adding a new one.")
sched.unschedule_job(led_chain.auto_resume_job)
led_chain.auto_resume_job = sched.add_date_job(led_chain.resume_auto, datetime.now() + timedelta(minutes=auto_resume_offset), name='autoresume', misfire_grace_time=240)
break
else:
app.logger.debug("No existing autoresume jobs, adding one.")
led_chain.auto_resume_job = sched.add_date_job(led_chain.resume_auto, datetime.now() + timedelta(minutes=auto_resume_offset), name='autoresume', misfire_grace_time=240)
app.logger.debug("Job list now contains : %s" % sched.print_jobs())
led_chain.state = 'manual'
return_object['success'] = True
return return_object
@app.before_request
def before_request():
pass
@app.teardown_request
def teardown_request(exception):
pass
@app.route('/')
def index():
return render_template('index.html')
@app.route('/mode/auto')
def auto_mode():
if led_chain.state is not 'autonomous':
led_chain.resume_auto()
return jsonify({'sucess' : True})
@app.route('/mode/off')
def off_mode():
manual_set('000000')
return jsonify({'sucess' : True})
@app.route('/mode/cycle')
def cycle_mode():
if led_chain.state is not 'cycle':
led_chain.state = 'cycle'
# Schedule our cycle events ...
led_chain.mode_jobs = []
led_chain.mode_jobs.append(sched.add_interval_job(led_chain.transition, seconds=40, start_date=datetime.now() + timedelta(seconds=1), name='__cycle_0', kwargs={'state' : [126,0,255], 'transition_duration' : 800}))
led_chain.mode_jobs.append(sched.add_interval_job(led_chain.transition, seconds=40, start_date=datetime.now() + timedelta(seconds=6), name='__cycle_1', kwargs={'state' : [255,0,188], 'transition_duration' : 800}))
led_chain.mode_jobs.append(sched.add_interval_job(led_chain.transition, seconds=40, start_date=datetime.now() + timedelta(seconds=11), name='__cycle_2', kwargs={'state' : [255,0,0], 'transition_duration' : 800}))
led_chain.mode_jobs.append(sched.add_interval_job(led_chain.transition, seconds=40, start_date=datetime.now() + timedelta(seconds=16), name='__cycle_3', kwargs={'state' : [255,197,0], 'transition_duration' : 800}))
led_chain.mode_jobs.append(sched.add_interval_job(led_chain.transition, seconds=40, start_date=datetime.now() + timedelta(seconds=21), name='__cycle_4', kwargs={'state' : [135,255,0], 'transition_duration' : 800}))
led_chain.mode_jobs.append(sched.add_interval_job(led_chain.transition, seconds=40, start_date=datetime.now() + timedelta(seconds=26), name='__cycle_5', kwargs={'state' : [0,255,34], 'transition_duration' : 800}))
led_chain.mode_jobs.append(sched.add_interval_job(led_chain.transition, seconds=40, start_date=datetime.now() + timedelta(seconds=31), name='__cycle_6', kwargs={'state' : [0,255,254], 'transition_duration' : 800}))
led_chain.mode_jobs.append(sched.add_interval_job(led_chain.transition, seconds=40, start_date=datetime.now() + timedelta(seconds=36), name='__cycle_7', kwargs={'state' : [0,52,255], 'transition_duration' : 800}))
# Set our schedulars auto resume time.
# !!! change this to minutes when finished debugging !!!
# large gracetime as we want to make sure it fires, regardless of how late it is.
for job in sched.get_jobs():
if job.name == 'autoresume':
app.logger.debug("Removing existing autoresume job, and adding a new one.")
sched.unschedule_job(led_chain.auto_resume_job)
led_chain.auto_resume_job = sched.add_date_job(led_chain.resume_auto, datetime.now() + timedelta(minutes=auto_resume_offset), name='autoresume', misfire_grace_time=240)
break
else:
app.logger.debug("No existing autoresume jobs, adding one.")
led_chain.auto_resume_job = sched.add_date_job(led_chain.resume_auto, datetime.now() + timedelta(minutes=auto_resume_offset), name='autoresume', misfire_grace_time=240)
return jsonify({'sucess' : True})
@app.route('/job/list')
def list_jobs():
    app.logger.debug("Current jobs: %s" % sched.get_jobs())
job_list = {}
for job in sched.get_jobs():
# Filter our our internal jobs
if not job.name.startswith('__'):
job_list[job.name] = {}
print "Job trigger : %s type : %s" % (job.trigger, type(job.trigger))
job_list[job.name]['type'] = 'stuff'
return jsonify(job_list)
@app.route('/job/delete')
def delete_job():
return jsonify({'sucess' : False})
@app.route('/job/date/add')
def add_date_job():
random_state = []
for i in range(3):
random_state.append(randint(0,255))
sched.add_date_job(led_chain.transition, datetime.now() + timedelta(seconds=10), kwargs={'state' : random_state})
app.logger.debug("Job list now contains : %s" % sched.print_jobs())
return jsonify({'sucess' : True})
@app.route('/job/cron/add')
def add_cron_job():
return jsonify({'sucess' : False})
@app.route('/job/interval/add')
def add_interval_job():
return jsonify({'sucess' : False})
@app.route('/get/current_state')
def get_state():
return jsonify({'state': "#%s" % triplet(led_chain.led_state)})
@app.route('/get/current_mode')
def get_mode():
return jsonify({'mode': "%s" % led_chain.state})
@app.route('/set/<hex_val>', methods=['GET', 'POST'])
def send_command(hex_val):
return jsonify(manual_set(hex_val))
# add some filters to jinja
app.jinja_env.filters['datetimeformat'] = format_datetime
if __name__ == '__main__':
app_config = ConfigParser.SafeConfigParser()
app_config.readfp(open(CONFIGURATION_PATH))
# create console handler and set level to debug, with auto log rotate max size 10mb keeping 10 logs.
#file_handler = RotatingFileHandler( app_config.get("general", "logging_path") , maxBytes=10240000, backupCount=10)
# create formatter
#log_formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(name)s - %(message)s")
# add formatter to our console handler
#file_handler.setFormatter(log_formatter)
#file_handler.setLevel(logging.DEBUG)
#app.logger.addHandler(file_handler)
try:
auto_resume_offset = int(app_config.get("behaviour", "auto_resume_delay"))
except ConfigParser.NoOptionError:
app.logger.warning("No 'auto_resume_delay' option specified in 'behaviour' section of the config file, defaulting to 90")
auto_resume_offset = 90
parser = argparse.ArgumentParser(description='Circadian and Mood Lighting.')
parser.add_argument('--type', action="store", dest="driver_type", required=False, help='The model number of the LED driver, eg. ws2801 or lpd6803. defaults to configuration file.')
parser.add_argument('--length', action="store", dest="led_count", type=int, required=False, help='The number of LEDs in the chain. defaults to configuration file.')
args = parser.parse_args()
if args.driver_type is not None and args.led_count is not None:
if args.driver_type.lower() in valid_led_drivers:
app.logger.info("LED Driver is :%s with %d in the chain" % (args.driver_type, args.led_count))
led_chain = Chain_Communicator(driver_type=args.driver_type.lower(), chain_length=args.led_count)
else:
raise Exception("Invalid LED Driver %s specified, implemented types are : %s" % (args.driver_type, valid_led_drivers))
else:
try:
led_chain = Chain_Communicator(driver_type=app_config.get("chain", "type"), chain_length=int(app_config.get("chain", "length")))
except ConfigParser.NoOptionError:
app.logger.warning("Unable to find both length and type properties in chain section of configuration file.")
sched = Scheduler()
sched.start()
# calculate our events from the auto_state_events list, need to find a better way of doing this, maybe a config file.
for event in auto_state_events:
app.logger.info("Processing scheduled event : %s" % event['event_name'])
start_hour = event['event_start_time'].split(':')[0]
start_minute = event['event_start_time'].split(':')[1]
start_second = event['event_start_time'].split(':')[2]
start_time = datetime.strptime(event['event_start_time'],time_format)
end_time = datetime.strptime(event['event_end_time'],time_format)
event_duration = (end_time - start_time).seconds
sched.add_cron_job(led_chain.auto_transition, hour=start_hour, minute=start_minute, second=start_second , name=event['event_name'], kwargs={'state' : event['event_state'], 'transition_duration' : event['transition_duration']}, misfire_grace_time=event_duration)
app.logger.debug("Startup job list contains : %s" % sched.get_jobs())
try:
app.run(host='0.0.0.0', port=int(app_config.get("general", "web_port")), use_reloader=False)
except KeyboardInterrupt:
app.logger.warning("Caught keyboard interupt. Shutting down ...")
app.logger.info("Calling shutdown on led chain")
led_chain.shutdown()
app.logger.info("Calling shutdown on scheduler")
sched.shutdown(wait=False)
app.logger.info("Shutting down logger and exiting ...")
logging.shutdown()
exit(0)
| mit | -8,704,567,486,014,967,000 | 47.501348 | 269 | 0.651217 | false |
blackpioter/sendgrid-python | examples/whitelabel/whitelabel.py | 1 | 8553 | import sendgrid
import json
import os
sg = sendgrid.SendGridAPIClient(apikey=os.environ.get('SENDGRID_API_KEY'))
##################################################
# Create a domain whitelabel. #
# POST /whitelabel/domains #
data = {
"automatic_security": False,
"custom_spf": True,
"default": True,
"domain": "example.com",
"ips": [
"192.168.1.1",
"192.168.1.2"
],
"subdomain": "news",
"username": "[email protected]"
}
response = sg.client.whitelabel.domains.post(request_body=data)
print(response.status_code)
print(response.body)
print(response.headers)
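# Editor's note (illustrative, an assumption about the client internals): every
# call below follows the same pattern -- the fluent `sg.client.<path>` builder
# issues the HTTP request and returns a python_http_client Response whose
# status_code, body and headers are printed.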
##################################################
# List all domain whitelabels. #
# GET /whitelabel/domains #
params = {'username': 'test_string', 'domain': 'test_string',
'exclude_subusers': 'true', 'limit': 1, 'offset': 1}
response = sg.client.whitelabel.domains.get(query_params=params)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Get the default domain whitelabel. #
# GET /whitelabel/domains/default #
response = sg.client.whitelabel.domains.default.get()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# List the domain whitelabel associated with the given user. #
# GET /whitelabel/domains/subuser #
response = sg.client.whitelabel.domains.subuser.get()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Disassociate a domain whitelabel from a given user. #
# DELETE /whitelabel/domains/subuser #
response = sg.client.whitelabel.domains.subuser.delete()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Update a domain whitelabel. #
# PATCH /whitelabel/domains/{domain_id} #
data = {
"custom_spf": True,
"default": False
}
domain_id = "test_url_param"
response = sg.client.whitelabel.domains._(domain_id).patch(request_body=data)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Retrieve a domain whitelabel. #
# GET /whitelabel/domains/{domain_id} #
domain_id = "test_url_param"
response = sg.client.whitelabel.domains._(domain_id).get()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Delete a domain whitelabel. #
# DELETE /whitelabel/domains/{domain_id} #
domain_id = "test_url_param"
response = sg.client.whitelabel.domains._(domain_id).delete()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Associate a domain whitelabel with a given user. #
# POST /whitelabel/domains/{domain_id}/subuser #
data = {
"username": "[email protected]"
}
domain_id = "test_url_param"
response = sg.client.whitelabel.domains._(
domain_id).subuser.post(request_body=data)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Add an IP to a domain whitelabel. #
# POST /whitelabel/domains/{id}/ips #
data = {
"ip": "192.168.0.1"
}
id = "test_url_param"
response = sg.client.whitelabel.domains._(id).ips.post(request_body=data)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Remove an IP from a domain whitelabel. #
# DELETE /whitelabel/domains/{id}/ips/{ip} #
id = "test_url_param"
ip = "test_url_param"
response = sg.client.whitelabel.domains._(id).ips._(ip).delete()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Validate a domain whitelabel. #
# POST /whitelabel/domains/{id}/validate #
id = "test_url_param"
response = sg.client.whitelabel.domains._(id).validate.post()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Create an IP whitelabel #
# POST /whitelabel/ips #
data = {
"domain": "example.com",
"ip": "192.168.1.1",
"subdomain": "email"
}
response = sg.client.whitelabel.ips.post(request_body=data)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Retrieve all IP whitelabels #
# GET /whitelabel/ips #
params = {'ip': 'test_string', 'limit': 1, 'offset': 1}
response = sg.client.whitelabel.ips.get(query_params=params)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Retrieve an IP whitelabel #
# GET /whitelabel/ips/{id} #
id = "test_url_param"
response = sg.client.whitelabel.ips._(id).get()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Delete an IP whitelabel #
# DELETE /whitelabel/ips/{id} #
id = "test_url_param"
response = sg.client.whitelabel.ips._(id).delete()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Validate an IP whitelabel #
# POST /whitelabel/ips/{id}/validate #
id = "test_url_param"
response = sg.client.whitelabel.ips._(id).validate.post()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Create a Link Whitelabel #
# POST /whitelabel/links #
data = {
"default": True,
"domain": "example.com",
"subdomain": "mail"
}
params = {'limit': 1, 'offset': 1}
response = sg.client.whitelabel.links.post(
request_body=data, query_params=params)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Retrieve all link whitelabels #
# GET /whitelabel/links #
params = {'limit': 1}
response = sg.client.whitelabel.links.get(query_params=params)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Retrieve a Default Link Whitelabel #
# GET /whitelabel/links/default #
params = {'domain': 'test_string'}
response = sg.client.whitelabel.links.default.get(query_params=params)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Retrieve Associated Link Whitelabel #
# GET /whitelabel/links/subuser #
params = {'username': 'test_string'}
response = sg.client.whitelabel.links.subuser.get(query_params=params)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Disassociate a Link Whitelabel #
# DELETE /whitelabel/links/subuser #
params = {'username': 'test_string'}
response = sg.client.whitelabel.links.subuser.delete(query_params=params)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Update a Link Whitelabel #
# PATCH /whitelabel/links/{id} #
data = {
"default": True
}
id = "test_url_param"
response = sg.client.whitelabel.links._(id).patch(request_body=data)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Retrieve a Link Whitelabel #
# GET /whitelabel/links/{id} #
id = "test_url_param"
response = sg.client.whitelabel.links._(id).get()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Delete a Link Whitelabel #
# DELETE /whitelabel/links/{id} #
id = "test_url_param"
response = sg.client.whitelabel.links._(id).delete()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Validate a Link Whitelabel #
# POST /whitelabel/links/{id}/validate #
id = "test_url_param"
response = sg.client.whitelabel.links._(id).validate.post()
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Associate a Link Whitelabel #
# POST /whitelabel/links/{link_id}/subuser #
data = {
"username": "[email protected]"
}
link_id = "test_url_param"
response = sg.client.whitelabel.links._(
link_id).subuser.post(request_body=data)
print(response.status_code)
print(response.body)
print(response.headers)
| mit | 2,733,658,310,389,220,400 | 26.238854 | 77 | 0.611131 | false |
sridevikoushik31/nova | nova/console/xvp.py | 1 | 7082 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""XVP (Xenserver VNC Proxy) driver."""
import os
import signal
from Cheetah import Template
from oslo.config import cfg
from nova import context
from nova import db
from nova.openstack.common import excutils
from nova.openstack.common import log as logging
from nova.openstack.common import processutils
from nova import paths
from nova import utils
xvp_opts = [
cfg.StrOpt('console_xvp_conf_template',
default=paths.basedir_def('nova/console/xvp.conf.template'),
help='XVP conf template'),
cfg.StrOpt('console_xvp_conf',
default='/etc/xvp.conf',
help='generated XVP conf file'),
cfg.StrOpt('console_xvp_pid',
default='/var/run/xvp.pid',
help='XVP master process pid file'),
cfg.StrOpt('console_xvp_log',
default='/var/log/xvp.log',
help='XVP log file'),
cfg.IntOpt('console_xvp_multiplex_port',
default=5900,
help='port for XVP to multiplex VNC connections on'),
]
CONF = cfg.CONF
CONF.register_opts(xvp_opts)
CONF.import_opt('host', 'nova.netconf')
LOG = logging.getLogger(__name__)
class XVPConsoleProxy(object):
"""Sets up XVP config, and manages XVP daemon."""
def __init__(self):
self.xvpconf_template = open(CONF.console_xvp_conf_template).read()
self.host = CONF.host # default, set by manager.
super(XVPConsoleProxy, self).__init__()
@property
def console_type(self):
return 'vnc+xvp'
def get_port(self, context):
"""Get available port for consoles that need one."""
#TODO(mdragon): implement port selection for non multiplex ports,
# we are not using that, but someone else may want
# it.
return CONF.console_xvp_multiplex_port
def setup_console(self, context, console):
"""Sets up actual proxies."""
self._rebuild_xvp_conf(context.elevated())
def teardown_console(self, context, console):
"""Tears down actual proxies."""
self._rebuild_xvp_conf(context.elevated())
def init_host(self):
"""Start up any config'ed consoles on start."""
ctxt = context.get_admin_context()
self._rebuild_xvp_conf(ctxt)
def fix_pool_password(self, password):
"""Trim password to length, and encode."""
return self._xvp_encrypt(password, is_pool_password=True)
def fix_console_password(self, password):
"""Trim password to length, and encode."""
return self._xvp_encrypt(password)
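    # Illustrative sketch (not part of the original module): both helpers defer to
    # _xvp_encrypt below, e.g.
    #   proxy = XVPConsoleProxy()
    #   proxy.fix_console_password('secret')      # trimmed to 8 chars, runs `xvp -e`
    #   proxy.fix_pool_password('pool-password')  # trimmed to 16 chars, runs `xvp -x`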
def _rebuild_xvp_conf(self, context):
LOG.debug(_('Rebuilding xvp conf'))
pools = [pool for pool in
db.console_pool_get_all_by_host_type(context, self.host,
self.console_type)
if pool['consoles']]
if not pools:
LOG.debug('No console pools!')
self._xvp_stop()
return
conf_data = {'multiplex_port': CONF.console_xvp_multiplex_port,
'pools': pools,
'pass_encode': self.fix_console_password}
config = str(Template.Template(self.xvpconf_template,
searchList=[conf_data]))
self._write_conf(config)
self._xvp_restart()
def _write_conf(self, config):
try:
LOG.debug(_('Re-wrote %s') % CONF.console_xvp_conf)
with open(CONF.console_xvp_conf, 'w') as cfile:
cfile.write(config)
except IOError:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to write configuration file"))
def _xvp_stop(self):
LOG.debug(_('Stopping xvp'))
pid = self._xvp_pid()
if not pid:
return
try:
os.kill(pid, signal.SIGTERM)
except OSError:
#if it's already not running, no problem.
pass
def _xvp_start(self):
if self._xvp_check_running():
return
LOG.debug(_('Starting xvp'))
try:
utils.execute('xvp',
'-p', CONF.console_xvp_pid,
'-c', CONF.console_xvp_conf,
'-l', CONF.console_xvp_log)
except processutils.ProcessExecutionError, err:
LOG.error(_('Error starting xvp: %s') % err)
def _xvp_restart(self):
LOG.debug(_('Restarting xvp'))
if not self._xvp_check_running():
LOG.debug(_('xvp not running...'))
self._xvp_start()
else:
pid = self._xvp_pid()
os.kill(pid, signal.SIGUSR1)
def _xvp_pid(self):
try:
with open(CONF.console_xvp_pid, 'r') as pidfile:
pid = int(pidfile.read())
except IOError:
return None
except ValueError:
return None
return pid
def _xvp_check_running(self):
pid = self._xvp_pid()
if not pid:
return False
try:
os.kill(pid, 0)
except OSError:
return False
return True
def _xvp_encrypt(self, password, is_pool_password=False):
"""Call xvp to obfuscate passwords for config file.
Args:
- password: the password to encode, max 8 char for vm passwords,
and 16 chars for pool passwords. passwords will
be trimmed to max len before encoding.
        - is_pool_password: True if this is the XenServer API password
False if it's a VM console password
(xvp uses different keys and max lengths for pool passwords)
Note that xvp's obfuscation should not be considered 'real' encryption.
It simply DES encrypts the passwords with static keys plainly viewable
in the xvp source code.
"""
maxlen = 8
flag = '-e'
if is_pool_password:
maxlen = 16
flag = '-x'
#xvp will blow up on passwords that are too long (mdragon)
password = password[:maxlen]
out, err = utils.execute('xvp', flag, process_input=password)
if err:
raise processutils.ProcessExecutionError(_("Failed to run xvp."))
return out.strip()
| apache-2.0 | -4,573,463,928,261,764,600 | 33.715686 | 79 | 0.57865 | false |
magfest/ubersystem | alembic/versions/e74a6a5904cd_add_payment_method_to_receipt_items.py | 1 | 1766 | """Add payment_method to receipt items
Revision ID: e74a6a5904cd
Revises: 53b71e7c45b5
Create Date: 2019-12-20 19:00:34.631484
"""
# revision identifiers, used by Alembic.
revision = 'e74a6a5904cd'
down_revision = '53b71e7c45b5'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
try:
is_sqlite = op.get_context().dialect.name == 'sqlite'
except Exception:
is_sqlite = False
if is_sqlite:
op.get_context().connection.execute('PRAGMA foreign_keys=ON;')
utcnow_server_default = "(datetime('now', 'utc'))"
else:
utcnow_server_default = "timezone('utc', current_timestamp)"
def sqlite_column_reflect_listener(inspector, table, column_info):
"""Adds parenthesis around SQLite datetime defaults for utcnow."""
if column_info['default'] == "datetime('now', 'utc')":
column_info['default'] = utcnow_server_default
sqlite_reflect_kwargs = {
'listeners': [('column_reflect', sqlite_column_reflect_listener)]
}
# ===========================================================================
# HOWTO: Handle alter statements in SQLite
#
# def upgrade():
# if is_sqlite:
# with op.batch_alter_table('table_name', reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
# batch_op.alter_column('column_name', type_=sa.Unicode(), server_default='', nullable=False)
# else:
# op.alter_column('table_name', 'column_name', type_=sa.Unicode(), server_default='', nullable=False)
#
# ===========================================================================
def upgrade():
op.add_column('receipt_item', sa.Column('payment_method', sa.Integer(), server_default='180350097', nullable=False))
def downgrade():
op.drop_column('receipt_item', 'payment_method')
| agpl-3.0 | -5,071,260,178,703,831,000 | 28.932203 | 120 | 0.627973 | false |
arotka/gapi | script/training/takePhotos.py | 1 | 2304 | #!/usr/bin/python
"""Raspberry Pi Face Recognition Security gate access
Face Detection Helper Functions
Copyright 2014 Amine KARROUT
Take photos for training the face recognizer.
"""
import glob
import os
import sys
import select
import cv2
import config
import face
# Prefix for positive training image filenames.
POSITIVE_FILE_PREFIX = 'positive_'
def is_letter_input(letter):
# Utility function to check if a specific character is available on stdin.
# Comparison is case insensitive.
if select.select([sys.stdin,],[],[],0.0)[0]:
input_char = sys.stdin.read(1)
return input_char.lower() == letter.lower()
return False
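# Editor's note: is_letter_input() polls stdin without blocking (select() with a
# zero timeout), which suits use alongside a hardware button; the capture loop
# below uses a blocking prompt instead.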
if __name__ == '__main__':
camera = config.get_camera()
# Create the directory for positive training images if it doesn't exist.
if not os.path.exists(config.POSITIVE_DIR):
os.makedirs(config.POSITIVE_DIR)
# Find the largest ID of existing positive images.
# Start new images after this ID value.
files = sorted(glob.glob(os.path.join(config.POSITIVE_DIR,
POSITIVE_FILE_PREFIX + '[0-9][0-9][0-9].pgm')))
count = 0
if len(files) > 0:
# Grab the count from the last filename.
count = int(files[-1][-7:-4])+1
print 'Capturing positive training images.'
print 'Press button or type c (and press enter) to capture an image.'
print 'Press Ctrl-C to quit.'
while True:
        # Check if 'c' was received, then capture an image; other input is ignored.
        if raw_input("Type c and press Enter to capture an image: ").lower() == 'c':
print 'Capturing image...'
image = camera.read()
# Convert image to grayscale.
image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
# Get coordinates of single face in captured image.
result = face.detect_single(image)
if result is None:
print 'Could not detect single face! Check the image in capture.pgm' \
' to see what was captured and try again with only one face visible.'
continue
x, y, w, h = result
# Crop image as close as possible to desired face aspect ratio.
# Might be smaller if face is near edge of image.
crop = face.crop(image, x, y, w, h)
# Save image to file.
filename = os.path.join(config.POSITIVE_DIR, POSITIVE_FILE_PREFIX + '%03d.pgm' % count)
cv2.imwrite(filename, crop)
print 'Found face and wrote training image', filename
count += 1
| mit | 6,727,640,093,180,870,000 | 31.914286 | 90 | 0.705729 | false |
dpshelio/sunpy | sunpy/visualization/animator/image.py | 2 | 9327 | import matplotlib as mpl
import astropy.wcs
from sunpy.visualization.animator.base import ArrayAnimator
__all__ = ['ImageAnimator', 'ImageAnimatorWCS']
class ImageAnimator(ArrayAnimator):
"""
Create a matplotlib backend independent data explorer for 2D images.
The following keyboard shortcuts are defined in the viewer:
* 'left': previous step on active slider.
* 'right': next step on active slider.
* 'top': change the active slider up one.
* 'bottom': change the active slider down one.
* 'p': play/pause active slider.
This viewer can have user defined buttons added by specifying the labels
and functions called when those buttons are clicked as keyword arguments.
Parameters
----------
data: `numpy.ndarray`
The data to be visualized.
image_axes: `list`, optional
A list of the axes order that make up the image.
axis_ranges: `list` of physical coordinates for the `numpy.ndarray`, optional
Defaults to `None` and array indices will be used for all axes.
The `list` should contain one element for each axis of the `numpy.ndarray`.
For the image axes a ``[min, max]`` pair should be specified which will be
passed to `matplotlib.pyplot.imshow` as an extent.
For the slider axes a ``[min, max]`` pair can be specified or an array the
same length as the axis which will provide all values for that slider.
Notes
-----
Extra keywords are passed to `~sunpy.visualization.animator.ArrayAnimator`.
"""
def __init__(self, data, image_axes=[-2, -1], axis_ranges=None, **kwargs):
# Check that number of axes is 2.
if len(image_axes) != 2:
raise ValueError("There can only be two spatial axes")
# Define number of slider axes.
self.naxis = data.ndim
self.num_sliders = self.naxis-2
# Define marker to determine if plot axes values are supplied via array of
# pixel values or min max pair. This will determine the type of image produced
# and hence how to plot and update it.
self._non_regular_plot_axis = False
# Run init for parent class
super().__init__(data, image_axes=image_axes, axis_ranges=axis_ranges, **kwargs)
def plot_start_image(self, ax):
"""
Sets up plot of initial image.
"""
# Create extent arg
extent = []
# reverse because numpy is in y-x and extent is x-y
if max([len(self.axis_ranges[i]) for i in self.image_axes[::-1]]) > 2:
self._non_regular_plot_axis = True
for i in self.image_axes[::-1]:
if self._non_regular_plot_axis is False and len(self.axis_ranges[i]) > 2:
self._non_regular_plot_axis = True
extent.append(self.axis_ranges[i][0])
extent.append(self.axis_ranges[i][-1])
imshow_args = {'interpolation': 'nearest',
'origin': 'lower'}
imshow_args.update(self.imshow_kwargs)
# If value along an axis is set with an array, generate a NonUniformImage
if self._non_regular_plot_axis:
# If user has inverted the axes, transpose the data so the dimensions match.
if self.image_axes[0] < self.image_axes[1]:
data = self.data[self.frame_index].transpose()
else:
data = self.data[self.frame_index]
# Initialize a NonUniformImage with the relevant data and axis values and
# add the image to the axes.
im = mpl.image.NonUniformImage(ax, **imshow_args)
im.set_data(self.axis_ranges[self.image_axes[0]], self.axis_ranges[self.image_axes[1]], data)
ax.add_image(im)
# Define the xlim and ylim from the pixel edges.
ax.set_xlim(self.extent[0], self.extent[1])
ax.set_ylim(self.extent[2], self.extent[3])
else:
# Else produce a more basic plot with regular axes.
imshow_args.update({'extent': extent})
im = ax.imshow(self.data[self.frame_index], **imshow_args)
if self.if_colorbar:
self._add_colorbar(im)
return im
def update_plot(self, val, im, slider):
"""
Updates plot based on slider/array dimension being iterated.
"""
ind = int(val)
ax_ind = self.slider_axes[slider.slider_ind]
self.frame_slice[ax_ind] = ind
if val != slider.cval:
if self._non_regular_plot_axis:
if self.image_axes[0] < self.image_axes[1]:
data = self.data[self.frame_index].transpose()
else:
data = self.data[self.frame_index]
im.set_data(self.axis_ranges[self.image_axes[0]],
self.axis_ranges[self.image_axes[1]], data)
else:
im.set_array(self.data[self.frame_index])
slider.cval = val
# Update slider label to reflect real world values in axis_ranges.
super().update_plot(val, im, slider)
class ImageAnimatorWCS(ImageAnimator):
"""
Animates N-dimensional data with the associated `astropy.wcs.WCS`
information.
The following keyboard shortcuts are defined in the viewer:
* 'left': previous step on active slider.
* 'right': next step on active slider.
* 'top': change the active slider up one.
* 'bottom': change the active slider down one.
* 'p': play/pause active slider.
This viewer can have user defined buttons added by specifying the labels
and functions called when those buttons are clicked as keyword arguments.
Parameters
----------
data: `numpy.ndarray`
The data to be visualized.
image_axes: `list`, optional
A list of the axes order that make up the image.
unit_x_axis: `astropy.units.Unit`
The unit of X axis.
unit_y_axis: `astropy.units.Unit`
The unit of Y axis.
axis_ranges: `list` of physical coordinates for the `numpy.ndarray`, optional
Defaults to `None` and array indices will be used for all axes.
The `list` should contain one element for each axis of the `numpy.ndarray`.
For the image axes a ``[min, max]`` pair should be specified which will be
passed to `matplotlib.pyplot.imshow` as an extent.
For the slider axes a ``[min, max]`` pair can be specified or an array the
same length as the axis which will provide all values for that slider.
Notes
-----
Extra keywords are passed to `~sunpy.visualization.animator.ArrayAnimator`.
"""
def __init__(self, data, wcs=None, image_axes=[-1, -2], unit_x_axis=None, unit_y_axis=None,
axis_ranges=None, **kwargs):
if not isinstance(wcs, astropy.wcs.WCS):
raise ValueError("wcs data should be provided.")
        if wcs.wcs.naxis != data.ndim:
            raise ValueError("Dimensions of data and wcs do not match")
self.wcs = wcs
list_slices_wcsaxes = [0 for i in range(self.wcs.naxis)]
list_slices_wcsaxes[image_axes[0]] = 'x'
list_slices_wcsaxes[image_axes[1]] = 'y'
self.slices_wcsaxes = list_slices_wcsaxes[::-1]
self.unit_x_axis = unit_x_axis
self.unit_y_axis = unit_y_axis
super().__init__(data, image_axes=image_axes, axis_ranges=axis_ranges, **kwargs)
def _get_main_axes(self):
axes = self.fig.add_axes([0.1, 0.1, 0.8, 0.8], projection=self.wcs,
slices=self.slices_wcsaxes)
self._set_unit_in_axis(axes)
return axes
def _set_unit_in_axis(self, axes):
x_index = self.slices_wcsaxes.index("x")
y_index = self.slices_wcsaxes.index("y")
if self.unit_x_axis is not None:
axes.coords[x_index].set_format_unit(self.unit_x_axis)
axes.coords[x_index].set_ticks(exclude_overlapping=True)
if self.unit_y_axis is not None:
axes.coords[y_index].set_format_unit(self.unit_y_axis)
axes.coords[y_index].set_ticks(exclude_overlapping=True)
def plot_start_image(self, ax):
"""
Sets up a plot of initial image.
"""
imshow_args = {'interpolation': 'nearest',
'origin': 'lower',
}
imshow_args.update(self.imshow_kwargs)
im = ax.imshow(self.data[self.frame_index], **imshow_args)
if self.if_colorbar:
self._add_colorbar(im)
return im
def update_plot(self, val, im, slider):
"""
Updates plot based on slider/array dimension being iterated.
"""
ind = int(val)
ax_ind = self.slider_axes[slider.slider_ind]
self.frame_slice[ax_ind] = ind
list_slices_wcsaxes = list(self.slices_wcsaxes)
list_slices_wcsaxes[self.wcs.naxis-ax_ind-1] = val
self.slices_wcsaxes = list_slices_wcsaxes
if val != slider.cval:
self.axes.reset_wcs(wcs=self.wcs, slices=self.slices_wcsaxes)
self._set_unit_in_axis(self.axes)
im.set_array(self.data[self.frame_index])
slider.cval = val
# Update slider label to reflect real world values in axis_ranges.
super().update_plot(val, im, slider)
| bsd-2-clause | -2,824,382,928,485,726,700 | 41.395455 | 105 | 0.609199 | false |
rbianchi66/survey | src/radioquestion.py | 1 | 2777 | from PyQt4 import QtGui, Qt, QtCore
from question import Question
class RadioQuestion(Question):
def __init__(self, id, question, card, parent = None):
self.buttons = []
super(RadioQuestion, self).__init__(id, question, card, parent)
def updateValue(self, question, answer):
self.card.set(question, answer)
self.emit( Qt.SIGNAL("clicked()"))
def showButtons(self, q):
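        # q is a list of (answer_id, question, label) tuples; with more than five
        # answers the radio buttons are laid out in vertical columns of five.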
qind = 0
if len(q) > 5:
hlay = QtGui.QHBoxLayout()
ncols = len(q) / 5
for nc in xrange(ncols):
qlay = QtGui.QVBoxLayout()
for icol in xrange(5):
element = QtGui.QRadioButton(self)
self.buttons.append(element)
n, question, valore = q[qind]
self.connect(element, Qt.SIGNAL("clicked()"), lambda n = n : self.updateValue(question, n))
if self.card.get(question) == n:
element.setChecked(True)
element.setText(valore)
qlay.addWidget(element)
qind += 1
hlay.addLayout(qlay)
if len(q)%5 > 0:
qlay = QtGui.QVBoxLayout()
for icol in xrange(len(q)%5):
element = QtGui.QRadioButton(self)
self.buttons.append(element)
n, question, val = q[qind]
self.connect(element, Qt.SIGNAL("clicked()"), lambda n = n : self.updateValue(question, n))
if self.card.get(question) == n:
element.setChecked(True)
element.setText(val)
qlay.addWidget(element)
qind += 1
hlay.addLayout(qlay)
self.answers_layout.addLayout(hlay)
else:
for icol in xrange(len(q)):
element = QtGui.QRadioButton(self)
self.buttons.append(element)
n, question, val = q[qind]
self.connect(element, Qt.SIGNAL("clicked()"), lambda n = n : self.updateValue(question, n))
if self.card.get(question) == n:
element.setChecked(True)
element.setText(val)
self.answers_layout.addWidget(element)
qind += 1
if len(self.buttons):
bf = None
for b in self.buttons:
if b.isChecked() == True:
bf = b
if bf is None:
answer, question, valore = q[0]
self.updateValue(question, answer)
self.buttons[0].setChecked(True)
| gpl-2.0 | 6,312,017,638,981,724,000 | 40.723077 | 111 | 0.476413 | false |
wlieurance/aim-reporting | classes.py | 1 | 2503 | import numpy
### sample standard deviation
class stdevs:
def __init__(self):
self.list = []
self.x = 0
def step(self, value):
if value != None:
self.list.append(value)
def finalize(self):
#print(self.list)
if len(self.list) > 1:
self.x = numpy.std(self.list, ddof=1)
else:
self.x = None
return self.x
### population standard deviation
class stdevp:
def __init__(self):
self.list = []
self.x = 0
def step(self, value):
if value != None:
self.list.append(value)
def finalize(self):
#print(self.list)
        # population standard deviation is defined for a single value (it is 0)
        if len(self.list) >= 1:
self.x = numpy.std(self.list, ddof=0)
else:
self.x = None
return self.x
### weighted mean
class meanw:
def __init__(self):
self.wgtlist = []
self.list = []
self.x = 0
def step(self, value, wgt):
if wgt == None:
wgt = 1
if value != None:
self.list.append(value)
self.wgtlist.append(wgt)
def finalize(self):
#print(self.list)
if len(self.list) >= 1:
y = numpy.array(self.list)
w = numpy.array(self.wgtlist)
self.x = (numpy.sum(w*y))/(numpy.sum(w))
else:
self.x = None
return self.x
### weighted standard deviation
class stdevw:
def __init__(self):
self.wgtlist = []
self.list = []
self.x = 0
def step(self, value, wgt):
if wgt == None:
wgt = 1
if value != None:
self.list.append(value)
self.wgtlist.append(wgt)
def finalize(self):
#print(self.list)
if len(self.list) > 1:
#unbiased estimator of variance with sample weights
#https://www.gnu.org/software/gsl/manual/html_node/Weighted-Samples.html
#https://en.wikipedia.org/wiki/Weighted_arithmetic_mean ###Reliability weights
y = numpy.array(self.list)
w = numpy.array(self.wgtlist)
V1 = numpy.sum(w)
V2 = numpy.sum(w**2)
mu = (numpy.sum(w*y)/V1) #weighted mean
muArray = numpy.full(y.size, mu)
sigma2w = numpy.sum(w*((y-muArray)**2))
self.x = (sigma2w/(V1-(V2/V1)))**(0.5)
#print("mu:",mu,"V1:",V1,"V2:",V2,"sigma2w:", sigma2w,"x:", self.x)
else:
self.x = None
return self.x
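# Illustrative sketch (assumption: these step()/finalize() classes are meant to be
# registered as SQLite aggregate functions; not part of the original module):
#   import sqlite3
#   conn = sqlite3.connect(':memory:')
#   conn.create_aggregate('stdevs', 1, stdevs)
#   conn.create_aggregate('meanw', 2, meanw)    # (value, weight)
#   conn.create_aggregate('stdevw', 2, stdevw)  # (value, weight)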
| gpl-3.0 | 3,504,032,938,813,552,000 | 28.104651 | 92 | 0.503396 | false |
ddico/odoo | addons/mail/controllers/main.py | 1 | 14644 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import logging
import psycopg2
import werkzeug.utils
import werkzeug.wrappers
from werkzeug.urls import url_encode
from odoo import api, http, registry, SUPERUSER_ID, _
from odoo.exceptions import AccessError
from odoo.http import request
from odoo.tools import consteq
_logger = logging.getLogger(__name__)
class MailController(http.Controller):
_cp_path = '/mail'
@classmethod
def _redirect_to_messaging(cls):
url = '/web#%s' % url_encode({'action': 'mail.action_discuss'})
return werkzeug.utils.redirect(url)
@classmethod
def _check_token(cls, token):
base_link = request.httprequest.path
params = dict(request.params)
params.pop('token', '')
valid_token = request.env['mail.thread']._notify_encode_link(base_link, params)
return consteq(valid_token, str(token))
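    # Illustrative note (hypothetical values): a notification link such as
    # /mail/assign?model=crm.lead&res_id=7&token=<sig> passes this check only when
    # <sig> equals _notify_encode_link('/mail/assign', {'model': 'crm.lead', 'res_id': '7'}).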
@classmethod
def _check_token_and_record_or_redirect(cls, model, res_id, token):
comparison = cls._check_token(token)
if not comparison:
_logger.warning(_('Invalid token in route %s', request.httprequest.url))
return comparison, None, cls._redirect_to_messaging()
try:
record = request.env[model].browse(res_id).exists()
except Exception:
record = None
redirect = cls._redirect_to_messaging()
else:
redirect = cls._redirect_to_record(model, res_id)
return comparison, record, redirect
@classmethod
def _redirect_to_record(cls, model, res_id, access_token=None, **kwargs):
# access_token and kwargs are used in the portal controller override for the Send by email or Share Link
# to give access to the record to a recipient that has normally no access.
uid = request.session.uid
user = request.env['res.users'].sudo().browse(uid)
cids = False
# no model / res_id, meaning no possible record -> redirect to login
if not model or not res_id or model not in request.env:
return cls._redirect_to_messaging()
# find the access action using sudo to have the details about the access link
RecordModel = request.env[model]
record_sudo = RecordModel.sudo().browse(res_id).exists()
if not record_sudo:
# record does not seem to exist -> redirect to login
return cls._redirect_to_messaging()
# the record has a window redirection: check access rights
if uid is not None:
if not RecordModel.with_user(uid).check_access_rights('read', raise_exception=False):
return cls._redirect_to_messaging()
try:
# We need here to extend the "allowed_company_ids" to allow a redirection
# to any record that the user can access, regardless of currently visible
# records based on the "currently allowed companies".
cids = request.httprequest.cookies.get('cids', str(user.company_id.id))
cids = [int(cid) for cid in cids.split(',')]
try:
record_sudo.with_user(uid).with_context(allowed_company_ids=cids).check_access_rule('read')
except AccessError:
# In case the allowed_company_ids from the cookies (i.e. the last user configuration
# on his browser) is not sufficient to avoid an ir.rule access error, try to following
# heuristic:
# - Guess the supposed necessary company to access the record via the method
# _get_mail_redirect_suggested_company
# - If no company, then redirect to the messaging
# - Merge the suggested company with the companies on the cookie
# - Make a new access test if it succeeds, redirect to the record. Otherwise,
# redirect to the messaging.
suggested_company = record_sudo._get_mail_redirect_suggested_company()
if not suggested_company:
raise AccessError('')
cids = cids + [suggested_company.id]
record_sudo.with_user(uid).with_context(allowed_company_ids=cids).check_access_rule('read')
except AccessError:
return cls._redirect_to_messaging()
else:
record_action = record_sudo.get_access_action(access_uid=uid)
else:
record_action = record_sudo.get_access_action()
if record_action['type'] == 'ir.actions.act_url' and record_action.get('target_type') != 'public':
return cls._redirect_to_messaging()
record_action.pop('target_type', None)
# the record has an URL redirection: use it directly
if record_action['type'] == 'ir.actions.act_url':
return werkzeug.utils.redirect(record_action['url'])
# other choice: act_window (no support of anything else currently)
elif not record_action['type'] == 'ir.actions.act_window':
return cls._redirect_to_messaging()
url_params = {
'model': model,
'id': res_id,
'active_id': res_id,
'action': record_action.get('id'),
}
view_id = record_sudo.get_formview_id()
if view_id:
url_params['view_id'] = view_id
if cids:
url_params['cids'] = ','.join([str(cid) for cid in cids])
url = '/web?#%s' % url_encode(url_params)
return werkzeug.utils.redirect(url)
@http.route('/mail/read_followers', type='json', auth='user')
def read_followers(self, follower_ids):
request.env['mail.followers'].check_access_rights("read")
follower_recs = request.env['mail.followers'].sudo().browse(follower_ids)
res_ids = follower_recs.mapped('res_id')
res_models = set(follower_recs.mapped('res_model'))
if len(res_models) > 1:
raise AccessError(_("Can't read followers with different targeted model"))
res_model = res_models.pop()
request.env[res_model].check_access_rights("read")
request.env[res_model].browse(res_ids).check_access_rule("read")
followers = []
follower_id = None
for follower in follower_recs:
if follower.partner_id == request.env.user.partner_id:
follower_id = follower.id
followers.append({
'id': follower.id,
'partner_id': follower.partner_id.id,
'channel_id': follower.channel_id.id,
'name': follower.name,
'email': follower.email,
'is_active': follower.is_active,
            # When editing the followers, the "pencil" icon that leads to editing the
            # subtypes should always be displayed, not only when "debug" mode is activated.
'is_editable': True
})
return {
'followers': followers,
'subtypes': self.read_subscription_data(follower_id) if follower_id else None
}
@http.route('/mail/read_subscription_data', type='json', auth='user')
def read_subscription_data(self, follower_id):
""" Computes:
- message_subtype_data: data about document subtypes: which are
available, which are followed if any """
request.env['mail.followers'].check_access_rights("read")
follower = request.env['mail.followers'].sudo().browse(follower_id)
follower.ensure_one()
request.env[follower.res_model].check_access_rights("read")
request.env[follower.res_model].browse(follower.res_id).check_access_rule("read")
# find current model subtypes, add them to a dictionary
subtypes = request.env['mail.message.subtype'].search([
'&', ('hidden', '=', False),
'|', ('res_model', '=', follower.res_model), ('res_model', '=', False)])
followed_subtypes_ids = set(follower.subtype_ids.ids)
subtypes_list = [{
'name': subtype.name,
'res_model': subtype.res_model,
'sequence': subtype.sequence,
'default': subtype.default,
'internal': subtype.internal,
'followed': subtype.id in followed_subtypes_ids,
'parent_model': subtype.parent_id.res_model,
'id': subtype.id
} for subtype in subtypes]
return sorted(subtypes_list,
key=lambda it: (it['parent_model'] or '', it['res_model'] or '', it['internal'], it['sequence']))
@http.route('/mail/view', type='http', auth='public')
def mail_action_view(self, model=None, res_id=None, access_token=None, **kwargs):
""" Generic access point from notification emails. The heuristic to
choose where to redirect the user is the following :
- find a public URL
- if none found
- users with a read access are redirected to the document
- users without read access are redirected to the Messaging
- not logged users are redirected to the login page
models that have an access_token may apply variations on this.
"""
# ==============================================================================================
# This block of code disappeared on saas-11.3 to be reintroduced by TBE.
# This is needed because after a migration from an older version to saas-11.3, the link
# received by mail with a message_id no longer work.
# So this block of code is needed to guarantee the backward compatibility of those links.
if kwargs.get('message_id'):
try:
message = request.env['mail.message'].sudo().browse(int(kwargs['message_id'])).exists()
except:
message = request.env['mail.message']
if message:
model, res_id = message.model, message.res_id
# ==============================================================================================
if res_id and isinstance(res_id, str):
res_id = int(res_id)
return self._redirect_to_record(model, res_id, access_token, **kwargs)
@http.route('/mail/assign', type='http', auth='user', methods=['GET'])
def mail_action_assign(self, model, res_id, token=None):
comparison, record, redirect = self._check_token_and_record_or_redirect(model, int(res_id), token)
if comparison and record:
try:
record.write({'user_id': request.uid})
except Exception:
return self._redirect_to_messaging()
return redirect
@http.route('/mail/<string:res_model>/<int:res_id>/avatar/<int:partner_id>', type='http', auth='public')
def avatar(self, res_model, res_id, partner_id):
headers = [('Content-Type', 'image/png')]
status = 200
        content = 'R0lGODlhAQABAIABAP///wAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==' # default image is a single white pixel (a 1x1 GIF)
if res_model in request.env:
try:
# if the current user has access to the document, get the partner avatar as sudo()
request.env[res_model].browse(res_id).check_access_rule('read')
if partner_id in request.env[res_model].browse(res_id).sudo().exists().message_ids.mapped('author_id').ids:
status, headers, _content = request.env['ir.http'].sudo().binary_content(
model='res.partner', id=partner_id, field='image_128', default_mimetype='image/png')
# binary content return an empty string and not a placeholder if obj[field] is False
if _content != '':
content = _content
if status == 304:
return werkzeug.wrappers.Response(status=304)
except AccessError:
pass
image_base64 = base64.b64decode(content)
headers.append(('Content-Length', len(image_base64)))
response = request.make_response(image_base64, headers)
response.status = str(status)
return response
@http.route('/mail/needaction', type='json', auth='user')
def needaction(self):
return request.env['res.partner'].get_needaction_count()
@http.route('/mail/init_messaging', type='json', auth='user')
def mail_init_messaging(self):
values = {
'needaction_inbox_counter': request.env['res.partner'].get_needaction_count(),
'starred_counter': request.env['res.partner'].get_starred_count(),
'channel_slots': request.env['mail.channel'].channel_fetch_slot(),
'mail_failures': request.env['mail.message'].message_fetch_failed(),
'commands': request.env['mail.channel'].get_mention_commands(),
'mention_partner_suggestions': request.env['res.partner'].get_static_mention_suggestions(),
'shortcodes': request.env['mail.shortcode'].sudo().search_read([], ['source', 'substitution', 'description']),
'menu_id': request.env['ir.model.data'].xmlid_to_res_id('mail.menu_root_discuss'),
'is_moderator': request.env.user.is_moderator,
'moderation_counter': request.env.user.moderation_counter,
'moderation_channel_ids': request.env.user.moderation_channel_ids.ids,
'partner_root': request.env.ref('base.partner_root').sudo().mail_partner_format(),
'public_partner': request.env.ref('base.public_partner').sudo().mail_partner_format(),
}
return values
@http.route('/mail/get_partner_info', type='json', auth='user')
def message_partner_info_from_emails(self, model, res_ids, emails, link_mail=False):
records = request.env[model].browse(res_ids)
try:
records.check_access_rule('read')
records.check_access_rights('read')
except:
return []
return records._message_partner_info_from_emails(emails, link_mail=link_mail)
@http.route('/mail/get_suggested_recipients', type='json', auth='user')
def message_get_suggested_recipients(self, model, res_ids):
records = request.env[model].browse(res_ids)
try:
records.check_access_rule('read')
records.check_access_rights('read')
except:
return {}
return records._message_get_suggested_recipients()
| agpl-3.0 | 1,157,204,141,075,389,200 | 48.14094 | 123 | 0.593076 | false |
f304646673/scheduler_frame | src/frame/loggingex.py | 1 | 3302 | import os
import sys
import inspect
import logging
import logging.config
from singleton import singleton
@singleton
class loggingex():
def __init__(self, conf_path):
error = 0
while True:
try:
logging.config.fileConfig(conf_path)
except IOError as e:
if error > 1:
raise e
if 2 == e.errno:
if os.path.isdir(e.filename):
os.makedirs(e.filename)
else:
os.makedirs(os.path.dirname(e.filename))
error = error + 1
except Exception as e:
raise e
else:
break
def log_debug(self, msg):
log_debug = logging.getLogger('logger_LogDebug') #https://docs.python.org/2/howto/logging.html
log_debug.debug(msg)
def log_info(self, msg):
log_info = logging.getLogger('logger_LogInfo')
log_info.info(msg)
def log_warning(self, msg):
log_warning_error_critical = logging.getLogger('logger_LogWarningErrorCritical')
log_warning_error_critical.warning(msg)
def log_error(self, msg):
log_warning_error_critical = logging.getLogger('logger_LogWarningErrorCritical')
log_warning_error_critical.error(msg)
def log_critical(self, msg):
log_warning_error_critical = logging.getLogger('logger_LogWarningErrorCritical')
log_warning_error_critical.critical(msg)
def log_error_sql(self, msg):
log_error_sql = logging.getLogger('logger_SQL_ERROR')
log_error_sql.critical(msg)
def LOG_INIT(conf_path):
global logger_obj
logger_obj = loggingex(conf_path)
def modify_msg(msg):
stack_info = inspect.stack()
if len(stack_info) > 2:
file_name = inspect.stack()[2][1]
line = inspect.stack()[2][2]
function_name = inspect.stack()[2][3]
new_msg = file_name + " ^ " + function_name + " ^ " + str(line) + " ^ " + msg
return new_msg
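# Example (illustrative): LOG_INFO("boom") called from foo() at line 42 of bar.py
# logs "bar.py ^ foo ^ 42 ^ boom".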
def LOG_DEBUG(msg):
new_msg = modify_msg(msg)
try:
logger_obj.log_debug(new_msg)
except Exception as e:
print new_msg
def LOG_INFO(msg):
new_msg = modify_msg(msg)
try:
logger_obj.log_info(new_msg)
except Exception as e:
print new_msg
def LOG_WARNING(msg):
new_msg = modify_msg(msg)
try:
logger_obj.log_warning(new_msg)
except Exception as e:
print new_msg
def LOG_ERROR(msg):
new_msg = modify_msg(msg)
try:
logger_obj.log_error(new_msg)
except Exception as e:
print new_msg
def LOG_CRITICAL(msg):
new_msg = modify_msg(msg)
try:
logger_obj.log_critical(new_msg)
except Exception as e:
print new_msg
def LOG_ERROR_SQL(msg):
try:
logger_obj.log_error_sql(msg)
except Exception as e:
print msg
if __name__ == "__main__":
LOG_INIT("../../conf/log.conf")
LOG_DEBUG('LOG_DEBUG')
LOG_INFO('LOG_INFO')
LOG_WARNING('LOG_WARNING')
LOG_ERROR('LOG_ERROR')
LOG_CRITICAL('LOG_CRITICAL')
LOG_ERROR_SQL("Create XXX Error")
#global logger_obj
#logger_obj.log_debug('XXXXXXXXXXXX')
print "Hello World"
| apache-2.0 | -3,504,118,964,041,598,500 | 26.983051 | 109 | 0.574197 | false |
dosarudaniel/coala-bears | bears/python/BanditBear.py | 1 | 2385 | import json
from coalib.bearlib.abstractions.Linter import linter
from coalib.bears.requirements.PipRequirement import PipRequirement
from coalib.results.Result import Result
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.settings.Setting import typed_list
@linter(executable='bandit')
class BanditBear:
"""
Performs security analysis on Python source code, utilizing the ``ast``
module from the Python standard library.
"""
LANGUAGES = {"Python", "Python 2", "Python 3"}
REQUIREMENTS = {PipRequirement('bandit', '1.1')}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'[email protected]'}
LICENSE = 'AGPL-3.0'
CAN_DETECT = {'Security'}
@staticmethod
def create_arguments(filename, file, config_file,
bandit_skipped_tests: typed_list(str)=
('B105', 'B106', 'B107', 'B404', 'B603', 'B606',
'B607')):
"""
:param bandit_skipped_tests:
The IDs of the tests ``bandit`` shall not perform. You can get
information about the available builtin codes at
https://github.com/openstack/bandit#usage.
"""
args = (filename, '-f', 'json')
if bandit_skipped_tests:
args += ('-s', ','.join(bandit_skipped_tests))
return args
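        # Illustrative result (with the default skip list):
        #   ('foo.py', '-f', 'json', '-s', 'B105,B106,B107,B404,B603,B606,B607')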
severity_map = {'HIGH': RESULT_SEVERITY.MAJOR,
'MEDIUM': RESULT_SEVERITY.NORMAL,
'LOW': RESULT_SEVERITY.INFO}
confidence_map = {'HIGH': 90,
'MEDIUM': 70,
'LOW': 50}
def process_output(self, output, filename, file):
output = json.loads(output)
for error in output['errors']:
yield Result.from_values(
origin=self,
file=filename,
severity=RESULT_SEVERITY.MAJOR,
message=error['reason'])
for issue in output['results']:
yield Result.from_values(
origin=issue['test_id'],
file=filename,
message=issue['issue_text'],
severity=self.severity_map[issue['issue_severity']],
confidence=self.confidence_map[issue['issue_confidence']],
line=issue['line_range'][0],
end_line=issue['line_range'][-1])
| agpl-3.0 | -4,884,438,259,301,860,000 | 34.597015 | 75 | 0.571069 | false |
lapineige/Blender_add-ons | Material-Advanced-Override/material_advanded_override_v0-8.py | 1 | 7758 | ######################################################################################################
# A simple add-on that enhances the override material tool (from renderlayer panel)                 #
# Actually partially uncommented - if you do not understand some parts of the code,                 #
# please see a further version or contact me                                                        #
# Author: Lapineige #
# License: GPL v3 #
######################################################################################################
############# Add-on description (used by Blender)
bl_info = {
"name": "Material Advanced Override",
"description": 'Material Override Tools - with advanced exclude options',
"author": "Lapineige",
"version": (0, 8),
"blender": (2, 72, 0),
"location": "Properties > Render Layers",
"warning": "",
"wiki_url": "http://blenderlounge.fr/forum/viewtopic.php?f=26&t=810",
"tracker_url": "http://blenderlounge.fr/forum/viewtopic.php?f=26&t=810",
"category": "Render"}
import bpy
import blf
bpy.types.Scene.OW_only_selected = bpy.props.BoolProperty(name='Affect Only Selected Objects',default=False)
bpy.types.Scene.OW_exclude_type = bpy.props.EnumProperty(items=[('index','Material Index','',0),('group','Group','',1),('layer','Layer','',2)])
bpy.types.Scene.OW_pass_index = bpy.props.IntProperty(name='Pass Index',default=1)
bpy.types.Scene.OW_material = bpy.props.StringProperty(name='Material',maxlen=63)
bpy.types.Scene.OW_group = bpy.props.StringProperty(name='Group',maxlen=63)
bpy.types.Scene.OW_display_override = bpy.props.BoolProperty(name="Show 'Override ON' reminder",default=True)
#
def draw_callback_px(self, context):
if context.scene.OW_display_override:
font_id = 0 # XXX, need to find out how best to get this
blf.position(font_id, 28, bpy.context.area.height-85, 0)
blf.draw(font_id, "Override ON")
#
class OverrideDraw(bpy.types.Operator):
""" """
bl_idname = "view3d.display_override"
bl_label = "Display Override"
bl_options = {'INTERNAL'}
def execute(self, context):
context.area.tag_redraw()
self._handle = bpy.types.SpaceView3D.draw_handler_add(draw_callback_px, (self, context), 'WINDOW', 'POST_PIXEL')
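        # Note: this draw handler is never removed explicitly; the overlay is only
        # hidden by clearing OW_display_override, which draw_callback_px checks.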
return {'FINISHED'}
class OverrideSetup(bpy.types.Operator):
"""Tooltip"""
bl_idname = "render.overwrite_setup"
bl_label = "Overwrite Setup"
l_m = list()
l_mesh = list()
bpy.types.Scene.override_layer = bpy.props.BoolVectorProperty(subtype='LAYER', size=20)
@classmethod
def poll(cls, context):
return context.scene.OW_material
def execute(self, context):
context.scene.OW_display_override = True
bpy.ops.view3d.display_override()
for obj in bpy.data.objects:
if (obj.select == True)*context.scene.OW_only_selected or not context.scene.OW_only_selected:
if not obj.data.name in self.l_mesh:
self.l_mesh.append(obj.data.name)
else:
continue
if not len(obj.material_slots) and hasattr(obj.data,'materials'):
new_mat = bpy.data.materials.new('Default')
obj.data.materials.append(new_mat)
elif len(obj.material_slots):
if context.scene.OW_exclude_type == 'index':
if not obj.material_slots[0].material.pass_index == context.scene.OW_pass_index:
self._save_mat(obj)
self._change_mat(context,obj)
obj.material_slots[0].material = bpy.data.materials[context.scene.OW_material]
elif context.scene.OW_exclude_type == 'group' and context.scene.OW_group:
if obj.name in [g_obj.name for g_obj in bpy.data.groups[context.scene.OW_group].objects]:
self._save_mat(obj)
self._change_mat(context,obj)
obj.material_slots[0].material = bpy.data.materials[context.scene.OW_material]
elif context.scene.OW_exclude_type == 'layer':
if not (True in [(context.scene.override_layer[index])*(context.scene.override_layer[index]==obj.layers[index]) for index in range(len(obj.layers))]):
self._save_mat(obj)
self._change_mat(context,obj)
obj.material_slots[0].material = bpy.data.materials[context.scene.OW_material]
return {'FINISHED'}
def _save_mat(self, obj):
self.l_m.append( (obj,[]) )
for slot in obj.material_slots:
self.l_m[-1][1].append( (slot,slot.material) )
def _change_mat(self, context, obj):
for slot in obj.material_slots:
slot.material = bpy.data.materials[context.scene.OW_material]
class OverrideRestore(bpy.types.Operator):
"""Tooltip"""
bl_idname = "render.overwrite_restore"
bl_label = "Overwrite Restore"
l_m = []
@classmethod
def poll(cls, context):
return True
def execute(self, context):
context.scene.OW_display_override = False
for data in bpy.types.RENDER_OT_overwrite_setup.l_m:
obj, mat_data = data
for slot, material in mat_data:
slot.material = material
bpy.types.RENDER_OT_overwrite_setup.l_m = list()
bpy.types.RENDER_OT_overwrite_setup.l_mesh = list()
return {'FINISHED'}
class MaterialOverrideTools(bpy.types.Panel):
""" """
bl_label = "Material Override Tools"
bl_idname = "material_override_tools"
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = "render_layer"
def draw(self, context):
layout = self.layout
if bpy.types.RENDER_OT_overwrite_setup.l_m:
layout.operator('render.overwrite_restore')
layout.label('Do not save before having restored the material(s)', icon='CANCEL')
layout.prop(context.scene, 'OW_display_override')
else:
layout.operator('render.overwrite_setup')
layout.prop_search(context.scene, "OW_material", bpy.data, "materials", icon='MATERIAL_DATA')
layout.prop(context.scene, 'OW_only_selected',toggle=True, icon='BORDER_RECT')
box = layout.box()
box.label('Exclude from effect:')
row = box.row()
row.prop(context.scene, 'OW_exclude_type', expand=True)
if context.scene.OW_exclude_type == 'index':
box.prop(context.scene, 'OW_pass_index')
elif context.scene.OW_exclude_type == 'group':
box.prop_search(context.scene, "OW_group", bpy.data, "groups", icon='GROUP')
elif context.scene.OW_exclude_type == 'layer':
box.prop(context.scene, 'override_layer', text='')
def register():
bpy.utils.register_class(OverrideSetup)
bpy.utils.register_class(OverrideRestore)
bpy.utils.register_class(MaterialOverrideTools)
bpy.utils.register_class(OverrideDraw)
def unregister():
if bpy.types.RENDER_OT_overwrite_setup.l_m:
bpy.ops.render.overwrite_restore() # To make sure materials will be restored
bpy.utils.unregister_class(OverrideSetup)
bpy.utils.unregister_class(OverrideRestore)
bpy.utils.unregister_class(MaterialOverrideTools)
bpy.utils.unregister_class(OverrideDraw)
if __name__ == "__main__":
register()
| gpl-3.0 | -2,470,813,614,757,134,000 | 42.1 | 174 | 0.579144 | false |
ownport/ansiblite | src/ansiblite/utils/encrypt.py | 1 | 6355 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import stat
import tempfile
import multiprocessing
import time
import warnings
PASSLIB_AVAILABLE = False
try:
import passlib.hash
PASSLIB_AVAILABLE = True
except ImportError:
pass
from ansiblite.utils.display import Display
display = Display()
KEYCZAR_AVAILABLE = False
try:
try:
# some versions of pycrypto may not have this?
from Crypto.pct_warnings import PowmInsecureWarning
except ImportError:
PowmInsecureWarning = RuntimeWarning
with warnings.catch_warnings(record=True) as warning_handler:
warnings.simplefilter("error", PowmInsecureWarning)
try:
import keyczar.errors as key_errors
from keyczar.keys import AesKey
except PowmInsecureWarning:
display.system_warning(
"The version of gmp you have installed has a known issue regarding " + \
"timing vulnerabilities when used with pycrypto. " + \
"If possible, you should update it (i.e. yum update gmp)."
)
warnings.resetwarnings()
warnings.simplefilter("ignore")
import keyczar.errors as key_errors
from keyczar.keys import AesKey
    KEYCZAR_AVAILABLE = True
except ImportError:
pass
from ansiblite import constants as C
from ansiblite.errors import AnsibleError
from ansiblite.utils._text import to_text, to_bytes
__all__ = ['do_encrypt']
_LOCK = multiprocessing.Lock()
def do_encrypt(result, encrypt, salt_size=None, salt=None):
if PASSLIB_AVAILABLE:
try:
crypt = getattr(passlib.hash, encrypt)
        except AttributeError:
raise AnsibleError("passlib does not support '%s' algorithm" % encrypt)
if salt_size:
result = crypt.encrypt(result, salt_size=salt_size)
elif salt:
if crypt._salt_is_bytes:
salt = to_bytes(salt, encoding='ascii', errors='strict')
else:
salt = to_text(salt, encoding='ascii', errors='strict')
result = crypt.encrypt(result, salt=salt)
else:
result = crypt.encrypt(result)
else:
raise AnsibleError("passlib must be installed to encrypt vars_prompt values")
# Hashes from passlib.hash should be represented as ascii strings of hex
# digits so this should not traceback. If it's not representable as such
# we need to traceback and then blacklist such algorithms because it may
# impact calling code.
return to_text(result, errors='strict')
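# Usage sketch for do_encrypt() -- 'sha512_crypt' is one of passlib's
# standard handler names; the salt size is an arbitrary example, not a
# project default:
#
#   hashed = do_encrypt('secret', 'sha512_crypt', salt_size=8)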
def key_for_hostname(hostname):
# fireball mode is an implementation of ansible firing up zeromq via SSH
# to use no persistent daemons or key management
if not KEYCZAR_AVAILABLE:
raise AnsibleError("python-keyczar must be installed on the control machine to use accelerated modes")
key_path = os.path.expanduser(C.ACCELERATE_KEYS_DIR)
if not os.path.exists(key_path):
# avoid race with multiple forks trying to create paths on host
# but limit when locking is needed to creation only
        with _LOCK:
if not os.path.exists(key_path):
# use a temp directory and rename to ensure the directory
# searched for only appears after permissions applied.
tmp_dir = tempfile.mkdtemp(dir=os.path.dirname(key_path))
os.chmod(tmp_dir, int(C.ACCELERATE_KEYS_DIR_PERMS, 8))
os.rename(tmp_dir, key_path)
elif not os.path.isdir(key_path):
raise AnsibleError('ACCELERATE_KEYS_DIR is not a directory.')
if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_DIR_PERMS, 8):
raise AnsibleError('Incorrect permissions on the private key directory. Use `chmod 0%o %s` to correct this issue, and make sure any of the keys files contained within that directory are set to 0%o' % (int(C.ACCELERATE_KEYS_DIR_PERMS, 8), C.ACCELERATE_KEYS_DIR, int(C.ACCELERATE_KEYS_FILE_PERMS, 8)))
key_path = os.path.join(key_path, hostname)
# use new AES keys every 2 hours, which means fireball must not allow running for longer either
if not os.path.exists(key_path) or (time.time() - os.path.getmtime(key_path) > 60*60*2):
# avoid race with multiple forks trying to create key
# but limit when locking is needed to creation only
        with _LOCK:
if not os.path.exists(key_path) or (time.time() - os.path.getmtime(key_path) > 60*60*2):
key = AesKey.Generate()
# use temp file to ensure file only appears once it has
# desired contents and permissions
with tempfile.NamedTemporaryFile(mode='w', dir=os.path.dirname(key_path), delete=False) as fh:
tmp_key_path = fh.name
fh.write(str(key))
os.chmod(tmp_key_path, int(C.ACCELERATE_KEYS_FILE_PERMS, 8))
os.rename(tmp_key_path, key_path)
return key
if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_FILE_PERMS, 8):
raise AnsibleError('Incorrect permissions on the key file for this host. Use `chmod 0%o %s` to correct this issue.' % (int(C.ACCELERATE_KEYS_FILE_PERMS, 8), key_path))
fh = open(key_path)
key = AesKey.Read(fh.read())
fh.close()
return key
def keyczar_encrypt(key, msg):
return key.Encrypt(msg.encode('utf-8'))
def keyczar_decrypt(key, msg):
try:
return key.Decrypt(msg)
except key_errors.InvalidSignatureError:
raise AnsibleError("decryption failed")
| gpl-3.0 | 654,723,775,037,806,000 | 39.737179 | 307 | 0.663415 | false |
0xquad/mfu | tests.py | 1 | 18569 | #!/usr/bin/env python3
#
# The test program for the MFU Python library.
#
# Copyright (c) 2015, Alexandre Hamelin <alexandre.hamelin gmail.com>
#
# This work is distributed under the LGPL license. See LICENSE.txt for details.
import sys
import unittest
from unittest.mock import Mock, call, patch, ANY
from ultralight import MFUCard, MFUPage, MFUPageViewProxy
class MFUTests(unittest.TestCase):
def __init__(self, *args):
super().__init__(*args)
content = (
'04AD7150'
'FADA2E80'
'8E48E000'
'00000000'
'00000000'
'31880220'
'633C0000'
'E92D2412'
'00000000'
'00000000'
'00013634'
'0000907B'
'00000000'
'00000000'
'00000000'
'00000000'
)
content = bytearray.fromhex(content)
self.card = MFUCard(bytes=content)
def test_iter_bytes(self):
iterator = iter(self.card)
firstbytes = [next(iterator) for i in range(4)]
self.assertEqual(firstbytes, [0x04, 0xad, 0x71, 0x50])
for i in range(len(self.card) - 4):
next(iterator)
with self.assertRaises(StopIteration):
next(iterator)
def test_length(self):
self.assertEqual(len(self.card), 64)
def test_hexdump(self):
output = []
def myprint(data, *args, **kwargs):
output.append(str(data))
output.append('\n')
mock_print = Mock(side_effect=myprint)
# patching sys.stdout doesn't work since the function already has
# a reference to the real sys.stdout at define time
with patch('builtins.print', mock_print):
self.card.hexdump()
expected = (
'04ad7150\n'
'fada2e80\n'
'8e48e000\n'
'00000000\n'
'00000000\n'
'31880220\n'
'633c0000\n'
'e92d2412\n'
'00000000\n'
'00000000\n'
'00013634\n'
'0000907b\n'
'00000000\n'
'00000000\n'
'00000000\n'
'00000000\n'
)
self.assertEqual(''.join(output), expected)
def test_hexdump_with_custom_output(self):
output = []
def write(data):
output.append(data)
filemock = Mock()
filemock.write.side_effect = write
self.card.hexdump(file=filemock)
expected = (
'04ad7150\n'
'fada2e80\n'
'8e48e000\n'
'00000000\n'
'00000000\n'
'31880220\n'
'633c0000\n'
'e92d2412\n'
'00000000\n'
'00000000\n'
'00013634\n'
'0000907b\n'
'00000000\n'
'00000000\n'
'00000000\n'
'00000000\n'
)
self.assertEqual(''.join(output), expected)
def test_dump(self):
output = []
def write(data):
output.append(data)
filemock = Mock(sys.stdout)
filemock.write.side_effect = write
self.card.dump(filemock)
expected = (
b'\x04\xad\x71\x50'
b'\xfa\xda\x2e\x80'
b'\x8e\x48\xe0\x00'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x31\x88\x02\x20'
b'\x63\x3c\x00\x00'
b'\xe9\x2d\x24\x12'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x00\x01\x36\x34'
b'\x00\x00\x90\x7b'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00'
)
self.assertEqual(b''.join(output), expected)
def test_page_view_is_singleton(self):
view1 = self.card.pages
view2 = self.card.pages
self.assertIs(view1, view2)
def test_get_byte_by_index(self):
self.assertEqual(self.card[0], 4)
self.assertEqual(self.card[4], 0xfa)
self.assertEqual(self.card[8], 0x8e)
self.assertEqual(self.card[47], 0x7b)
self.assertEqual(self.card[-1], 0)
self.assertEqual(self.card[-len(self.card)], 4)
def test_get_bytes_by_slice(self):
data = self.card[:4]
self.assertEqual(data, b'\x04\xad\x71\x50')
data = self.card[10:12]
self.assertEqual(data, b'\xe0\x00')
data = self.card[60:]
self.assertEqual(data, b'\x00\x00\x00\x00')
def test_get_bytes_by_invalid_index(self):
for i in (str, dict, list, tuple, set, bytes, bytearray,
complex, lambda: None, object()):
with self.assertRaises(TypeError):
self.card[i]
with self.assertRaises(IndexError):
self.card[-len(self.card)-1]
with self.assertRaises(IndexError):
self.card[len(self.card)]
def test_get_page(self):
self.assertEqual(bytes(self.card.pages[0]), b'\x04\xAD\x71\x50')
def test_byte_by_getitem(self):
self.assertEqual(self.card[0], 0x04)
def test_bytes_by_slice(self):
self.assertEqual(self.card[:2], b'\x04\xAD')
def test_basic_attributes(self):
self.assertEqual(self.card.id, 0x04ad71fada2e80)
self.assertEqual(self.card.manufacturer, 0x04)
self.assertEqual(self.card.bcc0, 0x50)
self.assertEqual(self.card.bcc1, 0x8e)
self.assertEqual(self.card.id0, 0x04)
self.assertEqual(self.card.id1, 0xad)
self.assertEqual(self.card.id2, 0x71)
self.assertEqual(self.card.id3, 0xfa)
self.assertEqual(self.card.id4, 0xda)
self.assertEqual(self.card.id5, 0x2e)
self.assertEqual(self.card.id6, 0x80)
def test_first_pages_are_readonly(self):
for p in [0, 1]:
self.assertTrue(self.card.pages[p].readonly)
def test_locked_pages_are_read_only(self):
self.skipTest('not implemented')
#self.assertTrue(card.otp_locked)
#self.assertTrue(card.pages4to9_blocked)
#self.assertFalse(card.pages10to15_blocked)
#self.assertFalse(card.otp_blocked)
def test_init_default_empty(self):
# initialized to all zeroes
card = MFUCard()
self.assertEqual(bytes(card), b'\x00' * 64)
def test_init_one_param_only(self):
with self.assertRaises(RuntimeError):
mfu = MFUCard(bytes=b'abcd'*4, file=1)
with self.assertRaises(RuntimeError):
mfu = MFUCard(bytes=b'abcd'*4, hexfile=1)
with self.assertRaises(RuntimeError):
mfu = MFUCard(file=1, hexfile=1)
with self.assertRaises(RuntimeError):
mfu = MFUCard(bytes=b'abcd'*4, file=1, hexfile=1)
def test_init_bytestring(self):
# initialized with bytes, must be 64 bytes
card = MFUCard(bytes=b'\x01' * 64)
self.assertEqual(bytes(card), b'\x01' * 64)
def test_init_bytes(self):
card = MFUCard(bytes=bytes(64))
self.assertEqual(bytes(card), b'\x00' * 64)
card = MFUCard(bytes=bytearray([i for i in range(64)]))
self.assertEqual(list(card), [i for i in range(64)])
def test_init_from_file(self):
# load from a 64-byte binary file
        content = b'\x01\x02\x03\x04' * 16
fp_mock = Mock(sys.stdin)
fp_mock.fileno.return_value = 3
with patch('builtins.open', return_value=fp_mock) as mock_open, \
patch('os.read', return_value=content) as mock_sysread:
card = MFUCard(file='card.bin')
self.assertEqual(bytes(card), content)
def test_init_from_file_descriptor(self):
def sysread(desc, n):
return b'\x01' * n
with patch('os.read', wraps=sysread) as mock_sysread:
card = MFUCard(file=3)
mock_sysread.assert_called_with(3, ANY)
self.assertEqual(bytes(card), b'\x01' * 64)
def test_init_from_hexfile(self):
# load from an ASCII hex file, spaces ignored, case-insensitive
content = b'0badc0de' * 16
fp_mock = Mock(sys.stdin)
fp_mock.fileno.return_value = 3
with patch('builtins.open', return_value=fp_mock) as mock_open, \
patch('os.read', return_value=content) as mock_sysread:
card = MFUCard(hexfile='card.txt')
self.assertEqual(bytes(card), b'\x0b\xad\xc0\xde' * 16)
def test_init_from_hexfile_file_descriptor(self):
def sysread(desc, n):
if not hasattr(sysread, 'filepos'):
sysread.filepos = 0
filedata = (
b'00010203'
b'01020304'
b'02030405'
b'03040506'
b'04050607'
b'05060708'
b'06070809'
b'0708090a'
b'08090a0b'
b'090a0b0c'
b'0a0b0c0d'
b'0b0c0d0e'
b'0c0d0e0f'
b'0d0e0f00'
b'0e0f0001'
b'0f000102'
)
chunk = filedata[sysread.filepos:sysread.filepos+n]
sysread.filepos = min(sysread.filepos + n, len(filedata))
return chunk
with patch('os.read', wraps=sysread) as mock_sysread:
card = MFUCard(hexfile=3)
mock_sysread.assert_called_with(3, ANY)
expected = b''.join(bytes([i, (i + 1) % 16,
(i + 2) % 16,
(i + 3) % 16])
for i in range(16))
self.assertEqual(bytes(card), expected)
class MFUPageTests(unittest.TestCase):
def __init__(self, name):
super().__init__(name)
card = MFUCard(bytes=bytes([1,2,3,4]*16))
self.page = MFUPage(card, 0)
def test_iter_bytes(self):
byteiter = iter(self.page)
b = next(byteiter)
self.assertEqual(b, 1)
b = next(byteiter)
self.assertEqual(b, 2)
b = next(byteiter)
self.assertEqual(b, 3)
b = next(byteiter)
self.assertEqual(b, 4)
with self.assertRaises(StopIteration):
next(byteiter)
def test_as_list(self):
bytelist = list(self.page)
self.assertIsInstance(bytelist, list)
self.assertEqual(bytelist, [1, 2, 3, 4])
def test_slice(self):
self.assertEqual(self.page[0], 1)
self.assertEqual(self.page[1:-1], b'\x02\x03')
@unittest.skip('item assignment is not implemented')
def test_set_bytes_types(self):
self.assertNotEqual(self.page[0], 99)
self.page[0] = 99
self.assertEqual(self.page[0], 99)
self.page[0] = b'\x99'
self.assertEqual(self.page[0], 0x99)
@unittest.skip('item assignment is not implemented')
def test_set_bytes_negative_index(self):
self.assertNotEqual(self.page[-1], 99)
self.page[-1] = 99
self.assertEqual(self.page[-1], 99)
@unittest.skip('item assignment is not implemented')
def test_set_bytes_slice_value_types(self):
self.assertNotEqual(self.page[:2], b'\x88\x99')
self.page[:2] = bytes([0x88, 0x99])
self.assertEqual(self.page[:2], b'\x88\x99')
self.page[:2] = bytes([0x10, 0x20])
self.assertEqual(self.page[:2], b'\x10\x20')
self.page[:2] = b'\x11\x21'
self.assertEqual(self.page[:2], b'\x11\x21')
self.page[:2] = [0x12, 0x22]
self.assertEqual(self.page[:2], b'\x12\x22')
        class C:
            def __iter__(self):
                yield 0x13
                yield 0x23
self.page[:2] = C()
self.assertEqual(self.page[:2], b'\x13\x23')
@unittest.skip('item assignment is not implemented')
def test_set_bytes_invalid_value(self):
for t in (str, complex, float, set, list, tuple, dict):
with self.assertRaises(ValueError):
self.page[0] = t()
with self.assertRaises(ValueError):
self.page[0] = 256
with self.assertRaises(ValueError):
self.page[0] = -1
@unittest.skip('item assignment is not implemented')
def test_set_bytes_invalid_index(self):
for t in (str, complex, float, set, list, tuple, dict):
with self.assertRaises(TypeError):
self.page[t()] = 0
with self.assertRaises(ValueError):
self.page[5] = 0
def test_invalid_index(self):
for t in (str, list, set, dict, complex, object):
with self.assertRaises(TypeError):
self.page[t()]
def test_to_hex(self):
hexstr = self.page.to_hex()
self.assertEqual(hexstr, '01020304')
def test_to_int(self):
value = self.page.to_int()
self.assertEqual(value, 0x01020304)
def test_length(self):
self.assertEqual(len(self.page), 4)
def test_init_invalid_page(self):
card = MFUCard()
with self.assertRaises(ValueError):
MFUPage(card, -1)
with self.assertRaises(ValueError):
MFUPage(card, 16)
def test_init_invalid_card(self):
card = object()
with self.assertRaises(TypeError):
MFUPage(card, 0)
def test_readonly(self):
card = MFUCard()
pages = [MFUPage(card, i) for i in range(16)]
for p in (0, 1):
self.assertTrue(pages[p].readonly)
for p in range(2, 16):
self.assertFalse(pages[p].readonly)
card = MFUCard(bytes=
b'\x00\x00\x00\x00' * 2 +
# lock bytes value = 0x55aa
# meaning: pages 5, 7, 8, 10, 12, 14 are LOCKED
# pages 4, 6, 9, 11, 13, 15 are not locked
# otp locking protection is off
# pages 9-4 locking protection is ON
# pages 15-10 locking protection is off
# otp area is LOCKED
b'\x00\x00\xaa\x55' +
b'\x00\x00\x00\x00' * 13
)
pages = [MFUPage(card, i) for i in range(16)]
for p in (0, 1):
# readonly pages
self.assertTrue(pages[p].readonly)
for p in (5, 7, 8, 10, 12, 14):
# locked pages
self.assertTrue(pages[p].readonly)
for p in (4, 6, 9, 11, 13, 15):
# pages not locked
self.assertFalse(pages[p].readonly)
class MFUPageViewProxyTests(unittest.TestCase):
def __init__(self, name):
super().__init__(name)
self.card = MFUCard()
def test_length(self):
self.assertEqual(len(self.card.pages), 16)
def test_pages_proxy(self):
self.assertIsInstance(self.card.pages, MFUPageViewProxy)
def test_page_by_index(self):
self.assertIsInstance(self.card.pages[0], MFUPage)
self.assertIs(self.card.pages[-1], self.card.pages[15])
def test_pages_by_slice(self):
pages = self.card.pages[:2]
self.assertIsInstance(pages, list)
self.assertEqual(len(pages), 2)
self.assertTrue(all(isinstance(p, MFUPage) for p in pages))
pages = self.card.pages[10:]
self.assertIsInstance(pages, list)
self.assertEqual(len(pages), 6)
self.assertTrue(all(isinstance(p, MFUPage) for p in pages))
pages = self.card.pages[8:10]
self.assertIsInstance(pages, list)
self.assertEqual(len(pages), 2)
self.assertTrue(all(isinstance(p, MFUPage) for p in pages))
pages = self.card.pages[10:8:-1]
self.assertIsInstance(pages, list)
self.assertEqual(len(pages), 2)
self.assertTrue(all(isinstance(p, MFUPage) for p in pages))
pages = self.card.pages[:1]
self.assertIsInstance(pages, list)
self.assertEqual(len(pages), 1)
self.assertTrue(all(isinstance(p, MFUPage) for p in pages))
def test_page_by_invalid_index(self):
with self.assertRaises(IndexError):
self.card.pages[16]
for t in (object, str, float, complex, bytes, bytearray):
with self.assertRaises(TypeError):
self.card.pages[t()]
def test_page_iterator(self):
iterable = iter(self.card.pages)
item = next(iterable)
self.assertIsInstance(item, MFUPage)
self.assertIs(item, self.card.pages[0])
items = list(iterable)
self.assertEqual(len(items), 15)
for i, p in enumerate(items):
self.assertIs(p, self.card.pages[i + 1])
def test_set_page_from_int(self):
self.card.pages[0] = 0x11223344
self.assertEqual(self.card.pages[0].to_int(), 0x11223344)
self.assertEqual(self.card.pages[0].to_hex(), '11223344')
def test_set_page_from_bytes(self):
self.card.pages[0] = bytes([0x11, 0x22, 0x33, 0x44])
self.assertEqual(self.card.pages[0].to_int(), 0x11223344)
self.assertEqual(self.card.pages[0].to_hex(), '11223344')
self.card.pages[0] = b'\x55\x66\x77\x88'
self.assertEqual(self.card.pages[0].to_int(), 0x55667788)
self.assertEqual(self.card.pages[0].to_hex(), '55667788')
def test_set_page_from_bytearray(self):
self.card.pages[0] = bytearray([0x11, 0x22, 0x33, 0x44])
self.assertEqual(self.card.pages[0].to_int(), 0x11223344)
self.assertEqual(self.card.pages[0].to_hex(), '11223344')
def test_set_page_from_string(self):
self.card.pages[0] = '\x11\x22\x33\x44'
self.assertEqual(self.card.pages[0].to_int(), 0x11223344)
self.assertEqual(self.card.pages[0].to_hex(), '11223344')
def test_set_page_with_invalid_value(self):
for t in (object, complex, float, dict, set, list, tuple):
with self.assertRaises(ValueError):
self.card.pages[0] = t()
with self.assertRaises(ValueError):
self.card.pages[0] = None
def test_set_page_with_invalid_int_index(self):
with self.assertRaises(IndexError):
self.card.pages[len(self.card.pages)] = 0
def test_set_page_with_invalid_index(self):
for t in (str, object, complex, float, dict, set, list, tuple):
with self.assertRaises(TypeError):
self.card.pages[t()] = 0
def test_set_page_slices_unsupported(self):
with self.assertRaises(NotImplementedError):
self.card.pages[:2] = [0, 0]
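# Quick interactive sketch mirroring what the tests above exercise
# (illustrative only):
#
#   card = MFUCard(bytes=b'\x00' * 64)   # blank 64-byte Ultralight image
#   card.hexdump()                       # prints 16 pages of 4 bytes each
#   page0 = card.pages[0]                # MFUPage view onto the card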
if __name__ == '__main__':
    unittest.main()
| lgpl-3.0 | -3,854,851,223,214,129,000 | 29.743377 | 79 | 0.557865 | false |
ghost9023/DeepLearningPythonStudy | DeepLearning/DeepLearning/09_Deep_SongJW/numpy_study.py | 1 | 4208 | import numpy as np
'''
Creating vectors and matrices: number of dimensions and shape
'''
# A=np.array([1,2,3,4])
# print(A)
# print(np.ndim(A)) # ndim() : returns the number of dimensions
# print(A.shape) # shape : returns the shape as a tuple; a vector's tuple has a single element, e.g. (4,)
# print(A.shape[0])
# B=np.array([[1,2],[3,4],[5,6]])
# print(B)
# print(np.ndim(B)) # 2
# print(B.shape) # (3,2)
'''
Matrix dot product
'''
# A=np.array([[1,2],[3,4]])
# print(A.shape)
# B=np.array([[5,6],[7,8]])
# print(B.shape)
# print(np.dot(A,B)) # dot(A,B) : dot product; in general dot(A,B) != dot(B,A)
# A=np.array([[1,2,3],[4,5,6]])
# print(A.shape)
# B=np.array([[1,2],[3,4],[5,6]])
# print(B.shape)
# print(np.dot(A,B))
# print(np.dot(B,A)) # 2x3 X 3x2 = 2x2, 3x2 X 2x3 = 3x3
# error: the number of columns of the first matrix does not match the number of rows of the second
# A=np.array([[1,2,3],[4,5,6]])
# C=np.array([[1,2], [3,4]])
# print(C.shape)
# print(A.shape)
# print(np.dot(A,C)) # ValueError: shapes (2,3) and (2,2) not aligned: 3 (dim 1) != 2 (dim 0)
# index : row = 0, column = 1
# # product of a matrix and a vector
# A=np.array([[1,2],[3,4],[5,6]])
# print(A.shape)
# B=np.array([7,8])
# print(B.shape)
# C=np.dot(A,B)
# print(C, C.shape)
####################################################################################
#
# print('\nElement access')
# a=np.array([[51, 55],[14, 19],[0,4]])
# print(a)
# print(a[0])
# print(a[0][1])
# b=np.array([1,2,3,4,5,6])
# print(b[np.array([0,1,3])]) # access vector elements with an index vector
# x=np.array([10,20,25,30,5,10])
# print(x[x>15]) # filter elements with a boolean condition
# print(x>15) # produces a boolean vector
# print('create array\n')
# a=np.array([1,5])
# b=np.array([[1,2],[2,3]])
# c=np.array([[1],[2],[3]])
# d=np.arange(1,5,1) # values 1 through 4 in steps of 1
# e=np.arange(1,7,1).reshape(2,3) # values 1 through 6 laid out as 2 rows and 3 columns
# print(a)
# print(b)
# print(c)
# print(d)
# print(e,'\n')
#
# print('operation\n')
# x1=np.array([1,2,3])
# y1=np.array([5,10,15])
# x2=np.array([[1,2],[3,4]])
# y2=np.array([[5,10],[15,20]])
# z1=np.array([-1, -2])
# z2=np.array([[5],[10],[15]])
#
# print('Basic operations act on corresponding elements')
# print(x1+y1)
# print(x1-y1)
# print(x1*y1)
# print(x1/y1)
# print(x2+y2)
# print(x2*y2,'\n')
#
# print('Broadcasting\nwhen the number of matrix columns equals the vector length,\n'
#       'the vector is replicated across the matrix rows and combined elementwise')
# print(x2+z1)
# print(x2*z1,'\n')
#
# print('Miscellaneous')
# print(x1**2)
# print(x1>=2)
# print(x2.flatten()) # flatten a matrix into a vector
# print(x2.reshape(4,1)) # reshape the matrix
# print(x2.reshape(1,4))
#
# print('Stacking')
# a=np.array([1,2,3])
# b=np.array([3,4,5])
# print(a.shape,b.shape)
# print(np.vstack([a,b]))
# print(np.hstack([a,b]))
#
# print('Common functions')
# a=np.array([1,2,3,6,5,4])
# print(np.argmax(a), a[np.argmax(a)]) # the index of the max value, then the max value fetched by that index
# a=np.array([[1,2,3],[4,6,5],[9,8,7]])
# print(np.argmax(a,axis=0), np.argmax(a,axis=1)) # axis : 0 gives the argmax per column, 1 per row
# print()
#
# print('Transpose')
# a=np.array([[1,2,3],[4,5,6]])
# print(a,'\n',np.transpose(a))
# b=np.array([1,2,3,4,5])
# print(np.transpose(b)) # a 1-D vector is unchanged by transpose
#
# print('\nDot product')
# a=np.array([[1,2],[3,4]])
# b=np.array([[5,6],[7,8]])
# c=np.array([1,2,3])
# d=np.array([[1],[2],[3]]) # the product of two vectors equals a row vector times a column vector
# print(np.dot(a,b))
# print(np.dot(c,d))
#
# print('\nBetween two neural-network layers: 2 inputs connected to 3 nodes')
# input=np.array([1,2]) # inputs 1, 2
# weight=np.array([[1,3,5],[2,4,6]]) # the node connections carry weights (1,2), (3,4), (5,6) in order
# net_input=np.dot(input,weight)
# print(net_input)
# print('\nAccessing vector elements')
# a=np.array([1,2,3,4,5])
# print(a.size) # vector size
# print(a[3])
#
# print('\nCopying vectors and matrices')
# b=a
# c=a[:]
# print(id(a), id(b), id(c))
# d=np.array([[1,2],[3,4]])
# e=d
# f=d[:]
# print(id(d),id(e),id(f))
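# A short runnable recap of broadcasting (illustrative example, not part of
# the original study notes):
m = np.array([[1, 2], [3, 4]])   # 2x2 matrix
v = np.array([10, 20])           # vector whose length matches the column count
print(m + v)                     # v is broadcast over each row -> [[11 22] [13 24]]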
| mit | 7,087,468,554,610,165,000 | 22.226667 | 96 | 0.520953 | false |
AlexStarov/Shop | applications/bitrix/management/commands/1cbitrix.py | 1 | 12839 | # -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
import xml.etree.ElementTree as ET
from time import sleep
import os
from applications.product.models import Category
__author__ = 'AlexStarov'
def search_in_category(name, id_1c, parent=None, ):
try:
cat = Category.objects.get(title=name, id_1c=id_1c, parent=parent)
return cat
except Category.DoesNotExist:
cat = Category()
cat.title = name
if parent:
cat.parent = parent
cat.id_1c = id_1c
# cat.save()
return None
except Category.MultipleObjectsReturned:
cats = Category.objects.filter(title=name, id_1c=id_1c, parent=parent)
if len(cats) > 1:
            raise RuntimeError('MultiCat')  # a bare string cannot be raised in Python 3
elif len(cats) == 1:
return cats[0]
elif len(cats) == 0:
try:
cat = Category.objects.get(title=name, parent=parent)
except Category.DoesNotExist:
cat = Category()
cat.title = name
if parent:
cat.parent = parent
cat.id_1c = id_1c
# cat.save()
return None
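# Typical use of the helper above while walking the CommerceML category tree
# (the names and ids below are hypothetical):
#
#   parent = search_in_category(name=u'Группа 1', id_1c='abc-123')
#   child = search_in_category(name=u'Группа 1.1', id_1c='def-456', parent=parent)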
class Command(BaseCommand, ):
from optparse import make_option
option_list = BaseCommand.option_list + (
make_option('--id', '--pk', '--delivery_id', '--delivery_pk',
action='store', type='int', dest='delivery_pk',
help=''),
make_option('--t', '--delivery_test', '--test',
action='store_true', dest='delivery_test',
help=''),
make_option('--g', '--delivery_general', '--general',
                    action='store_true', dest='delivery_general',
help=''),
)
#self.verbosity = int(options.get('verbosity'))
#def add_arguments(self, parser):
# parser.add_argument('delivery_id', nargs='+', type=int)
def handle(self, *args, **options):
cwd = os.getcwd()
cwd = os.path.join(cwd, 'db')
for name in os.listdir(cwd):
path_and_filename = os.path.join(cwd, name)
if os.path.isfile(path_and_filename, ) and name == 'import.xml':
root = ET.parse(source=path_and_filename).getroot()
for elem_level1 in root:
# print 'level1', elem_level1, elem_level1.tag, elem_level1.attrib, elem_level1.text
if elem_level1.tag == u'Классификатор':
elems_level1 = list(elem_level1)
for elem_level2_Indx, elem_level2 in enumerate(elems_level1):
# print 'level2', elem_level2_Indx, elem_level2, elem_level2.tag, elem_level2.attrib, elem_level2.text
if elem_level2.tag == u'Наименование'\
and elem_level2.text == u'Классификатор (Каталог товаров)'\
and elems_level1[elem_level2_Indx+1].tag == u'Группы':
elems_level2 = list(elems_level1[elem_level2_Indx+1])
for elem_level3_Indx, elem_level3 in enumerate(elems_level2):
# print 'level3', elem_level3_Indx, elem_level3, elem_level3.tag, elem_level3.attrib, elem_level3.text
elems_level3 = list(elem_level3)
for elem_level4_Indx, elem_level4 in enumerate(elems_level3):
# print 'level4', elem_level4_Indx, elem_level4, elem_level4.tag, elem_level4.attrib, elem_level4.text
if elem_level4.tag == u'Наименование' \
and elem_level4.text == u'Товары' \
and elems_level3[elem_level4_Indx + 1].tag == u'Группы':
elems_level4 = list(elems_level3[elem_level4_Indx + 1])
for elem_level5_Indx, elem_level5 in enumerate(elems_level4):
# print 'level5', elem_level5_Indx, elem_level5, elem_level5.tag, elem_level5.attrib, elem_level5.text
if elem_level5.tag == u'Группа':
try:
elems_level5 = list(elems_level4[elem_level5_Indx])
for elem_level6_Indx, elem_level6 in enumerate(elems_level5):
# print 'level6', elem_level6_Indx, elem_level6, elem_level6.tag, elem_level6.attrib, elem_level6.text
if elem_level6.tag == u'Ид' and elems_level5[elem_level6_Indx + 1].tag == u'Наименование':
dict_elem_level6 = {'Id': elem_level6.text, 'Name': elems_level5[elem_level6_Indx + 1].text, }
parent_cat6 = search_in_category(name=dict_elem_level6['Name'], id_1c=dict_elem_level6['Id'])
#print 'level6: ', dict_elem_level6, parent_cat6
if elem_level6.tag == u'Группы':
elems_level6 = list(elems_level5[elem_level6_Indx])
for elem_level7_Indx, elem_level7 in enumerate(elems_level6):
# print 'level7', elem_level7_Indx, elem_level7, elem_level7.tag, elem_level7.attrib, elem_level7.text
if elem_level7.tag == u'Группа':
try:
elems_level7 = list(elems_level6[elem_level7_Indx])
for elem_level8_Indx, elem_level8 in enumerate(elems_level7):
# print 'level8', elem_level8_Indx, elem_level8, elem_level8.tag, elem_level8.attrib, elem_level8.text
if elem_level8.tag == u'Ид' and elems_level7[elem_level8_Indx + 1].tag == u'Наименование':
dict_elem_level8 = {'Id': elem_level8.text, 'Name': elems_level7[elem_level8_Indx + 1].text, }
parent_cat8 = search_in_category(name=dict_elem_level8['Name'], id_1c=dict_elem_level8['Id'], parent=parent_cat6)
#print 'level6: ', dict_elem_level6, parent_cat8
if elem_level8.tag == u'Группы':
elems_level8 = list(elems_level7[elem_level8_Indx])
for elem_level9_Indx, elem_level9 in enumerate(elems_level8):
# print 'level9', elem_level9_Indx, elem_level9, elem_level9.tag, elem_level9.attrib, elem_level9.text
if elem_level9.tag == u'Группа':
try:
elems_level9 = list(elems_level8[elem_level9_Indx])
for elem_level10_Indx, elem_level10 in enumerate(elems_level9):
# print 'level10', elem_level10_Indx, elem_level10, elem_level10.tag, elem_level8.attrib, elem_level10.text
if elem_level10.tag == u'Ид' and elems_level9[elem_level10_Indx + 1].tag == u'Наименование':
dict_elem_level10 = {'Id': elem_level10.text, 'Name': elems_level9[elem_level10_Indx + 1].text, }
parent_cat10 = search_in_category(name=dict_elem_level10['Name'], id_1c=dict_elem_level10['Id'], parent=parent_cat8)
#print 'level6: ', dict_elem_level6, parent_cat10
if elem_level10.tag == u'Группы':
level10 = True
except IndexError:
pass
except IndexError:
pass
except IndexError:
pass
if elem_level1.tag == u'Каталог':
elems_level1 = list(elem_level1)
for elem_level2_Indx, elem_level2 in enumerate(elems_level1):
print('level2', elem_level2_Indx, elem_level2, elem_level2.tag, elem_level2.attrib, elem_level2.text, )
if elem_level2.tag == u'Наименование' \
and elem_level2.text == u'Каталог товаров' \
and elems_level1[elem_level2_Indx + 1].tag == u'Товары':
elems_level2 = list(elems_level1[elem_level2_Indx + 1])
for elem_level3_Indx, elem_level3 in enumerate(elems_level2):
# print 'level3', elem_level3_Indx, elem_level3, elem_level3.tag, elem_level3.attrib, elem_level3.text
if elem_level3.tag == u'Товар':
elems_level3 = list(elem_level3)
for elem_level4_Indx, elem_level4 in enumerate(elems_level3):
# print 'level4', elem_level4_Indx, elem_level4, elem_level4.tag, elem_level4.attrib, elem_level4.text
if elem_level4.tag == u'Ид':
id_1c_prod = elem_level4.text
if elems_level3[elem_level4_Indx + 1].tag == u'Артикул':
articul = elems_level3[elem_level4_Indx + 1].text
if elems_level3[elem_level4_Indx + 2].tag == u'Наименование':
name = elems_level3[elem_level4_Indx + 2].text
if elem_level4.tag == u'Группы':
elems_level4 = list(elems_level3[elem_level4_Indx])
for elem_level5_Indx, elem_level5 in enumerate(elems_level4):
# print 'level5', elem_level5_Indx, elem_level5, elem_level5.tag, elem_level5.attrib, elem_level5.text
if elem_level5.tag == u'Ид':
id_1c_cat = elem_level5.text
if 'level10' in locals():
print('level10', )
| apache-2.0 | -4,928,253,206,802,143,000 | 60.794118 | 220 | 0.399889 | false |
astrorafael/twisted-mqtt | mqtt/test/test_pdu.py | 1 | 20218 | # ----------------------------------------------------------------------
# Copyright (C) 2015 by Rafael Gonzalez
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ----------------------------------------------------------------------
from twisted.trial import unittest
from twisted.test import proto_helpers
from mqtt import v31, v311
from mqtt.pdu import (
CONNECT,
CONNACK,
DISCONNECT,
PINGREQ,
PINGRES,
SUBSCRIBE,
SUBACK,
UNSUBSCRIBE,
UNSUBACK,
PUBLISH,
PUBACK,
PUBREC,
PUBREL,
PUBCOMP,
)
class PDUTestCase(unittest.TestCase):
def test_CONNECT_encdec(self):
request = CONNECT()
response = CONNECT()
request.clientId = "client-foo"
request.version = v31
request.keepalive = 0
request.willTopic = None
request.willMessage = None
request.willQoS = None
request.willRetain = None
request.username = None
request.password = None
request.cleanStart = True
response.decode(request.encode())
self.assertEqual(request.encoded[0], response.encoded[0])
self.assertEqual(request.version, response.version)
self.assertEqual(request.clientId, response.clientId)
self.assertEqual(request.keepalive, response.keepalive)
self.assertEqual(request.willTopic, response.willTopic)
self.assertEqual(request.willMessage, response.willMessage)
self.assertEqual(request.willQoS, response.willQoS)
self.assertEqual(request.willRetain, response.willRetain)
self.assertEqual(request.username, response.username)
self.assertEqual(request.password, response.password)
self.assertEqual(request.cleanStart, response.cleanStart)
def test_CONNECT_encdec_keepalive(self):
request = CONNECT()
response = CONNECT()
request.version = v31
request.clientId = "client-foo"
request.keepalive = 12
request.willTopic = None
request.willMessage = None
request.willQoS = None
request.willRetain = None
request.username = None
request.password = None
request.cleanStart = True
response.decode(request.encode())
self.assertEqual(request.encoded[0], response.encoded[0])
self.assertEqual(request.version, response.version)
self.assertEqual(request.clientId, response.clientId)
self.assertEqual(request.keepalive, response.keepalive)
self.assertEqual(request.willTopic, response.willTopic)
self.assertEqual(request.willMessage, response.willMessage)
self.assertEqual(request.willQoS, response.willQoS)
self.assertEqual(request.willRetain, response.willRetain)
self.assertEqual(request.username, response.username)
self.assertEqual(request.password, response.password)
self.assertEqual(request.cleanStart, response.cleanStart)
self.assertEqual(request.version, response.version)
def test_CONNECT_encdec_willTopic(self):
request = CONNECT()
response = CONNECT()
request.clientId = "client-foo"
request.keepalive = 1
request.willTopic = "foo-topic"
request.willMessage = ""
request.willQoS = 1
request.willRetain = False
request.username = None
request.password = None
request.cleanStart = True
request.version = v31
response.decode(request.encode())
self.assertEqual(request.encoded[0], response.encoded[0])
self.assertEqual(request.version, response.version)
self.assertEqual(request.clientId, response.clientId)
self.assertEqual(request.keepalive, response.keepalive)
self.assertEqual(request.willTopic, response.willTopic)
self.assertEqual(request.willMessage, response.willMessage)
self.assertEqual(request.willQoS, response.willQoS)
self.assertEqual(request.willRetain, response.willRetain)
self.assertEqual(request.username, response.username)
self.assertEqual(request.password, response.password)
self.assertEqual(request.cleanStart, response.cleanStart)
def test_CONNECT_encdec_willMessage(self):
request = CONNECT()
response = CONNECT()
request.clientId = "client-foo"
request.keepalive = 1
request.willTopic = "foo-topic"
request.willMessage = "Hello World"
request.willQoS = 2
request.willRetain = False
request.username = None
request.password = None
request.cleanStart = True
request.version = v31
response.decode(request.encode())
self.assertEqual(request.encoded[0], response.encoded[0])
self.assertEqual(request.version, response.version)
self.assertEqual(request.clientId, response.clientId)
self.assertEqual(request.keepalive, response.keepalive)
self.assertEqual(request.willTopic, response.willTopic)
self.assertEqual(request.willMessage, response.willMessage)
self.assertEqual(request.willQoS, response.willQoS)
self.assertEqual(request.willRetain, response.willRetain)
self.assertEqual(request.username, response.username)
self.assertEqual(request.password, response.password)
self.assertEqual(request.cleanStart, response.cleanStart)
def test_CONNECT_encdec_willRetain(self):
request = CONNECT()
response = CONNECT()
request.clientId = "client-foo"
request.keepalive = 1
request.willTopic = "foo-topic"
request.willMessage = "Hello World"
request.willQoS = 2
request.willRetain = True
request.username = None
request.password = None
request.cleanStart = True
request.version = v31
response.decode(request.encode())
self.assertEqual(request.encoded[0], response.encoded[0])
self.assertEqual(request.version, response.version)
self.assertEqual(request.clientId, response.clientId)
self.assertEqual(request.keepalive, response.keepalive)
self.assertEqual(request.willTopic, response.willTopic)
self.assertEqual(request.willMessage, response.willMessage)
self.assertEqual(request.willQoS, response.willQoS)
self.assertEqual(request.willRetain, response.willRetain)
self.assertEqual(request.username, response.username)
self.assertEqual(request.password, response.password)
self.assertEqual(request.cleanStart, response.cleanStart)
def test_CONNECT_encdec_userpass(self):
request = CONNECT()
response = CONNECT()
request.clientId = "client-foo"
request.keepalive = 12000
request.willTopic = "foo-topic"
request.willMessage = ""
request.willQoS = 0
request.willRetain = False
request.username = "foouser"
request.password = "foopasswd"
request.cleanStart = True
request.version = v31
response.decode(request.encode())
self.assertEqual(request.encoded[0], response.encoded[0])
self.assertEqual(request.version, response.version)
self.assertEqual(request.clientId, response.clientId)
self.assertEqual(request.keepalive, response.keepalive)
self.assertEqual(request.willTopic, response.willTopic)
self.assertEqual(request.willMessage, response.willMessage)
self.assertEqual(request.willQoS, response.willQoS)
self.assertEqual(request.willRetain, response.willRetain)
self.assertEqual(request.username, response.username)
self.assertEqual(request.password, response.password.decode(encoding='ascii', errors='ignore'))
self.assertEqual(request.cleanStart, response.cleanStart)
def test_CONNECT_encdec_session(self):
request = CONNECT()
response = CONNECT()
request.clientId = "client-foo"
request.keepalive = 1200
request.willTopic = "foo-topic"
request.willMessage = ""
request.willQoS = 1
request.willRetain = False
request.username = None
request.password = None
request.cleanStart = False
request.version = v31
response.decode(request.encode())
self.assertEqual(request.encoded[0], response.encoded[0])
self.assertEqual(request.version, response.version)
self.assertEqual(request.clientId, response.clientId)
self.assertEqual(request.keepalive, response.keepalive)
self.assertEqual(request.willTopic, response.willTopic)
self.assertEqual(request.willMessage, response.willMessage)
self.assertEqual(request.willQoS, response.willQoS)
self.assertEqual(request.willRetain, response.willRetain)
self.assertEqual(request.username, response.username)
self.assertEqual(request.password, response.password)
self.assertEqual(request.cleanStart, response.cleanStart)
def test_CONNECT_encdec_version(self):
request = CONNECT()
response = CONNECT()
request.clientId = "client-foo"
request.keepalive = 120
request.willTopic = "foo-topic"
request.willMessage = ""
request.willQoS = 0
request.willRetain = False
request.username = None
request.password = None
request.cleanStart = True
request.version = v311
response.decode(request.encode())
self.assertEqual(request.encoded[0], response.encoded[0])
self.assertEqual(request.version, response.version)
self.assertEqual(request.clientId, response.clientId)
self.assertEqual(request.keepalive, response.keepalive)
self.assertEqual(request.willTopic, response.willTopic)
self.assertEqual(request.willMessage, response.willMessage)
self.assertEqual(request.willQoS, response.willQoS)
self.assertEqual(request.willRetain, response.willRetain)
self.assertEqual(request.username, response.username)
self.assertEqual(request.password, response.password)
self.assertEqual(request.cleanStart, response.cleanStart)
def test_PINGREQ_encdec(self):
request = PINGREQ()
response = PINGREQ()
response.decode(request.encode())
self.assertEqual(request.encoded[0], response.encoded[0])
def test_PINGRES_encdec(self):
request = PINGRES()
response = PINGRES()
response.decode(request.encode())
self.assertEqual(request.encoded[0], response.encoded[0])
def test_DISCONNECT_encdec(self):
request = DISCONNECT()
response = DISCONNECT()
response.decode(request.encode())
self.assertEqual(request.encoded[0], response.encoded[0])
def test_CONNACK_encdec(self):
request = CONNACK()
response = CONNACK()
request.session = True
request.resultCode = 2
response.decode(request.encode())
self.assertEqual(request.encoded[0], response.encoded[0])
self.assertEqual(request.session, response.session)
self.assertEqual(request.resultCode, response.resultCode)
def test_SUBSCRIBE_encdec(self):
request = SUBSCRIBE()
response = SUBSCRIBE()
request.topics = [('foo', 1), ('bar',0), ('baz',2)]
request.msgId = 5
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
self.assertEqual(request.topics, response.topics)
def test_SUBACK_encdec(self):
request = SUBACK()
response = SUBACK()
request.msgId = 5
request.granted = [(0, False), (0, True), (1,False), (1,True), (2,False), (2,True)]
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
self.assertEqual(request.granted, response.granted)
def test_UNSUBSCRIBE_encdec(self):
request = UNSUBSCRIBE()
response = UNSUBSCRIBE()
request.topics = ['foo', 'bar', 'baz']
request.msgId = 6
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
self.assertEqual(request.topics, response.topics)
def test_UNSUBACK_encdec(self):
request = UNSUBACK()
response = UNSUBACK()
request.msgId = 5
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
def test_PUBACK_encdec(self):
request = PUBACK()
response = PUBACK()
request.msgId = 65535
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
def test_PUBREC_encdec(self):
request = PUBREC()
response = PUBREC()
request.msgId = 30001
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
def test_PUBREL_encdec(self):
request = PUBREL()
response = PUBREL()
request.msgId = 30002
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
def test_PUBCOMP_encdec(self):
request = PUBCOMP()
response = PUBCOMP()
request.msgId = 30002
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
def test_PUBLISH_encdec(self):
request = PUBLISH()
response = PUBLISH()
request.msgId = None
request.qos = 0
request.dup = False
request.retain = False
request.topic = "foo"
request.payload = "foo"
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
self.assertEqual(request.qos, response.qos)
self.assertEqual(request.dup, response.dup)
self.assertEqual(request.retain, response.retain)
self.assertEqual(request.topic, response.topic)
self.assertEqual(request.payload, response.payload.decode(encoding='utf-8'))
def test_PUBLISH_encdec_qos(self):
request = PUBLISH()
response = PUBLISH()
request.msgId = 30001
request.qos = 1
request.dup = False
request.retain = False
request.topic = "foo"
request.payload = "foo"
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
self.assertEqual(request.qos, response.qos)
self.assertEqual(request.dup, response.dup)
self.assertEqual(request.retain, response.retain)
self.assertEqual(request.topic, response.topic)
self.assertEqual(request.payload, response.payload.decode(encoding='utf-8'))
def test_PUBLISH_encdec_dup(self):
request = PUBLISH()
response = PUBLISH()
request.msgId = 30001
request.qos = 1
request.dup = True
request.retain = False
request.topic = "foo"
request.payload = "foo"
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
self.assertEqual(request.qos, response.qos)
self.assertEqual(request.dup, response.dup)
self.assertEqual(request.retain, response.retain)
self.assertEqual(request.topic, response.topic)
self.assertEqual(request.payload, response.payload.decode(encoding='utf-8'))
def test_PUBLISH_encdec_retain(self):
request = PUBLISH()
response = PUBLISH()
request.msgId = 30001
request.qos = 1
request.dup = False
request.retain = True
request.topic = "foo"
request.payload = "foo"
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
self.assertEqual(request.qos, response.qos)
self.assertEqual(request.dup, response.dup)
self.assertEqual(request.retain, response.retain)
self.assertEqual(request.topic, response.topic)
self.assertEqual(request.payload, response.payload.decode(encoding='utf-8'))
def test_PUBLISH_encdec_payload_str(self):
request = PUBLISH()
response = PUBLISH()
request.msgId = 30001
request.qos = 1
request.dup = False
request.retain = True
request.topic = "foo"
request.payload = ""
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
self.assertEqual(request.qos, response.qos)
self.assertEqual(request.dup, response.dup)
self.assertEqual(request.retain, response.retain)
self.assertEqual(request.topic, response.topic)
self.assertEqual(request.payload, response.payload.decode(encoding='utf-8'))
def test_PUBLISH_encdec_payload_bytearray(self):
request = PUBLISH()
response = PUBLISH()
request.msgId = 30001
request.qos = 1
request.dup = False
request.retain = True
request.topic = "foo"
request.payload = bytearray(5)
response.decode(request.encode())
self.assertEqual(request.msgId, response.msgId)
self.assertEqual(request.qos, response.qos)
self.assertEqual(request.dup, response.dup)
self.assertEqual(request.retain, response.retain)
self.assertEqual(request.topic, response.topic)
self.assertEqual(request.payload, response.payload)
class PDUTestCase2(unittest.TestCase):
    def test_PUBREC_enc_fail1(self):
        request = PUBREC()
        request.msgId = -1
        self.assertRaises(ValueError, request.encode)
    def test_PUBREC_enc_fail2(self):
        request = PUBREC()
        request.msgId = 2000000
        self.assertRaises(ValueError, request.encode)
def test_PUBLISH_encdec_payload_int(self):
request = PUBLISH()
request.msgId = 30001
request.qos = 1
request.dup = False
request.retain = True
request.topic = "foo"
request.payload = 65537
self.assertRaises(TypeError, request.encode)
    def test_PUBLISH_encdec_payload_float(self):
        request = PUBLISH()
        request.msgId = 30001
        request.qos = 1
        request.dup = False
        request.retain = True
        request.topic = "foo"
        request.payload = 12.25
        self.assertRaises(TypeError, request.encode)
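# Minimal encode/decode round trip outside the test harness (illustrative
# only):
#
#   req = PUBLISH()
#   req.msgId, req.qos, req.dup, req.retain = 1, 1, False, False
#   req.topic, req.payload = 'sensors/temp', '21.5'
#   raw = req.encode()    # wire bytes
#   resp = PUBLISH()
#   resp.decode(raw)      # resp now mirrors req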
| mit | 5,394,915,169,349,317,000 | 38.565558 | 106 | 0.633594 | false |
DittmarLab/HGTector | hgtector/tests/test_database.py | 1 | 16509 | #!/usr/bin/env python3
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, Qiyun Zhu and Katharina Dittmar.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from unittest import TestCase, main
from os import remove, makedirs, listdir
from os.path import join, dirname, realpath, isfile, isdir
from shutil import rmtree, copy
from tempfile import mkdtemp
import gzip
import pandas as pd
from hgtector.database import Database
from hgtector.util import taxdump_from_text
class DatabaseTests(TestCase):
def setUp(self):
self.tmpdir = mkdtemp()
self.datadir = join(dirname(realpath(__file__)), 'data')
# whether to test remote functions, which highly depend on the network
# connection and the current status of the NCBI server
self.test_remote = False
def tearDown(self):
rmtree(self.tmpdir)
def test___call__(self):
# TODO
pass
def test_set_parameters(self):
# TODO
pass
def test_connect_server(self):
# TODO
pass
def test_retrieve_taxdump(self):
# TODO
pass
def test_retrieve_summary(self):
# TODO
pass
def test_retrieve_categories(self):
# TODO
pass
def test_filter_genomes(self):
me = Database()
header = ('# assembly_accession', 'assembly_level')
data = (('GCF_000000001.1', 'Chromosome'),
('GCF_000000002.1', 'Complete Genome'),
('GCF_000000003.2', 'Scaffold'),
('GCF_000000004.1', 'Contig'),
('GCA_000000004.1', 'Contig'))
df = pd.DataFrame(data, columns=header)
me.complete = False
me.genoids = None
me.exclude = False
# drop duplicates
me.df = df.copy()
me.filter_genomes()
self.assertEqual(me.df.shape[0], 4)
self.assertListEqual(me.df['genome'].tolist(), [
'G000000001', 'G000000002', 'G000000003', 'G000000004'])
self.assertEqual(me.df.query(
'accession == "GCF_000000004.1"').shape[0], 1)
# complete genomes only
me.complete = True
me.df = df.copy()
me.filter_genomes()
self.assertListEqual(me.df['accnov'].tolist(), [
'GCF_000000001', 'GCF_000000002'])
# include certain genomes
me.complete = False
me.genoids = 'G000000001,G000000003'
me.df = df.copy()
me.filter_genomes()
self.assertListEqual(me.df['accession'].tolist(), [
'GCF_000000001.1', 'GCF_000000003.2'])
# exclude certain genomes
me.genoids = ['GCF_000000002.1', 'GCF_000000004']
me.exclude = True
me.df = df.copy()
me.filter_genomes()
self.assertListEqual(me.df['accession'].tolist(), [
'GCF_000000001.1', 'GCF_000000003.2'])
def test_identify_taxonomy(self):
me = Database()
header = ('organism_name', 'taxid', 'species', 'species_taxid')
data = (('Escherichia coli UMN026', '585056', 'E. coli', '562'),
('Escherichia coli O104:H4', '1038927', 'E. coli', '562'),
('Klebsiella aerogenes', '548', 'Klebsiella aerogenes', '548'),
('unclassified Gammaproteobacteria', '118884', '', ''),
('Plasmid pPY113', '126792', '', ''))
df = pd.DataFrame(data, columns=header)
# organism names must be capital and latinate
me.capital = True
me.block = None
me.latin = True
me.taxids = None
me.exclude = False
me.taxdump = taxdump_from_text(taxdump_proteo)
me.df = df.copy()
me.identify_taxonomy()
self.assertNotIn('species_taxid', me.df.columns)
self.assertListEqual(me.df.index.tolist(), [0, 1, 2])
self.assertListEqual(me.df['species'].tolist(), ['562', '562', '548'])
# block word
me.block = 'plasmid'
me.latin = False
me.df = df.copy()
me.identify_taxonomy()
self.assertListEqual(me.df.index.tolist(), [0, 1, 2])
# no Escherichia
me.taxids = '561'
me.exclude = True
me.df = df.copy()
me.identify_taxonomy()
self.assertListEqual(me.df.index.tolist(), [2])
def test_sample_by_taxonomy(self):
me = Database()
# do nothing
me.sample = None
self.assertIsNone(me.sample_by_taxonomy())
        # sample genome table used by the taxonomy-sampling assertions below
header = ('genome', 'taxid', 'refseq_category', 'assembly_level')
data = (('G1', '585056', '', 'Chromosome'), # E. coli UMN026
('G2', '1038927', 'representative genome', 'Chromosome'),
# E. coli O104:H4 (rep. genome to be prioritized over G1)
('G3', '2580236', '', 'Contig'), # sync E. coli
('G4', '622', '', 'Scaffold'), # Shigella
('G5', '548', '', 'Scaffold'), # Klebsiella
('G6', '126792', 'reference genome', 'Contig')) # plasmid
df = pd.DataFrame(data, columns=header)
me.reference = False
me.representative = False
me.taxdump = taxdump_from_text(taxdump_proteo)
# up to one genome per genus
me.rank = 'genus'
me.sample = 1
me.df = df.copy()
me.sample_by_taxonomy()
self.assertListEqual(me.df.columns.tolist(), list(header) + ['genus'])
self.assertListEqual(me.df['genome'].tolist(), ['G2', 'G4', 'G5'])
# include reference genome (plasmid)
me.reference = True
me.df = df.copy()
me.sample_by_taxonomy()
self.assertEqual(me.df['genome'].tolist()[-1], 'G6')
# up to two genomes for entire cellular life
me.rank = 'superkingdom'
me.sample = 2
me.reference = False
me.df = df.copy()
me.sample_by_taxonomy()
self.assertListEqual(me.df['genome'].tolist(), ['G1', 'G2'])
def test_download_genomes(self):
# TODO
pass
def test_extract_genomes(self):
# TODO
pass
def test_genome_lineages(self):
me = Database()
me.output = self.tmpdir
me.taxdump = taxdump_from_text(taxdump_proteo)
data = (('G1', '1224', ''), # Proteobacteria
('G2', '562', '562'), # Escherichia coli
('G3', '622', '622'), # Shigella dysenteriae
('G4', '548', '548')) # Klebsiella aerogenes
me.df = pd.DataFrame(data, columns=[
'genome', 'taxid', 'species']).set_index('genome')
for rank in ['superkingdom', 'kingdom', 'phylum', 'class', 'order',
'family', 'genus']:
me.df[rank] = ''
me.genome_lineages()
with open(join(self.tmpdir, 'lineages.txt'), 'r') as f:
obs = dict(x.split('\t') for x in f.read().splitlines())
proteo = 'k__Bacteria; p__Proteobacteria;'
self.assertEqual(obs['G1'], proteo + ' c__; o__; f__; g__; s__')
entero = proteo + ' c__Gammaproteobacteria; o__Enterobacterales;' +\
' f__Enterobacteriaceae;'
self.assertEqual(
obs['G2'], entero + ' g__Escherichia; s__Escherichia coli')
self.assertEqual(
obs['G3'], entero + ' g__Shigella; s__Shigella dysenteriae')
self.assertEqual(
obs['G4'], entero + ' g__Klebsiella; s__Klebsiella aerogenes')
remove(join(self.tmpdir, 'lineages.txt'))
def test_genome_metadata(self):
me = Database()
me.output = self.tmpdir
me.df = pd.Series({
'genome': 'G1',
'accession': 'GCF_000123456.1',
'asm_name': 'ASM123v1',
'bioproject': 'PRJNA123456',
'biosample': 'SAMN00123456',
'assembly_level': 'Chromosome',
'organism_name': 'hypothetical organism',
'infraspecific_name': '',
'isolate': '',
'taxid': '12345',
'ftp_path': ('ftp://ftp.ncbi.nlm.nih.gov/genomes/all/GCF/000/123/'
'456/GCF_000123456.1_ASM123v1'),
'proteins': 100,
'residues': 12500,
'whatever': 'nonsense'}).to_frame().T
me.genome_metadata()
with open(join(self.tmpdir, 'genomes.tsv'), 'r') as f:
obs = f.read().splitlines()
exp = ('genome', 'proteins', 'residues', 'assembly_level', 'accession',
'bioproject', 'biosample', 'asm_name', 'organism_name',
'infraspecific_name', 'isolate', 'taxid', 'ftp_path')
self.assertEqual(obs[0], '\t'.join(exp))
exp = ('G1', '100', '12500', 'Chromosome', 'GCF_000123456.1',
'PRJNA123456', 'SAMN00123456', 'ASM123v1',
'hypothetical organism', '', '', '12345',
('ftp://ftp.ncbi.nlm.nih.gov/genomes/all/GCF/000/123/456/'
'GCF_000123456.1_ASM123v1'))
self.assertEqual(obs[1], '\t'.join(exp))
remove(join(self.tmpdir, 'genomes.tsv'))
def test_build_taxdump(self):
me = Database()
me.output = self.tmpdir
me.tmpdir = join(self.datadir, 'DnaK', 'taxdump')
me.taxdump = taxdump_from_text(taxdump_proteo)
data = (('G1', '1224'), # Proteobacteria
('G2', '562'), # Escherichia coli
('G3', '585056'), # E. coli UMN026
('G4', '1038927')) # E. coli O104:H4
me.df = pd.DataFrame(data, columns=[
'genome', 'taxid']).set_index('genome')
me.build_taxdump()
with open(join(self.tmpdir, 'taxdump', 'nodes.dmp'), 'r') as f:
obs = set(x.split('\t')[0] for x in f.read().splitlines())
exp = {'1', '131567', '2', '1224', '1236', '91347', '543', '561',
'562', '585056', '1038927'}
self.assertSetEqual(obs, exp)
rmtree(join(self.tmpdir, 'taxdump'))
def test_build_taxonmap(self):
me = Database()
me.output = self.tmpdir
me.taxdump = taxdump_from_text(taxdump_proteo)
me.p2tids = {'P1': {'766'}, # Rickettsiales
'P2': {'570', '548'}, # Klebsiella
'P3': {'620', '622'}, # Shigella
'P4': {'561', '562'}, # Escherichia
'P5': {'126792', '28211'}} # root
me.build_taxonmap()
exp = {'P1': '766', 'P2': '570', 'P3': '620', 'P4': '561', 'P5': '1'}
self.assertDictEqual(me.taxonmap, exp)
with gzip.open(join(self.tmpdir, 'taxon.map.gz'), 'rt') as f:
obs = dict(x.split('\t') for x in f.read().splitlines())
self.assertDictEqual(obs, exp)
remove(join(self.tmpdir, 'taxon.map.gz'))
def test_compile_database(self):
me = Database()
me.output = self.tmpdir
# don't compile
me.compile = 'none'
me.compile_database()
self.assertListEqual(listdir(self.tmpdir), [])
# get database files
copy(join(self.datadir, 'DnaK', 'linear.faa'),
join(self.tmpdir, 'db.faa'))
makedirs(join(self.tmpdir, 'taxdump'))
copy(join(self.datadir, 'DnaK', 'taxdump', 'nodes.dmp'),
join(self.tmpdir, 'taxdump', 'nodes.dmp'))
copy(join(self.datadir, 'DnaK', 'taxdump', 'names.dmp'),
join(self.tmpdir, 'taxdump', 'names.dmp'))
with open(join(self.datadir, 'DnaK', 'prot2tid.txt'), 'r') as f:
me.taxonmap = dict(x.split('\t') for x in f.read().splitlines())
# set parameters
me.threads = 1
me.tmpdir = self.tmpdir
me.makeblastdb = 'makeblastdb'
me.diamond = 'diamond'
# compile blast database
me.compile = 'blast'
me.compile_database()
self.assertTrue(isdir(join(self.tmpdir, 'blast')))
for ext in ('phr', 'pin', 'pog', 'psd', 'psi', 'psq'):
self.assertTrue(isfile(join(self.tmpdir, 'blast', f'db.{ext}')))
rmtree(join(self.tmpdir, 'blast'))
# compile diamond database
me.compile = 'diamond'
me.compile_database()
self.assertTrue(isdir(join(self.tmpdir, 'diamond')))
self.assertTrue(isfile(join(self.tmpdir, 'diamond', 'db.dmnd')))
rmtree(join(self.tmpdir, 'diamond'))
# compile both databases
me.compile = 'both'
me.compile_database()
self.assertTrue(isdir(join(self.tmpdir, 'blast')))
for ext in ('phr', 'pin', 'pog', 'psd', 'psi', 'psq'):
self.assertTrue(isfile(join(self.tmpdir, 'blast', f'db.{ext}')))
self.assertTrue(isdir(join(self.tmpdir, 'diamond')))
self.assertTrue(isfile(join(self.tmpdir, 'diamond', 'db.dmnd')))
rmtree(join(self.tmpdir, 'blast'))
rmtree(join(self.tmpdir, 'diamond'))
# clean up
remove(join(self.tmpdir, 'db.faa'))
rmtree(join(self.tmpdir, 'taxdump'))
def test_build_blast_db(self):
me = Database()
me.output = self.tmpdir
me.makeblastdb = 'makeblastdb'
me.tmpdir = self.tmpdir
copy(join(self.datadir, 'DnaK', 'linear.faa'),
join(self.tmpdir, 'db.faa'))
with open(join(self.datadir, 'DnaK', 'prot2tid.txt'), 'r') as f:
me.taxonmap = dict(x.split('\t') for x in f.read().splitlines())
me.build_blast_db()
self.assertTrue(isdir(join(self.tmpdir, 'blast')))
for ext in ('phr', 'pin', 'pog', 'psd', 'psi', 'psq'):
self.assertTrue(isfile(join(self.tmpdir, 'blast', f'db.{ext}')))
rmtree(join(self.tmpdir, 'blast'))
remove(join(self.tmpdir, 'db.faa'))
def test_build_diamond_db(self):
me = Database()
me.output = self.tmpdir
me.diamond = 'diamond'
me.threads = 1
me.tmpdir = self.tmpdir
copy(join(self.datadir, 'DnaK', 'linear.faa'),
join(self.tmpdir, 'db.faa'))
with open(join(self.datadir, 'DnaK', 'prot2tid.txt'), 'r') as f:
me.taxonmap = dict(x.split('\t') for x in f.read().splitlines())
makedirs(join(self.tmpdir, 'taxdump'))
copy(join(self.datadir, 'DnaK', 'taxdump', 'nodes.dmp'),
join(self.tmpdir, 'taxdump', 'nodes.dmp'))
copy(join(self.datadir, 'DnaK', 'taxdump', 'names.dmp'),
join(self.tmpdir, 'taxdump', 'names.dmp'))
me.build_diamond_db()
self.assertTrue(isdir(join(self.tmpdir, 'diamond')))
self.assertTrue(isfile(join(self.tmpdir, 'diamond', 'db.dmnd')))
rmtree(join(self.tmpdir, 'diamond'))
remove(join(self.tmpdir, 'db.faa'))
remove(join(self.tmpdir, 'taxdump', 'nodes.dmp'))
remove(join(self.tmpdir, 'taxdump', 'names.dmp'))
def test_check_local_file(self):
me = Database()
# file does not exist
file = join(self.tmpdir, 'tmp.in')
self.assertFalse(me.check_local_file(file))
# empty file will be deleted
open(file, 'w').close()
self.assertFalse(me.check_local_file(file))
self.assertFalse(isfile(file))
# file exists and has content
with open(file, 'w') as f:
f.write('Hello world!')
self.assertTrue(isfile(file))
self.assertTrue(me.check_local_file(file))
# overwrite existing file
self.assertFalse(me.check_local_file(file, overwrite=True))
self.assertFalse(isfile(file))
"""Constants"""
taxdump_proteo = (
'1,root,1,no rank',
'131567,cellular organisms,1,no rank',
'2,Bacteria,131567,superkingdom',
'1224,Proteobacteria,2,phylum',
'28211,Alphaproteobacteria,1224,class',
'766,Rickettsiales,28211,order',
'1236,Gammaproteobacteria,1224,class',
'91347,Enterobacterales,1236,order',
'543,Enterobacteriaceae,91347,family',
'561,Escherichia,543,genus',
'562,Escherichia coli,561,species',
'585056,Escherichia coli UMN026,562,no rank',
'1038927,Escherichia coli O104:H4,562,no rank',
'2580236,synthetic Escherichia coli Syn61,561,species',
'620,Shigella,543,genus',
'622,Shigella dysenteriae,620,species',
'570,Klebsiella,543,genus',
'548,Klebsiella aerogenes,570,species',
'118884,unclassified Gammaproteobacteria,1236,no rank',
'126792,Plasmid pPY113,1,species')
if __name__ == '__main__':
main()
| bsd-3-clause | 2,908,852,564,089,388,000 | 36.951724 | 79 | 0.550185 | false |
DayGitH/Family-Tree | mainwindow1.py | 1 | 7475 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\mainwindow.ui'
#
# Created: Wed Dec 17 21:45:47 2014
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(640, 480)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setIconSize(QtCore.QSize(32, 32))
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtGui.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName("gridLayout")
self.graphicsView = QtGui.QGraphicsView(self.centralwidget)
self.graphicsView.setResizeAnchor(QtGui.QGraphicsView.AnchorUnderMouse)
self.graphicsView.setObjectName("graphicsView")
self.gridLayout.addWidget(self.graphicsView, 0, 0, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 640, 21))
self.menubar.setObjectName("menubar")
self.menuFile = QtGui.QMenu(self.menubar)
self.menuFile.setObjectName("menuFile")
self.menu_New = QtGui.QMenu(self.menuFile)
self.menu_New.setObjectName("menu_New")
self.menuEdit = QtGui.QMenu(self.menuFile)
self.menuEdit.setObjectName("menuEdit")
self.menuTrees = QtGui.QMenu(self.menubar)
self.menuTrees.setObjectName("menuTrees")
self.menuAbout = QtGui.QMenu(self.menubar)
self.menuAbout.setObjectName("menuAbout")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.actionNew_Person = QtGui.QAction(MainWindow)
self.actionNew_Person.setObjectName("actionNew_Person")
self.actionNew_Relationship = QtGui.QAction(MainWindow)
self.actionNew_Relationship.setObjectName("actionNew_Relationship")
self.actionEdit_Person = QtGui.QAction(MainWindow)
self.actionEdit_Person.setObjectName("actionEdit_Person")
self.actionEdit_Relationship = QtGui.QAction(MainWindow)
self.actionEdit_Relationship.setObjectName("actionEdit_Relationship")
self.actionSave_Tree = QtGui.QAction(MainWindow)
self.actionSave_Tree.setObjectName("actionSave_Tree")
self.actionLoad_Tree = QtGui.QAction(MainWindow)
self.actionLoad_Tree.setObjectName("actionLoad_Tree")
self.actionNew_Tree = QtGui.QAction(MainWindow)
self.actionNew_Tree.setObjectName("actionNew_Tree")
self.actionExit = QtGui.QAction(MainWindow)
self.actionExit.setObjectName("actionExit")
self.actionDescendants = QtGui.QAction(MainWindow)
self.actionDescendants.setObjectName("actionDescendants")
self.actionHourglass = QtGui.QAction(MainWindow)
self.actionHourglass.setObjectName("actionHourglass")
self.actionPython = QtGui.QAction(MainWindow)
self.actionPython.setObjectName("actionPython")
self.actionPyside = QtGui.QAction(MainWindow)
self.actionPyside.setObjectName("actionPyside")
self.actionFamily_Tree = QtGui.QAction(MainWindow)
self.actionFamily_Tree.setObjectName("actionFamily_Tree")
self.menu_New.addAction(self.actionNew_Person)
self.menu_New.addAction(self.actionNew_Relationship)
self.menu_New.addAction(self.actionNew_Tree)
self.menuEdit.addAction(self.actionEdit_Person)
self.menuEdit.addAction(self.actionEdit_Relationship)
self.menuFile.addAction(self.menu_New.menuAction())
self.menuFile.addAction(self.menuEdit.menuAction())
self.menuFile.addSeparator()
self.menuFile.addAction(self.actionSave_Tree)
self.menuFile.addAction(self.actionLoad_Tree)
self.menuFile.addSeparator()
self.menuFile.addAction(self.actionExit)
self.menuTrees.addAction(self.actionDescendants)
self.menuTrees.addAction(self.actionHourglass)
self.menuAbout.addAction(self.actionPython)
self.menuAbout.addAction(self.actionPyside)
self.menuAbout.addAction(self.actionFamily_Tree)
self.menubar.addAction(self.menuFile.menuAction())
self.menubar.addAction(self.menuTrees.menuAction())
self.menubar.addAction(self.menuAbout.menuAction())
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "My Family Tree", None, QtGui.QApplication.UnicodeUTF8))
self.menuFile.setTitle(QtGui.QApplication.translate("MainWindow", "File", None, QtGui.QApplication.UnicodeUTF8))
self.menu_New.setTitle(QtGui.QApplication.translate("MainWindow", "New", None, QtGui.QApplication.UnicodeUTF8))
self.menuEdit.setTitle(QtGui.QApplication.translate("MainWindow", "Edit", None, QtGui.QApplication.UnicodeUTF8))
self.menuTrees.setTitle(QtGui.QApplication.translate("MainWindow", "Trees", None, QtGui.QApplication.UnicodeUTF8))
self.menuAbout.setTitle(QtGui.QApplication.translate("MainWindow", "About", None, QtGui.QApplication.UnicodeUTF8))
self.actionNew_Person.setText(QtGui.QApplication.translate("MainWindow", "New Person", None, QtGui.QApplication.UnicodeUTF8))
self.actionNew_Relationship.setText(QtGui.QApplication.translate("MainWindow", "New Relationship", None, QtGui.QApplication.UnicodeUTF8))
self.actionEdit_Person.setText(QtGui.QApplication.translate("MainWindow", "Edit Person", None, QtGui.QApplication.UnicodeUTF8))
self.actionEdit_Relationship.setText(QtGui.QApplication.translate("MainWindow", "Edit Relationship", None, QtGui.QApplication.UnicodeUTF8))
self.actionSave_Tree.setText(QtGui.QApplication.translate("MainWindow", "Save Tree", None, QtGui.QApplication.UnicodeUTF8))
self.actionLoad_Tree.setText(QtGui.QApplication.translate("MainWindow", "Load Tree", None, QtGui.QApplication.UnicodeUTF8))
self.actionNew_Tree.setText(QtGui.QApplication.translate("MainWindow", "New Tree", None, QtGui.QApplication.UnicodeUTF8))
self.actionExit.setText(QtGui.QApplication.translate("MainWindow", "Exit", None, QtGui.QApplication.UnicodeUTF8))
self.actionDescendants.setText(QtGui.QApplication.translate("MainWindow", "Descendants", None, QtGui.QApplication.UnicodeUTF8))
self.actionHourglass.setText(QtGui.QApplication.translate("MainWindow", "Hourglass", None, QtGui.QApplication.UnicodeUTF8))
self.actionPython.setText(QtGui.QApplication.translate("MainWindow", "Python", None, QtGui.QApplication.UnicodeUTF8))
self.actionPyside.setText(QtGui.QApplication.translate("MainWindow", "Pyside", None, QtGui.QApplication.UnicodeUTF8))
self.actionFamily_Tree.setText(QtGui.QApplication.translate("MainWindow", "Family Tree", None, QtGui.QApplication.UnicodeUTF8))
| cc0-1.0 | -3,483,384,771,466,825,000 | 62.347458 | 147 | 0.734849 | false |
openstack/oslo.utils | oslo_utils/tests/test_netutils.py | 1 | 20222 | # Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import io
import socket
from unittest import mock
import netaddr
import netifaces
from oslotest import base as test_base
from oslo_utils import netutils
class NetworkUtilsTest(test_base.BaseTestCase):
def test_no_host(self):
result = netutils.urlsplit('http://')
self.assertEqual('', result.netloc)
self.assertIsNone(result.port)
self.assertIsNone(result.hostname)
self.assertEqual('http', result.scheme)
def test_parse_host_port(self):
self.assertEqual(('server01', 80),
netutils.parse_host_port('server01:80'))
self.assertEqual(('server01', None),
netutils.parse_host_port('server01'))
self.assertEqual(('server01', 1234),
netutils.parse_host_port('server01',
default_port=1234))
self.assertEqual(('::1', 80),
netutils.parse_host_port('[::1]:80'))
self.assertEqual(('::1', None),
netutils.parse_host_port('[::1]'))
self.assertEqual(('::1', 1234),
netutils.parse_host_port('[::1]',
default_port=1234))
self.assertEqual(('2001:db8:85a3::8a2e:370:7334', 1234),
netutils.parse_host_port(
'2001:db8:85a3::8a2e:370:7334',
default_port=1234))
def test_urlsplit(self):
result = netutils.urlsplit('rpc://myhost?someparam#somefragment')
self.assertEqual(result.scheme, 'rpc')
self.assertEqual(result.netloc, 'myhost')
self.assertEqual(result.path, '')
self.assertEqual(result.query, 'someparam')
self.assertEqual(result.fragment, 'somefragment')
result = netutils.urlsplit(
'rpc://myhost/mypath?someparam#somefragment',
allow_fragments=False)
self.assertEqual(result.scheme, 'rpc')
self.assertEqual(result.netloc, 'myhost')
self.assertEqual(result.path, '/mypath')
self.assertEqual(result.query, 'someparam#somefragment')
self.assertEqual(result.fragment, '')
result = netutils.urlsplit(
'rpc://user:pass@myhost/mypath?someparam#somefragment',
allow_fragments=False)
self.assertEqual(result.scheme, 'rpc')
self.assertEqual(result.netloc, 'user:pass@myhost')
self.assertEqual(result.path, '/mypath')
self.assertEqual(result.query, 'someparam#somefragment')
self.assertEqual(result.fragment, '')
def test_urlsplit_ipv6(self):
ipv6_url = 'http://[::1]:443/v2.0/'
result = netutils.urlsplit(ipv6_url)
self.assertEqual(result.scheme, 'http')
self.assertEqual(result.netloc, '[::1]:443')
self.assertEqual(result.path, '/v2.0/')
self.assertEqual(result.hostname, '::1')
self.assertEqual(result.port, 443)
ipv6_url = 'http://user:pass@[::1]/v2.0/'
result = netutils.urlsplit(ipv6_url)
self.assertEqual(result.scheme, 'http')
self.assertEqual(result.netloc, 'user:pass@[::1]')
self.assertEqual(result.path, '/v2.0/')
self.assertEqual(result.hostname, '::1')
self.assertIsNone(result.port)
ipv6_url = 'https://[2001:db8:85a3::8a2e:370:7334]:1234/v2.0/xy?ab#12'
result = netutils.urlsplit(ipv6_url)
self.assertEqual(result.scheme, 'https')
self.assertEqual(result.netloc, '[2001:db8:85a3::8a2e:370:7334]:1234')
self.assertEqual(result.path, '/v2.0/xy')
self.assertEqual(result.hostname, '2001:db8:85a3::8a2e:370:7334')
self.assertEqual(result.port, 1234)
self.assertEqual(result.query, 'ab')
self.assertEqual(result.fragment, '12')
def test_urlsplit_params(self):
test_url = "http://localhost/?a=b&c=d"
result = netutils.urlsplit(test_url)
self.assertEqual({'a': 'b', 'c': 'd'}, result.params())
self.assertEqual({'a': 'b', 'c': 'd'}, result.params(collapse=False))
test_url = "http://localhost/?a=b&a=c&a=d"
result = netutils.urlsplit(test_url)
self.assertEqual({'a': 'd'}, result.params())
self.assertEqual({'a': ['b', 'c', 'd']}, result.params(collapse=False))
test_url = "http://localhost"
result = netutils.urlsplit(test_url)
self.assertEqual({}, result.params())
test_url = "http://localhost?"
result = netutils.urlsplit(test_url)
self.assertEqual({}, result.params())
def test_set_tcp_keepalive(self):
mock_sock = mock.Mock()
netutils.set_tcp_keepalive(mock_sock, True, 100, 10, 5)
calls = [
mock.call.setsockopt(socket.SOL_SOCKET,
socket.SO_KEEPALIVE, True),
]
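        # The TCP keepalive tuning knobs are platform-dependent, so only
        # expect the corresponding setsockopt calls where the constants exist.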
if hasattr(socket, 'TCP_KEEPIDLE'):
calls += [
mock.call.setsockopt(socket.IPPROTO_TCP,
socket.TCP_KEEPIDLE, 100)
]
if hasattr(socket, 'TCP_KEEPINTVL'):
calls += [
mock.call.setsockopt(socket.IPPROTO_TCP,
socket.TCP_KEEPINTVL, 10),
]
if hasattr(socket, 'TCP_KEEPCNT'):
calls += [
mock.call.setsockopt(socket.IPPROTO_TCP,
socket.TCP_KEEPCNT, 5)
]
mock_sock.assert_has_calls(calls)
mock_sock.reset_mock()
netutils.set_tcp_keepalive(mock_sock, False)
self.assertEqual(1, len(mock_sock.mock_calls))
@mock.patch.object(netutils, 'LOG', autospec=True)
def test_is_valid_ipv4(self, mock_log):
expected_log = 'Converting in non strict mode is deprecated. ' \
'You should pass strict=False if you want to preserve ' \
'legacy behavior'
self.assertTrue(netutils.is_valid_ipv4('42.42.42.42'))
self.assertFalse(netutils.is_valid_ipv4('-1.11.11.11'))
self.assertFalse(netutils.is_valid_ipv4(''))
self.assertTrue(netutils.is_valid_ipv4('10'))
mock_log.warn.assert_called_with(expected_log)
mock_log.reset_mock()
self.assertTrue(netutils.is_valid_ipv4('10.10'))
mock_log.warn.assert_called_with(expected_log)
mock_log.reset_mock()
self.assertTrue(netutils.is_valid_ipv4('10.10.10'))
mock_log.warn.assert_called_with(expected_log)
mock_log.reset_mock()
self.assertTrue(netutils.is_valid_ipv4('10.10.10.10'))
mock_log.warn.assert_not_called()
mock_log.reset_mock()
self.assertFalse(
netutils.is_valid_ipv4('10', strict=True)
)
self.assertFalse(
netutils.is_valid_ipv4('10.10', strict=True)
)
self.assertFalse(
netutils.is_valid_ipv4('10.10.10', strict=True)
)
mock_log.warn.assert_not_called()
mock_log.reset_mock()
self.assertTrue(
netutils.is_valid_ipv4('10', strict=False)
)
self.assertTrue(
netutils.is_valid_ipv4('10.10', strict=False)
)
self.assertTrue(
netutils.is_valid_ipv4('10.10.10', strict=False)
)
mock_log.warn.assert_not_called()
mock_log.reset_mock()
def test_is_valid_ipv6(self):
self.assertTrue(netutils.is_valid_ipv6('::1'))
self.assertTrue(netutils.is_valid_ipv6('fe80::1%eth0'))
self.assertFalse(netutils.is_valid_ip('fe%80::1%eth0'))
self.assertFalse(netutils.is_valid_ipv6(
'1fff::a88:85a3::172.31.128.1'))
self.assertFalse(netutils.is_valid_ipv6(''))
def test_escape_ipv6(self):
self.assertEqual('[1234::1234]', netutils.escape_ipv6('1234::1234'))
self.assertEqual('127.0.0.1', netutils.escape_ipv6('127.0.0.1'))
def test_is_valid_ip(self):
self.assertTrue(netutils.is_valid_ip('127.0.0.1'))
self.assertTrue(netutils.is_valid_ip('2001:db8::ff00:42:8329'))
self.assertTrue(netutils.is_valid_ip('fe80::1%eth0'))
self.assertFalse(netutils.is_valid_ip('256.0.0.0'))
self.assertFalse(netutils.is_valid_ip('::1.2.3.'))
self.assertFalse(netutils.is_valid_ip(''))
self.assertFalse(netutils.is_valid_ip(None))
def test_is_valid_mac(self):
self.assertTrue(netutils.is_valid_mac("52:54:00:cf:2d:31"))
self.assertTrue(netutils.is_valid_mac(u"52:54:00:cf:2d:31"))
self.assertFalse(netutils.is_valid_mac("127.0.0.1"))
self.assertFalse(netutils.is_valid_mac("not:a:mac:address"))
self.assertFalse(netutils.is_valid_mac("52-54-00-cf-2d-31"))
self.assertFalse(netutils.is_valid_mac("aa bb cc dd ee ff"))
self.assertTrue(netutils.is_valid_mac("AA:BB:CC:DD:EE:FF"))
self.assertFalse(netutils.is_valid_mac("AA BB CC DD EE FF"))
self.assertFalse(netutils.is_valid_mac("AA-BB-CC-DD-EE-FF"))
def test_is_valid_cidr(self):
self.assertTrue(netutils.is_valid_cidr('10.0.0.0/24'))
self.assertTrue(netutils.is_valid_cidr('10.0.0.1/32'))
self.assertTrue(netutils.is_valid_cidr('0.0.0.0/0'))
self.assertTrue(netutils.is_valid_cidr('2600::/64'))
self.assertTrue(netutils.is_valid_cidr(
'0000:0000:0000:0000:0000:0000:0000:0001/32'))
self.assertFalse(netutils.is_valid_cidr('10.0.0.1'))
self.assertFalse(netutils.is_valid_cidr('10.0.0.1/33'))
self.assertFalse(netutils.is_valid_cidr(10))
def test_is_valid_ipv6_cidr(self):
self.assertTrue(netutils.is_valid_ipv6_cidr("2600::/64"))
self.assertTrue(netutils.is_valid_ipv6_cidr(
"abcd:ef01:2345:6789:abcd:ef01:192.168.254.254/48"))
self.assertTrue(netutils.is_valid_ipv6_cidr(
"0000:0000:0000:0000:0000:0000:0000:0001/32"))
self.assertTrue(netutils.is_valid_ipv6_cidr(
"0000:0000:0000:0000:0000:0000:0000:0001"))
self.assertFalse(netutils.is_valid_ipv6_cidr("foo"))
self.assertFalse(netutils.is_valid_ipv6_cidr("127.0.0.1"))
def test_valid_port(self):
valid_inputs = [0, '0', 1, '1', 2, '3', '5', 8, 13, 21,
'80', '3246', '65535']
for input_str in valid_inputs:
self.assertTrue(netutils.is_valid_port(input_str))
def test_valid_port_fail(self):
invalid_inputs = ['-32768', '65536', 528491, '528491',
'528.491', 'thirty-seven', None]
for input_str in invalid_inputs:
self.assertFalse(netutils.is_valid_port(input_str))
def test_get_my_ip(self):
sock_attrs = {
'return_value.getsockname.return_value': ['1.2.3.4', '']}
with mock.patch('socket.socket', **sock_attrs):
addr = netutils.get_my_ipv4()
self.assertEqual(addr, '1.2.3.4')
def test_is_int_in_range(self):
valid_inputs = [(1, -100, 100),
('1', -100, 100),
(100, -100, 100),
('100', -100, 100),
(-100, -100, 100),
('-100', -100, 100)]
for input_value in valid_inputs:
self.assertTrue(netutils._is_int_in_range(*input_value))
def test_is_int_not_in_range(self):
invalid_inputs = [(None, 1, 100),
('ten', 1, 100),
(-1, 0, 255),
('None', 1, 100)]
for input_value in invalid_inputs:
self.assertFalse(netutils._is_int_in_range(*input_value))
def test_valid_icmp_type(self):
valid_inputs = [1, '1', 0, '0', 255, '255']
for input_value in valid_inputs:
self.assertTrue(netutils.is_valid_icmp_type(input_value))
def test_invalid_icmp_type(self):
invalid_inputs = [-1, '-1', 256, '256', None, 'None', 'five']
for input_value in invalid_inputs:
self.assertFalse(netutils.is_valid_icmp_type(input_value))
def test_valid_icmp_code(self):
valid_inputs = [1, '1', 0, '0', 255, '255', None]
for input_value in valid_inputs:
self.assertTrue(netutils.is_valid_icmp_code(input_value))
def test_invalid_icmp_code(self):
invalid_inputs = [-1, '-1', 256, '256', 'None', 'zero']
for input_value in invalid_inputs:
self.assertFalse(netutils.is_valid_icmp_code(input_value))
@mock.patch('socket.socket')
@mock.patch('oslo_utils.netutils._get_my_ipv4_address')
def test_get_my_ip_socket_error(self, ip, mock_socket):
mock_socket.side_effect = socket.error
ip.return_value = '1.2.3.4'
addr = netutils.get_my_ipv4()
self.assertEqual(addr, '1.2.3.4')
@mock.patch('netifaces.gateways')
@mock.patch('netifaces.ifaddresses')
def test_get_my_ipv4_address_with_default_route(
self, ifaddr, gateways):
with mock.patch.dict(netifaces.__dict__, {'AF_INET': '0'}):
ifaddr.return_value = {'0': [{'addr': '172.18.204.1'}]}
addr = netutils._get_my_ipv4_address()
self.assertEqual('172.18.204.1', addr)
@mock.patch('netifaces.gateways')
@mock.patch('netifaces.ifaddresses')
def test_get_my_ipv4_address_without_default_route(
self, ifaddr, gateways):
with mock.patch.dict(netifaces.__dict__, {'AF_INET': '0'}):
ifaddr.return_value = {}
addr = netutils._get_my_ipv4_address()
self.assertEqual('127.0.0.1', addr)
@mock.patch('netifaces.gateways')
@mock.patch('netifaces.ifaddresses')
def test_get_my_ipv4_address_without_default_interface(
self, ifaddr, gateways):
gateways.return_value = {}
addr = netutils._get_my_ipv4_address()
self.assertEqual('127.0.0.1', addr)
self.assertFalse(ifaddr.called)
class IPv6byEUI64TestCase(test_base.BaseTestCase):
"""Unit tests to generate IPv6 by EUI-64 operations."""
def test_generate_IPv6_by_EUI64(self):
addr = netutils.get_ipv6_addr_by_EUI64('2001:db8::',
'00:16:3e:33:44:55')
self.assertEqual('2001:db8::216:3eff:fe33:4455', addr.format())
def test_generate_IPv6_with_IPv4_prefix(self):
ipv4_prefix = '10.0.8'
mac = '00:16:3e:33:44:55'
self.assertRaises(ValueError, lambda:
netutils.get_ipv6_addr_by_EUI64(ipv4_prefix, mac))
def test_generate_IPv6_with_bad_mac(self):
bad_mac = '00:16:3e:33:44:5Z'
prefix = '2001:db8::'
self.assertRaises(ValueError, lambda:
netutils.get_ipv6_addr_by_EUI64(prefix, bad_mac))
def test_generate_IPv6_with_bad_prefix(self):
mac = '00:16:3e:33:44:55'
bad_prefix = 'bb'
self.assertRaises(ValueError, lambda:
netutils.get_ipv6_addr_by_EUI64(bad_prefix, mac))
def test_generate_IPv6_with_error_prefix_type(self):
mac = '00:16:3e:33:44:55'
prefix = 123
self.assertRaises(TypeError, lambda:
netutils.get_ipv6_addr_by_EUI64(prefix, mac))
def test_generate_IPv6_with_empty_prefix(self):
mac = '00:16:3e:33:44:55'
prefix = ''
self.assertRaises(ValueError, lambda:
netutils.get_ipv6_addr_by_EUI64(prefix, mac))
class MACbyIPv6TestCase(test_base.BaseTestCase):
"""Unit tests to extract MAC from IPv6."""
def test_reverse_generate_IPv6_by_EUI64(self):
self.assertEqual(
netaddr.EUI('00:16:3e:33:44:55'),
netutils.get_mac_addr_by_ipv6(
netaddr.IPAddress('2001:db8::216:3eff:fe33:4455')),
)
def test_random_qemu_mac(self):
self.assertEqual(
netaddr.EUI('52:54:00:42:02:19'),
netutils.get_mac_addr_by_ipv6(
netaddr.IPAddress('fe80::5054:ff:fe42:219')),
)
def test_local(self):
self.assertEqual(
netaddr.EUI('02:00:00:00:00:00'),
netutils.get_mac_addr_by_ipv6(
netaddr.IPAddress('fe80::ff:fe00:0')),
)
def test_universal(self):
self.assertEqual(
netaddr.EUI('00:00:00:00:00:00'),
netutils.get_mac_addr_by_ipv6(
netaddr.IPAddress('fe80::200:ff:fe00:0')),
)
@contextlib.contextmanager
def mock_file_content(content):
# Allows StringIO to act like a context manager-enabled file.
yield io.StringIO(content)
class TestIsIPv6Enabled(test_base.BaseTestCase):
def setUp(self):
super(TestIsIPv6Enabled, self).setUp()
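        # is_ipv6_enabled() memoizes its result in _IS_IPV6_ENABLED (see the
        # memoize tests below), so every test must start from, and clean up
        # back to, the not-yet-detected state.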
def reset_detection_flag():
netutils._IS_IPV6_ENABLED = None
reset_detection_flag()
self.addCleanup(reset_detection_flag)
@mock.patch('os.path.exists', return_value=True)
@mock.patch('builtins.open', return_value=mock_file_content('0'))
def test_enabled(self, mock_open, exists):
enabled = netutils.is_ipv6_enabled()
self.assertTrue(enabled)
@mock.patch('os.path.exists', return_value=True)
@mock.patch('builtins.open', return_value=mock_file_content('1'))
def test_disabled(self, mock_open, exists):
enabled = netutils.is_ipv6_enabled()
self.assertFalse(enabled)
@mock.patch('os.path.exists', return_value=False)
@mock.patch('builtins.open',
side_effect=AssertionError('should not read'))
def test_disabled_non_exists(self, mock_open, exists):
enabled = netutils.is_ipv6_enabled()
self.assertFalse(enabled)
@mock.patch('os.path.exists', return_value=True)
def test_memoize_enabled(self, exists):
# Reset the flag to appear that we haven't looked for it yet.
netutils._IS_IPV6_ENABLED = None
with mock.patch('builtins.open',
return_value=mock_file_content('0')) as mock_open:
enabled = netutils.is_ipv6_enabled()
self.assertTrue(mock_open.called)
self.assertTrue(netutils._IS_IPV6_ENABLED)
self.assertTrue(enabled)
# The second call should not use open again
with mock.patch('builtins.open',
side_effect=AssertionError('should not be called')):
enabled = netutils.is_ipv6_enabled()
self.assertTrue(enabled)
@mock.patch('os.path.exists', return_value=True)
def test_memoize_disabled(self, exists):
# Reset the flag to appear that we haven't looked for it yet.
netutils._IS_IPV6_ENABLED = None
with mock.patch('builtins.open',
return_value=mock_file_content('1')):
enabled = netutils.is_ipv6_enabled()
self.assertFalse(enabled)
# The second call should not use open again
with mock.patch('builtins.open',
side_effect=AssertionError('should not be called')):
enabled = netutils.is_ipv6_enabled()
self.assertFalse(enabled)
@mock.patch('os.path.exists', return_value=False)
@mock.patch('builtins.open',
side_effect=AssertionError('should not read'))
def test_memoize_not_exists(self, mock_open, exists):
# Reset the flag to appear that we haven't looked for it yet.
netutils._IS_IPV6_ENABLED = None
enabled = netutils.is_ipv6_enabled()
self.assertFalse(enabled)
enabled = netutils.is_ipv6_enabled()
self.assertFalse(enabled)
| apache-2.0 | 6,529,047,190,910,266,000 | 38.964427 | 79 | 0.591435 | false |
spirali/shampoo | src/ui/editor.py | 1 | 6904 | #
# Copyright (C) 2014 Stanislav Bohm
#
# This file is part of Shampoo.
#
# Shampoo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# Shampoo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Shampoo. If not, see <http://www.gnu.org/licenses/>.
#
from base.pointers import make_pointer
from PyQt4.QtGui import QLineEdit, \
QFormLayout, \
QDoubleValidator, \
QGroupBox, \
QSpinBox, \
QHBoxLayout, \
QRegExpValidator, \
QComboBox, \
QCheckBox
import PyQt4.QtCore as QtCore
import logging
class EditorBase:
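    # Editors with set_text = True render their own caption (e.g. a checkbox
    # label) and take a whole form row; the rest are paired with a separate
    # row label in Group.make_widget below.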
set_text = False
class FloatEditor(EditorBase):
def make_widget(self, pointer):
def value_changed(value):
try:
f = float(value)
except ValueError:
logging.debug("Float editor: Invalid value")
return
pointer.set(f)
#if call_update:
# owner.update()
widget = QLineEdit(str(pointer.get()))
widget.setValidator(QDoubleValidator())
widget.textEdited.connect(value_changed)
return widget
class StringEditor(EditorBase):
def __init__(self, regex=None, identifier=False):
if identifier:
assert not regex
regex=r'^[A-Za-z_][A-Za-z_\d]*$'
if regex:
self.regex = QtCore.QRegExp(regex)
else:
self.regex = None
def make_widget(self, pointer):
def value_changed(value):
pointer.set(value)
widget = QLineEdit(str(pointer.get()))
if self.regex is not None:
validator = QRegExpValidator(self.regex)
widget.setValidator(validator)
widget.textEdited.connect(value_changed)
return widget
class IntEditor(EditorBase):
def __init__(self, min_value, max_value):
self.min_value = min_value
self.max_value = max_value
def make_widget(self, pointer):
def value_changed(value):
pointer.set(value)
widget = QSpinBox()
widget.setMinimum(self.min_value)
widget.setMaximum(self.max_value)
widget.setValue(pointer.get())
widget.valueChanged.connect(value_changed)
return widget
class BoolEditor(EditorBase):
set_text = True
def __init__(self):
pass
    def make_widget(self, pointer):
        def value_changed(value):
            pointer.set(bool(value))
        widget = QCheckBox()
        widget.setChecked(bool(pointer.get()))
        # QCheckBox has no valueChanged signal; toggled(bool) carries the new
        # checked state back to the pointer.
        widget.toggled.connect(value_changed)
        return widget
class ChooseEditor(EditorBase):
def __init__(self, options):
self.options = options
    def make_widget(self, pointer):
        def index_changed(index):
            pointer.set(self.options[index][1])
        widget = QComboBox()
        for name, value in self.options:
            widget.addItem(name)
        values = [value for name, value in self.options]
        widget.setCurrentIndex(values.index(pointer.get()))
        # Write the chosen option's value back; without this connection the
        # pointer was never updated when the user picked another entry.
        widget.currentIndexChanged.connect(index_changed)
        return widget
class VertexEditor(EditorBase):
def __init__(self):
pass
def make_widget(self, pointer):
def value_changed():
try:
value_x = float(x.text())
value_y = float(y.text())
value_z = float(z.text())
except ValueError:
logging.debug("Float editor: Invalid value")
return
pointer.set((value_x, value_y, value_z))
vertex = pointer.get()
layout = QHBoxLayout()
x = QLineEdit(str(vertex[0]))
x.setValidator(QDoubleValidator())
x.textEdited.connect(value_changed)
layout.addWidget(x)
y = QLineEdit(str(vertex[1]))
y.setValidator(QDoubleValidator())
y.textEdited.connect(value_changed)
layout.addWidget(y)
z = QLineEdit(str(vertex[2]))
z.setValidator(QDoubleValidator())
z.textEdited.connect(value_changed)
layout.addWidget(z)
return layout
class Group:
def __init__(self, name):
self.name = name
self.items = []
def add(self, editor, name, attr, update_method):
self.items.append((editor, name, attr, update_method))
def make_widget(self, owner, layout):
def add_row(editor, name, attr, update_method):
if update_method:
update_callback = lambda: getattr(owner, update_method)()
else:
update_callback = None
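            # make_pointer (from base.pointers) wraps owner.<attr> in a
            # get()/set() handle; the editors above read the initial value via
            # get() and write edits back via set(). (Inferred from usage; the
            # update callback presumably fires after each write.)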
pointer = make_pointer(owner, attr, update_callback)
widget = editor.make_widget(pointer)
if editor.set_text:
widget.setText(name)
form_layout.addRow(widget)
else:
form_layout.addRow(name, widget)
form_layout = QFormLayout()
box = QGroupBox(self.name);
layout.addWidget(box)
box.setLayout(form_layout)
for editor, name, attr, update_method in self.items:
add_row(editor, name, attr, update_method)
def add_float(self, name, attr, update_method="update"):
self.add(FloatEditor(), name, attr, update_method)
def add_int(self, name, attr, update_method="update", *args, **kw):
self.add(IntEditor(*args, **kw), name, attr, update_method)
def add_bool(self, name, attr, update_method="update", *args, **kw):
self.add(BoolEditor(*args, **kw), name, attr, update_method)
def add_vertex(self, name, attr, update_method="update", *args, **kw):
self.add(VertexEditor(*args, **kw), name, attr, update_method)
def add_string(self, name, attr, update_method="update", *args, **kw):
self.add(StringEditor(*args, **kw), name, attr, update_method)
def add_choose(self, name, attr, update_method="update", *args, **kw):
self.add(ChooseEditor(*args, **kw), name, attr, update_method)
class EditorBuilder:
def __init__(self):
self.groups = []
def add_group(self, name):
group = Group(name)
self.groups.append(group)
return group
"""
last = self.groups[-1]
if last.name is None and not last.items:
# Last group is empty default group, so we can remove it
self.groups = [ Group ]
else:
self.groups.append(group)
"""
def build(self, owner, layout):
for group in self.groups:
group.make_widget(owner, layout)
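# ---------------------------------------------------------------------------
# Usage sketch (illustrative only): DemoOwner and its attributes are
# hypothetical stand-ins, not part of Shampoo. Only the builder/group calls
# mirror the API above, and base.pointers.make_pointer must be importable.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import sys
    from PyQt4.QtGui import QApplication, QWidget, QVBoxLayout
    class DemoOwner(object):
        def __init__(self):
            self.radius = 1.0
            self.center = (0.0, 0.0, 0.0)
            self.name = "sphere"
        def update(self):
            pass  # a real owner would redraw or recompute here
    app = QApplication(sys.argv)
    builder = EditorBuilder()
    group = builder.add_group("Geometry")
    group.add_float("Radius", "radius")
    group.add_vertex("Center", "center")
    group.add_string("Name", "name", identifier=True)
    window = QWidget()
    layout = QVBoxLayout(window)
    builder.build(DemoOwner(), layout)
    window.show()
    sys.exit(app.exec_())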
| gpl-3.0 | 8,687,659,416,699,172,000 | 28.630901 | 74 | 0.581547 | false |
alephu5/Soundbyte | environment/lib/python3.3/site-packages/sympy/solvers/tests/test_solvers.py | 1 | 55773 | from sympy import (
Abs, And, Derivative, Dummy, Eq, Float, Function, Gt, I, Integral,
LambertW, Lt, Matrix, Or, Piecewise, Poly, Q, Rational, S, Symbol,
Wild, acos, asin, atan, atanh, cos, cosh, diff, erf, erfinv, erfc,
erfcinv, erf2, erf2inv, exp, expand, im, log, pi, re, sec, sin,
sinh, solve, solve_linear, sqrt, sstr, symbols, sympify, tan, tanh,
root, simplify, atan2, arg, Mul, SparseMatrix)
from sympy.core.function import nfloat
from sympy.solvers import solve_linear_system, solve_linear_system_LU, \
solve_undetermined_coeffs
from sympy.solvers.solvers import _invert, unrad, checksol, posify, _ispow, \
det_quick, det_perm, det_minor
from sympy.polys.rootoftools import RootOf
from sympy.utilities.pytest import slow, XFAIL, raises, skip
from sympy.utilities.randtest import test_numerically as tn
from sympy.abc import a, b, c, d, k, h, p, x, y, z, t, q, m
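# Evaluate an expression to n significant digits and return its full-precision
# string form (used below for comparing floating-point results).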
def NS(e, n=15, **options):
return sstr(sympify(e).evalf(n, **options), full_prec=True)
def test_swap_back():
f, g = map(Function, 'fg')
fx, gx = f(x), g(x)
assert solve([fx + y - 2, fx - gx - 5], fx, y, gx) == \
{fx: gx + 5, y: -gx - 3}
assert solve(fx + gx*x - 2, [fx, gx]) == {fx: 2, gx: 0}
assert solve(fx + gx**2*x - y, [fx, gx]) == [{fx: y - gx**2*x}]
assert solve([f(1) - 2, x + 2]) == [{x: -2, f(1): 2}]
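# Compatibility shim: the GS_* strategy constants referenced in the comments
# below appear to have been removed from the solver, so this now only reports
# whether solve() can handle the equation at all.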
def guess_solve_strategy(eq, symbol):
try:
solve(eq, symbol)
return True
except (TypeError, NotImplementedError):
return False
def test_guess_poly():
# polynomial equations
assert guess_solve_strategy( S(4), x ) # == GS_POLY
assert guess_solve_strategy( x, x ) # == GS_POLY
assert guess_solve_strategy( x + a, x ) # == GS_POLY
assert guess_solve_strategy( 2*x, x ) # == GS_POLY
assert guess_solve_strategy( x + sqrt(2), x) # == GS_POLY
assert guess_solve_strategy( x + 2**Rational(1, 4), x) # == GS_POLY
assert guess_solve_strategy( x**2 + 1, x ) # == GS_POLY
assert guess_solve_strategy( x**2 - 1, x ) # == GS_POLY
assert guess_solve_strategy( x*y + y, x ) # == GS_POLY
assert guess_solve_strategy( x*exp(y) + y, x) # == GS_POLY
assert guess_solve_strategy(
(x - y**3)/(y**2*sqrt(1 - y**2)), x) # == GS_POLY
def test_guess_poly_cv():
# polynomial equations via a change of variable
assert guess_solve_strategy( sqrt(x) + 1, x ) # == GS_POLY_CV_1
assert guess_solve_strategy(
x**Rational(1, 3) + sqrt(x) + 1, x ) # == GS_POLY_CV_1
assert guess_solve_strategy( 4*x*(1 - sqrt(x)), x ) # == GS_POLY_CV_1
# polynomial equation multiplying both sides by x**n
assert guess_solve_strategy( x + 1/x + y, x ) # == GS_POLY_CV_2
def test_guess_rational_cv():
# rational functions
assert guess_solve_strategy( (x + 1)/(x**2 + 2), x) # == GS_RATIONAL
assert guess_solve_strategy(
(x - y**3)/(y**2*sqrt(1 - y**2)), y) # == GS_RATIONAL_CV_1
# rational functions via the change of variable y -> x**n
    assert guess_solve_strategy(
        (sqrt(x) + 1)/(x**Rational(1, 3) + sqrt(x) + 1), x)  # == GS_RATIONAL_CV_1
def test_guess_transcendental():
#transcendental functions
assert guess_solve_strategy( exp(x) + 1, x ) # == GS_TRANSCENDENTAL
assert guess_solve_strategy( 2*cos(x) - y, x ) # == GS_TRANSCENDENTAL
assert guess_solve_strategy(
exp(x) + exp(-x) - y, x ) # == GS_TRANSCENDENTAL
assert guess_solve_strategy(3**x - 10, x) # == GS_TRANSCENDENTAL
assert guess_solve_strategy(-3**x + 10, x) # == GS_TRANSCENDENTAL
assert guess_solve_strategy(a*x**b - y, x) # == GS_TRANSCENDENTAL
def test_solve_args():
# implicit symbol to solve for
assert set(solve(x**2 - 4)) == set([S(2), -S(2)])
assert solve([x + y - 3, x - y - 5]) == {x: 4, y: -1}
assert solve(x - exp(x), x, implicit=True) == [exp(x)]
# no symbol to solve for
assert solve(42) == []
assert solve([1, 2]) == []
# duplicate symbols removed
assert solve((x - 3, y + 2), x, y, x) == {x: 3, y: -2}
# unordered symbols
# only 1
assert solve(y - 3, set([y])) == [3]
# more than 1
assert solve(y - 3, set([x, y])) == [{y: 3}]
# multiple symbols: take the first linear solution
assert solve(x + y - 3, [x, y]) == [{x: 3 - y}]
# unless it is an undetermined coefficients system
assert solve(a + b*x - 2, [a, b]) == {a: 2, b: 0}
assert solve(a*x**2 + b*x + c -
((x - h)**2 + 4*p*k)/4/p,
[h, p, k], exclude=[a, b, c], dict=True) == \
[{k: (4*a*c - b**2)/(4*a), h: -b/(2*a), p: 1/(4*a)}]
# failing undetermined system
assert solve(a*x + b**2/(x + 4) - 3*x - 4/x, a, b) == \
[{a: (-b**2*x + 3*x**3 + 12*x**2 + 4*x + 16)/(x**2*(x + 4))}]
# failed single equation
assert solve(1/(1/x - y + exp(y))) == []
raises(
NotImplementedError, lambda: solve(exp(x) + sin(x) + exp(y) + sin(y)))
# failed system
# -- when no symbols given, 1 fails
assert solve([y, exp(x) + x]) == [{x: -LambertW(1), y: 0}]
# both fail
assert solve(
(exp(x) - x, exp(y) - y)) == [{x: -LambertW(-1), y: -LambertW(-1)}]
# -- when symbols given
solve([y, exp(x) + x], x, y) == [(-LambertW(1), 0)]
# symbol is a number
assert solve(x**2 - pi, pi) == [x**2]
# no equations
assert solve([], [x]) == []
# overdetermined system
# - nonlinear
assert solve([(x + y)**2 - 4, x + y - 2]) == [{x: -y + 2}]
# - linear
assert solve((x + y - 2, 2*x + 2*y - 4)) == {x: -y + 2}
def test_solve_polynomial1():
assert solve(3*x - 2, x) == [Rational(2, 3)]
assert solve(Eq(3*x, 2), x) == [Rational(2, 3)]
assert set(solve(x**2 - 1, x)) == set([-S(1), S(1)])
assert set(solve(Eq(x**2, 1), x)) == set([-S(1), S(1)])
assert solve(x - y**3, x) == [y**3]
assert set(solve(x - y**3, y)) == set([
(-x**Rational(1, 3))/2 + I*sqrt(3)*x**Rational(1, 3)/2,
x**Rational(1, 3),
(-x**Rational(1, 3))/2 - I*sqrt(3)*x**Rational(1, 3)/2,
])
a11, a12, a21, a22, b1, b2 = symbols('a11,a12,a21,a22,b1,b2')
assert solve([a11*x + a12*y - b1, a21*x + a22*y - b2], x, y) == \
{
x: (a22*b1 - a12*b2)/(a11*a22 - a12*a21),
y: (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
}
solution = {y: S.Zero, x: S.Zero}
assert solve((x - y, x + y), x, y ) == solution
assert solve((x - y, x + y), (x, y)) == solution
assert solve((x - y, x + y), [x, y]) == solution
assert set(solve(x**3 - 15*x - 4, x)) == set([
-2 + 3**Rational(1, 2),
S(4),
-2 - 3**Rational(1, 2)
])
assert set(solve((x**2 - 1)**2 - a, x)) == \
set([sqrt(1 + sqrt(a)), -sqrt(1 + sqrt(a)),
sqrt(1 - sqrt(a)), -sqrt(1 - sqrt(a))])
def test_solve_polynomial2():
assert solve(4, x) == []
def test_solve_polynomial_cv_1a():
"""
Test for solving on equations that can be converted to a polynomial equation
using the change of variable y -> x**Rational(p, q)
"""
assert solve( sqrt(x) - 1, x) == [1]
assert solve( sqrt(x) - 2, x) == [4]
assert solve( x**Rational(1, 4) - 2, x) == [16]
assert solve( x**Rational(1, 3) - 3, x) == [27]
assert solve(sqrt(x) + x**Rational(1, 3) + x**Rational(1, 4), x) == [0]
def test_solve_polynomial_cv_1b():
assert set(solve(4*x*(1 - a*sqrt(x)), x)) == set([S(0), 1/a**2])
assert set(solve(x * (x**(S(1)/3) - 3), x)) == set([S(0), S(27)])
def test_solve_polynomial_cv_2():
"""
Test for solving on equations that can be converted to a polynomial equation
multiplying both sides of the equation by x**m
"""
assert solve(x + 1/x - 1, x) in \
[[ Rational(1, 2) + I*sqrt(3)/2, Rational(1, 2) - I*sqrt(3)/2],
[ Rational(1, 2) - I*sqrt(3)/2, Rational(1, 2) + I*sqrt(3)/2]]
def test_quintics_1():
f = x**5 - 110*x**3 - 55*x**2 + 2310*x + 979
s = solve(f, check=False)
for root in s:
res = f.subs(x, root.n()).n()
assert tn(res, 0)
f = x**5 - 15*x**3 - 5*x**2 + 10*x + 20
s = solve(f)
for root in s:
assert root.func == RootOf
# if one uses solve to get the roots of a polynomial that has a RootOf
# solution, make sure that the use of nfloat during the solve process
# doesn't fail. Note: if you want numerical solutions to a polynomial
# it is *much* faster to use nroots to get them than to solve the
# equation only to get RootOf solutions which are then numerically
# evaluated. So for eq = x**5 + 3*x + 7 do Poly(eq).nroots() rather
# than [i.n() for i in solve(eq)] to get the numerical roots of eq.
assert nfloat(solve(x**5 + 3*x**3 + 7)[0], exponent=False) == \
RootOf(x**5 + 3*x**3 + 7, 0).n()
def test_highorder_poly():
# just testing that the uniq generator is unpacked
sol = solve(x**6 - 2*x + 2)
assert all(isinstance(i, RootOf) for i in sol) and len(sol) == 6
@XFAIL
@slow
def test_quintics_2():
f = x**5 + 15*x + 12
s = solve(f, check=False)
for root in s:
res = f.subs(x, root.n()).n()
assert tn(res, 0)
f = x**5 - 15*x**3 - 5*x**2 + 10*x + 20
s = solve(f)
for root in s:
assert root.func == RootOf
def test_solve_rational():
"""Test solve for rational functions"""
assert solve( ( x - y**3 )/( (y**2)*sqrt(1 - y**2) ), x) == [y**3]
def test_solve_nonlinear():
assert solve(x**2 - y**2, x, y) == [{x: -y}, {x: y}]
assert solve(x**2 - y**2/exp(x), x, y) == [{x: 2*LambertW(y/2)}]
assert solve(x**2 - y**2/exp(x), y, x) == [{y: -x*exp(x/2)}, {y: x*exp(x/2)}]
def test_issue_4129():
assert solve(4**(2*(x**2) + 2*x) - 8, x) == [-Rational(3, 2), S.Half]
def test_issue_4091():
assert solve(log(x-3) + log(x+3), x) == [sqrt(10)]
def test_linear_system():
x, y, z, t, n = symbols('x, y, z, t, n')
assert solve([x - 1, x - y, x - 2*y, y - 1], [x, y]) == []
assert solve([x - 1, x - y, x - 2*y, x - 1], [x, y]) == []
assert solve([x - 1, x - 1, x - y, x - 2*y], [x, y]) == []
assert solve([x + 5*y - 2, -3*x + 6*y - 15], x, y) == {x: -3, y: 1}
M = Matrix([[0, 0, n*(n + 1), (n + 1)**2, 0],
[n + 1, n + 1, -2*n - 1, -(n + 1), 0],
[-1, 0, 1, 0, 0]])
assert solve_linear_system(M, x, y, z, t) == \
{x: -t - t/n, z: -t - t/n, y: 0}
assert solve([x + y + z + t, -z - t], x, y, z, t) == {x: -y, z: -t}
def test_linear_system_function():
a = Function('a')
assert solve([a(0, 0) + a(0, 1) + a(1, 0) + a(1, 1), -a(1, 0) - a(1, 1)],
a(0, 0), a(0, 1), a(1, 0), a(1, 1)) == {a(1, 0): -a(1, 1), a(0, 0): -a(0, 1)}
def test_linear_systemLU():
n = Symbol('n')
M = Matrix([[1, 2, 0, 1], [1, 3, 2*n, 1], [4, -1, n**2, 1]])
assert solve_linear_system_LU(M, [x, y, z]) == {z: -3/(n**2 + 18*n),
x: 1 - 12*n/(n**2 + 18*n),
y: 6*n/(n**2 + 18*n)}
# Note: multiple solutions exist for some of these equations, so the tests
# should be expected to break if the implementation of the solver changes
# in such a way that a different branch is chosen
def test_tsolve():
assert solve(exp(x) - 3, x) == [log(3)]
assert set(solve((a*x + b)*(exp(x) - 3), x)) == set([-b/a, log(3)])
assert solve(cos(x) - y, x) == [-acos(y) + 2*pi, acos(y)]
assert solve(2*cos(x) - y, x) == [-acos(y/2) + 2*pi, acos(y/2)]
assert solve(Eq(cos(x), sin(x)), x) == [-3*pi/4, pi/4]
assert set(solve(exp(x) + exp(-x) - y, x)) in [set([
log(y/2 - sqrt(y**2 - 4)/2),
log(y/2 + sqrt(y**2 - 4)/2),
]), set([
log(y - sqrt(y**2 - 4)) - log(2),
log(y + sqrt(y**2 - 4)) - log(2)]),
set([
log(y/2 - sqrt((y - 2)*(y + 2))/2),
log(y/2 + sqrt((y - 2)*(y + 2))/2)])]
assert solve(exp(x) - 3, x) == [log(3)]
assert solve(Eq(exp(x), 3), x) == [log(3)]
assert solve(log(x) - 3, x) == [exp(3)]
assert solve(sqrt(3*x) - 4, x) == [Rational(16, 3)]
assert solve(3**(x + 2), x) == []
assert solve(3**(2 - x), x) == []
assert solve(x + 2**x, x) == [-LambertW(log(2))/log(2)]
ans = solve(3*x + 5 + 2**(-5*x + 3), x)
assert len(ans) == 1 and ans[0].expand() == \
-Rational(5, 3) + LambertW(-10240*2**(S(1)/3)*log(2)/3)/(5*log(2))
assert solve(5*x - 1 + 3*exp(2 - 7*x), x) == \
[Rational(1, 5) + LambertW(-21*exp(Rational(3, 5))/5)/7]
assert solve(2*x + 5 + log(3*x - 2), x) == \
[Rational(2, 3) + LambertW(2*exp(-Rational(19, 3))/3)/2]
assert solve(3*x + log(4*x), x) == [LambertW(Rational(3, 4))/3]
assert set(solve((2*x + 8)*(8 + exp(x)), x)) == set([S(-4), log(8) + pi*I])
eq = 2*exp(3*x + 4) - 3
ans = solve(eq, x) # this generated a failure in flatten
assert len(ans) == 3 and all(eq.subs(x, a).n(chop=True) == 0 for a in ans)
assert solve(2*log(3*x + 4) - 3, x) == [(exp(Rational(3, 2)) - 4)/3]
assert solve(exp(x) + 1, x) == [pi*I]
eq = 2*(3*x + 4)**5 - 6*7**(3*x + 9)
result = solve(eq, x)
ans = [(log(2401) + 5*LambertW(-log(7**(7*3**Rational(1, 5)/5))))/(3*log(7))/-1]
assert result == ans
# it works if expanded, too
assert solve(eq.expand(), x) == result
assert solve(z*cos(x) - y, x) == [-acos(y/z) + 2*pi, acos(y/z)]
assert solve(z*cos(2*x) - y, x) == [-acos(y/z)/2 + pi, acos(y/z)/2]
assert solve(z*cos(sin(x)) - y, x) == [
asin(acos(y/z) - 2*pi) + pi, -asin(acos(y/z)) + pi,
-asin(acos(y/z) - 2*pi), asin(acos(y/z))]
assert solve(z*cos(x), x) == [pi/2, 3*pi/2]
# issue #1409
assert solve(y - b*x/(a + x), x) in [[-a*y/(y - b)], [a*y/(b - y)]]
assert solve(y - b*exp(a/x), x) == [a/log(y/b)]
# issue #1408
assert solve(y - b/(1 + a*x), x) in [[(b - y)/(a*y)], [-((y - b)/(a*y))]]
# issue #1407
assert solve(y - a*x**b, x) == [(y/a)**(1/b)]
# issue #1406
assert solve(z**x - y, x) == [log(y)/log(z)]
# issue #1405
assert solve(2**x - 10, x) == [log(10)/log(2)]
# issue #3645
assert solve(x*y) == [{x: 0}, {y: 0}]
assert solve([x*y]) == [{x: 0}, {y: 0}]
assert solve(x**y - 1) == [{x: 1}, {y: 0}]
assert solve([x**y - 1]) == [{x: 1}, {y: 0}]
assert solve(x*y*(x**2 - y**2)) == [{x: 0}, {x: -y}, {x: y}, {y: 0}]
assert solve([x*y*(x**2 - y**2)]) == [{x: 0}, {x: -y}, {x: y}, {y: 0}]
#issue #1640
assert solve(exp(log(5)*x) - 2**x, x) == [0]
def test_solve_for_functions_derivatives():
t = Symbol('t')
x = Function('x')(t)
y = Function('y')(t)
a11, a12, a21, a22, b1, b2 = symbols('a11,a12,a21,a22,b1,b2')
soln = solve([a11*x + a12*y - b1, a21*x + a22*y - b2], x, y)
assert soln == {
x: (a22*b1 - a12*b2)/(a11*a22 - a12*a21),
y: (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
}
assert solve(x - 1, x) == [1]
assert solve(3*x - 2, x) == [Rational(2, 3)]
soln = solve([a11*x.diff(t) + a12*y.diff(t) - b1, a21*x.diff(t) +
a22*y.diff(t) - b2], x.diff(t), y.diff(t))
assert soln == { y.diff(t): (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
x.diff(t): (a22*b1 - a12*b2)/(a11*a22 - a12*a21) }
assert solve(x.diff(t) - 1, x.diff(t)) == [1]
assert solve(3*x.diff(t) - 2, x.diff(t)) == [Rational(2, 3)]
eqns = set((3*x - 1, 2*y - 4))
assert solve(eqns, set((x, y))) == { x: Rational(1, 3), y: 2 }
x = Symbol('x')
f = Function('f')
F = x**2 + f(x)**2 - 4*x - 1
assert solve(F.diff(x), diff(f(x), x)) == [(-x + 2)/f(x)]
# Mixed cased with a Symbol and a Function
x = Symbol('x')
y = Function('y')(t)
soln = solve([a11*x + a12*y.diff(t) - b1, a21*x +
a22*y.diff(t) - b2], x, y.diff(t))
assert soln == { y.diff(t): (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
x: (a22*b1 - a12*b2)/(a11*a22 - a12*a21) }
def test_issue626():
f = Function('f')
F = x**2 + f(x)**2 - 4*x - 1
e = F.diff(x)
assert solve(e, f(x).diff(x)) in [[(2 - x)/f(x)], [-((x - 2)/f(x))]]
def test_issue771():
a, b, c, d = symbols('a b c d')
A = Matrix(2, 2, [a, b, c, d])
B = Matrix(2, 2, [0, 2, -3, 0])
C = Matrix(2, 2, [1, 2, 3, 4])
assert solve(A*B - C, [a, b, c, d]) == {a: 1, b: -S(1)/3, c: 2, d: -1}
assert solve([A*B - C], [a, b, c, d]) == {a: 1, b: -S(1)/3, c: 2, d: -1}
assert solve(Eq(A*B, C), [a, b, c, d]) == {a: 1, b: -S(1)/3, c: 2, d: -1}
assert solve([A*B - B*A], [a, b, c, d]) == {a: d, b: -S(2)/3*c}
assert solve([A*C - C*A], [a, b, c, d]) == {a: d - c, b: S(2)/3*c}
assert solve([A*B - B*A, A*C - C*A], [a, b, c, d]) == {a: d, b: 0, c: 0}
assert solve([Eq(A*B, B*A)], [a, b, c, d]) == {a: d, b: -S(2)/3*c}
assert solve([Eq(A*C, C*A)], [a, b, c, d]) == {a: d - c, b: S(2)/3*c}
assert solve([Eq(A*B, B*A), Eq(A*C, C*A)], [a, b, c, d]) == {a: d, b: 0, c: 0}
def test_solve_linear():
w = Wild('w')
assert solve_linear(x, x) == (0, 1)
assert solve_linear(x, y - 2*x) in [(x, y/3), (y, 3*x)]
assert solve_linear(x, y - 2*x, exclude=[x]) == (y, 3*x)
assert solve_linear(3*x - y, 0) in [(x, y/3), (y, 3*x)]
assert solve_linear(3*x - y, 0, [x]) == (x, y/3)
assert solve_linear(3*x - y, 0, [y]) == (y, 3*x)
assert solve_linear(x**2/y, 1) == (y, x**2)
assert solve_linear(w, x) in [(w, x), (x, w)]
assert solve_linear(cos(x)**2 + sin(x)**2 + 2 + y) == \
(y, -2 - cos(x)**2 - sin(x)**2)
assert solve_linear(cos(x)**2 + sin(x)**2 + 2 + y, symbols=[x]) == (0, 1)
assert solve_linear(Eq(x, 3)) == (x, 3)
assert solve_linear(1/(1/x - 2)) == (0, 0)
assert solve_linear((x + 1)*exp(-x), symbols=[x]) == (x + 1, exp(x))
assert solve_linear((x + 1)*exp(x), symbols=[x]) == ((x + 1)*exp(x), 1)
assert solve_linear(x*exp(-x**2), symbols=[x]) == (0, 0)
raises(ValueError, lambda: solve_linear(Eq(x, 3), 3))
def test_solve_undetermined_coeffs():
assert solve_undetermined_coeffs(a*x**2 + b*x**2 + b*x + 2*c*x + c + 1, [a, b, c], x) == \
{a: -2, b: 2, c: -1}
# Test that rational functions work
assert solve_undetermined_coeffs(a/x + b/(x + 1) - (2*x + 1)/(x**2 + x), [a, b], x) == \
{a: 1, b: 1}
# Test cancellation in rational functions
assert solve_undetermined_coeffs(((c + 1)*a*x**2 + (c + 1)*b*x**2 +
(c + 1)*b*x + (c + 1)*2*c*x + (c + 1)**2)/(c + 1), [a, b, c], x) == \
{a: -2, b: 2, c: -1}
def test_solve_inequalities():
system = [Lt(x**2 - 2, 0), Gt(x**2 - 1, 0)]
assert solve(system) == \
And(Or(And(Lt(-sqrt(2), re(x)), Lt(re(x), -1)),
And(Lt(1, re(x)), Lt(re(x), sqrt(2)))), Eq(im(x), 0))
assert solve(system, assume=Q.real(x)) == \
Or(And(Lt(-sqrt(2), x), Lt(x, -1)), And(Lt(1, x), Lt(x, sqrt(2))))
# issue 3528, 3448
assert solve((x - 3)/(x - 2) < 0, x, assume=Q.real(x)) == And(Lt(2, x), Lt(x, 3))
assert solve(x/(x + 1) > 1, x, assume=Q.real(x)) == Lt(x, -1)
def test_issue_1694():
assert solve(1/x) == []
assert solve(x*(1 - 5/x)) == [5]
assert solve(x + sqrt(x) - 2) == [1]
assert solve(-(1 + x)/(2 + x)**2 + 1/(2 + x)) == []
assert solve(-x**2 - 2*x + (x + 1)**2 - 1) == []
assert solve((x/(x + 1) + 3)**(-2)) == []
assert solve(x/sqrt(x**2 + 1), x) == [0]
assert solve(exp(x) - y, x) == [log(y)]
assert solve(exp(x)) == []
assert solve(x**2 + x + sin(y)**2 + cos(y)**2 - 1, x) in [[0, -1], [-1, 0]]
eq = 4*3**(5*x + 2) - 7
ans = solve(eq, x)
assert len(ans) == 5 and all(eq.subs(x, a).n(chop=True) == 0 for a in ans)
assert solve(log(x**2) - y**2/exp(x), x, y, set=True) == \
([y], set([
(-sqrt(exp(x)*log(x**2)),),
(sqrt(exp(x)*log(x**2)),)]))
assert solve(x**2*z**2 - z**2*y**2) == [{x: -y}, {x: y}, {z: 0}]
assert solve((x - 1)/(1 + 1/(x - 1))) == []
assert solve(x**(y*z) - x, x) == [1]
raises(NotImplementedError, lambda: solve(log(x) - exp(x), x))
raises(NotImplementedError, lambda: solve(2**x - exp(x) - 3))
def test_PR1964():
# 2072
assert solve(sqrt(x)) == solve(sqrt(x**3)) == [0]
assert solve(sqrt(x - 1)) == [1]
# 1363
a = Symbol('a')
assert solve(-3*a/sqrt(x), x) == []
# 1387
assert solve(2*x/(x + 2) - 1, x) == [2]
# 1397
assert set(solve((x**2/(7 - x)).diff(x))) == set([S(0), S(14)])
# 1596
f = Function('f')
assert solve((3 - 5*x/f(x))*f(x), f(x)) == [5*x/3]
# 1398
assert solve(1/(5 + x)**(S(1)/5) - 9, x) == [-295244/S(59049)]
assert solve(sqrt(x) + sqrt(sqrt(x)) - 4) == [-9*sqrt(17)/2 + 49*S.Half]
assert set(solve(Poly(sqrt(exp(x)) + sqrt(exp(-x)) - 4))) in \
[
set([2*log(-sqrt(3) + 2), 2*log(sqrt(3) + 2)]),
set([log(-4*sqrt(3) + 7), log(4*sqrt(3) + 7)]),
]
assert set(solve(Poly(exp(x) + exp(-x) - 4))) == \
set([log(-sqrt(3) + 2), log(sqrt(3) + 2)])
assert set(solve(x**y + x**(2*y) - 1, x)) == \
set([(-S.Half + sqrt(5)/2)**(1/y), (-S.Half - sqrt(5)/2)**(1/y)])
assert solve(exp(x/y)*exp(-z/y) - 2, y) == [(x - z)/log(2)]
assert solve(
x**z*y**z - 2, z) in [[log(2)/(log(x) + log(y))], [log(2)/(log(x*y))]]
# if you do inversion too soon then multiple roots as for the following will
# be missed, e.g. if exp(3*x) = exp(3) -> 3*x = 3
E = S.Exp1
assert set(solve(exp(3*x) - exp(3), x)) in [
set([S(1), log(-E/2 - sqrt(3)*E*I/2), log(-E/2 + sqrt(3)*E*I/2)]),
set([S(1), log(E*(-S(1)/2 - sqrt(3)*I/2)), log(E*(-S(1)/2 + sqrt(3)*I/2))]),
]
# coverage test
p = Symbol('p', positive=True)
assert solve((1/p + 1)**(p + 1)) == []
def test_issue_2098():
x = Symbol('x', real=True)
assert solve(x**2 + 1, x) == []
n = Symbol('n', integer=True, positive=True)
assert solve((n - 1)*(n + 2)*(2*n - 1), n) == [1]
x = Symbol('x', positive=True)
y = Symbol('y')
assert solve([x + 5*y - 2, -3*x + 6*y - 15], x, y) == []
# not {x: -3, y: 1} b/c x is positive
# The solution following should not contain (-sqrt(2), sqrt(2))
assert solve((x + y)*n - y**2 + 2, x, y) == [(sqrt(2), -sqrt(2))]
y = Symbol('y', positive=True)
# The solution following should not contain {y: -x*exp(x/2)}
assert solve(x**2 - y**2/exp(x), y, x) == [{y: x*exp(x/2)}]
assert solve(x**2 - y**2/exp(x), x, y) == [{x: 2*LambertW(y/2)}]
x, y, z = symbols('x y z', positive=True)
assert solve(z**2*x**2 - z**2*y**2/exp(x), y, x, z) == [{y: x*exp(x/2)}]
def test_checking():
assert set(
solve(x*(x - y/x), x, check=False)) == set([sqrt(y), S(0), -sqrt(y)])
assert set(solve(x*(x - y/x), x, check=True)) == set([sqrt(y), -sqrt(y)])
# {x: 0, y: 4} sets denominator to 0 in the following so system should return None
assert solve((1/(1/x + 2), 1/(y - 3) - 1)) == []
# 0 sets denominator of 1/x to zero so None is returned
assert solve(1/(1/x + 2)) == []
def test_issue_1572_1364_1368():
assert solve((sqrt(x**2 - 1) - 2)) in ([sqrt(5), -sqrt(5)],
[-sqrt(5), sqrt(5)])
assert set(solve((2**exp(y**2/x) + 2)/(x**2 + 15), y)) == set([
-sqrt(x)*sqrt(-log(log(2)) + log(log(2) + I*pi)),
sqrt(x)*sqrt(-log(log(2)) + log(log(2) + I*pi))])
C1, C2 = symbols('C1 C2')
f = Function('f')
assert solve(C1 + C2/x**2 - exp(-f(x)), f(x)) == [log(x**2/(C1*x**2 + C2))]
a = Symbol('a')
E = S.Exp1
assert solve(1 - log(a + 4*x**2), x) in (
[-sqrt(-a + E)/2, sqrt(-a + E)/2],
[sqrt(-a + E)/2, -sqrt(-a + E)/2]
)
assert solve(log(a**(-3) - x**2)/a, x) in (
[-sqrt(-1 + a**(-3)), sqrt(-1 + a**(-3))],
[sqrt(-1 + a**(-3)), -sqrt(-1 + a**(-3))],)
assert solve(1 - log(a + 4*x**2), x) in (
[-sqrt(-a + E)/2, sqrt(-a + E)/2],
[sqrt(-a + E)/2, -sqrt(-a + E)/2],)
assert set(solve((
a**2 + 1) * (sin(a*x) + cos(a*x)), x)) == set([-pi/(4*a), 3*pi/(4*a)])
assert solve(3 - (sinh(a*x) + cosh(a*x)), x) == [log(3)/a]
assert set(solve(3 - (sinh(a*x) + cosh(a*x)**2), x)) == \
set([log(-2 + sqrt(5))/a, log(-sqrt(2) + 1)/a,
log(-sqrt(5) - 2)/a, log(1 + sqrt(2))/a])
assert solve(atan(x) - 1) == [tan(1)]
def test_issue_2033():
r, t = symbols('r,t')
assert set(solve([r - x**2 - y**2, tan(t) - y/x], [x, y])) == \
set([
(-sqrt(r*tan(t)**2/(tan(t)**2 + 1))/tan(t),
-sqrt(r*tan(t)**2/(tan(t)**2 + 1))),
(sqrt(r*tan(t)**2/(tan(t)**2 + 1))/tan(t),
sqrt(r*tan(t)**2/(tan(t)**2 + 1)))])
assert solve([exp(x) - sin(y), 1/y - 3], [x, y]) == \
[(log(sin(S(1)/3)), S(1)/3)]
assert solve([exp(x) - sin(y), 1/exp(y) - 3], [x, y]) == \
[(log(-sin(log(3))), -log(3))]
assert set(solve([exp(x) - sin(y), y**2 - 4], [x, y])) == \
set([(log(-sin(2)), -S(2)), (log(sin(2)), S(2))])
eqs = [exp(x)**2 - sin(y) + z**2, 1/exp(y) - 3]
assert solve(eqs, set=True) == \
([x, y], set([
(log(-sqrt(-z**2 - sin(log(3)))), -log(3)),
(log(sqrt(-z**2 - sin(log(3)))), -log(3))]))
assert solve(eqs, x, z, set=True) == \
([x], set([
(log(-sqrt(-z**2 + sin(y))),),
(log(sqrt(-z**2 + sin(y))),)]))
assert set(solve(eqs, x, y)) == \
set([
(log(-sqrt(-z**2 - sin(log(3)))), -log(3)),
(log(sqrt(-z**2 - sin(log(3)))), -log(3))])
assert set(solve(eqs, y, z)) == \
set([
(-log(3), -sqrt(-exp(2*x) - sin(log(3)))),
(-log(3), sqrt(-exp(2*x) - sin(log(3))))])
eqs = [exp(x)**2 - sin(y) + z, 1/exp(y) - 3]
assert solve(eqs, set=True) == ([x, y], set(
[
(log(-sqrt(-z - sin(log(3)))), -log(3)),
(log(sqrt(-z - sin(log(3)))), -log(3))]))
assert solve(eqs, x, z, set=True) == ([x], set(
[
(log(-sqrt(-z + sin(y))),),
(log(sqrt(-z + sin(y))),)]))
assert set(solve(eqs, x, y)) == set(
[
(log(-sqrt(-z - sin(log(3)))), -log(3)),
(log(sqrt(-z - sin(log(3)))), -log(3))])
assert solve(eqs, z, y) == \
[(-exp(2*x) - sin(log(3)), -log(3))]
assert solve((sqrt(x**2 + y**2) - sqrt(10), x + y - 4), set=True) == (
[x, y], set([(S(1), S(3)), (S(3), S(1))]))
assert set(solve((sqrt(x**2 + y**2) - sqrt(10), x + y - 4), x, y)) == \
set([(S(1), S(3)), (S(3), S(1))])
def test_issue_2236():
lam, a0, conc = symbols('lam a0 conc')
eqs = [lam + 2*y - a0*(1 - x/2)*x - 0.005*x/2*x,
a0*(1 - x/2)*x - 1*y - 0.743436700916726*y,
x + y - conc]
sym = [x, y, a0]
# there are 4 solutions but only two are valid
assert len(solve(eqs, sym, manual=True, minimal=True, simplify=False)) == 2
def test_issue_2236_float():
skip("This test hangs.")
lam, a0, conc = symbols('lam a0 conc')
eqs = [lam + 2*y - a0*(1 - x/2)*x - 0.005*x/2*x,
a0*(1 - x/2)*x - 1*y - 0.743436700916726*y,
x + y - conc]
sym = [x, y, a0]
assert len(
solve(eqs, sym, rational=False, check=False, simplify=False)) == 2
def test_issue_2668():
assert set(solve([x**2 + y + 4], [x])) == \
set([(-sqrt(-y - 4),), (sqrt(-y - 4),)])
def test_polysys():
assert set(solve([x**2 + 2/y - 2, x + y - 3], [x, y])) == \
set([(S(1), S(2)), (1 + sqrt(5), 2 - sqrt(5)),
(1 - sqrt(5), 2 + sqrt(5))])
assert solve([x**2 + y - 2, x**2 + y]) == []
# the ordering should be whatever the user requested
assert solve([x**2 + y - 3, x - y - 4], (x, y)) != solve([x**2 +
y - 3, x - y - 4], (y, x))
def test_unrad():
s = symbols('s', cls=Dummy)
    # checkers to deal with the possibility of the answer coming
    # back with a sign change (cf issue 2104)
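    # From the expected values below, unrad returns a 3-tuple:
    # (radical-free expr, [(dummy, change-of-variable), ...], [denominators]).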
def check(rv, ans):
rv, ans = list(rv), list(ans)
rv[0] = rv[0].expand()
ans[0] = ans[0].expand()
return rv[0] in [ans[0], -ans[0]] and rv[1:] == ans[1:]
def s_check(rv, ans):
# get the dummy
rv = list(rv)
d = rv[0].atoms(Dummy)
reps = list(zip(d, [s]*len(d)))
# replace s with this dummy
rv = (rv[0].subs(reps).expand(), [(p[0].subs(reps), p[1].subs(reps))
for p in rv[1]],
[a.subs(reps) for a in rv[2]])
ans = (ans[0].subs(reps).expand(), [(p[0].subs(reps), p[1].subs(reps))
for p in ans[1]],
[a.subs(reps) for a in ans[2]])
return str(rv[0]) in [str(ans[0]), str(-ans[0])] and \
str(rv[1:]) == str(ans[1:])
assert check(unrad(sqrt(x)),
(x, [], []))
assert check(unrad(sqrt(x) + 1),
(x - 1, [], []))
assert s_check(unrad(sqrt(x) + x**Rational(1, 3) + 2),
(2 + s**2 + s**3, [(s, x - s**6)], []))
assert check(unrad(sqrt(x)*x**Rational(1, 3) + 2),
(x**5 - 64, [], []))
assert check(unrad(sqrt(x) + (x + 1)**Rational(1, 3)),
(x**3 - (x + 1)**2, [], []))
assert check(unrad(sqrt(x) + sqrt(x + 1) + sqrt(2*x)),
(-2*sqrt(2)*x - 2*x + 1, [], []))
assert check(unrad(sqrt(x) + sqrt(x + 1) + 2),
(16*x - 9, [], []))
assert check(unrad(sqrt(x) + sqrt(x + 1) + sqrt(1 - x)),
(-4*x + 5*x**2, [], []))
assert check(unrad(a*sqrt(x) + b*sqrt(x) + c*sqrt(y) + d*sqrt(y)),
((a*sqrt(x) + b*sqrt(x))**2 - (c*sqrt(y) + d*sqrt(y))**2, [], []))
assert check(unrad(sqrt(x) + sqrt(1 - x)),
(2*x - 1, [], []))
assert check(unrad(sqrt(x) + sqrt(1 - x) - 3),
(9*x + (x - 5)**2 - 9, [], []))
assert check(unrad(sqrt(x) + sqrt(1 - x) + sqrt(2 + x)),
(-5*x**2 + 2*x - 1, [], []))
assert check(unrad(sqrt(x) + sqrt(1 - x) + sqrt(2 + x) - 3),
(-25*x**4 - 376*x**3 - 1256*x**2 + 2272*x - 784, [], []))
assert check(unrad(sqrt(x) + sqrt(1 - x) + sqrt(2 + x) - sqrt(1 - 2*x)),
(-41*x**4 - 40*x**3 - 232*x**2 + 160*x - 16, [], []))
assert check(unrad(sqrt(x) + sqrt(x + 1)), (S(1), [], []))
eq = sqrt(x) + sqrt(x + 1) + sqrt(1 - sqrt(x))
assert check(unrad(eq),
(16*x**3 - 9*x**2, [], []))
assert set(solve(eq, check=False)) == set([S(0), S(9)/16])
assert solve(eq) == []
# but this one really does have those solutions
assert set(solve(sqrt(x) - sqrt(x + 1) + sqrt(1 - sqrt(x)))) == \
set([S.Zero, S(9)/16])
'''NOTE
real_root changes the value of the result if the solution is
simplified; `a` in the text below is the root that is not 4/5:
>>> eq
sqrt(x) + sqrt(-x + 1) + sqrt(x + 1) - 6*sqrt(5)/5
>>> eq.subs(x, a).n()
-0.e-123 + 0.e-127*I
>>> real_root(eq.subs(x, a)).n()
-0.e-123 + 0.e-127*I
>>> (eq.subs(x,simplify(a))).n()
-0.e-126
>>> real_root(eq.subs(x, simplify(a))).n()
0.194825975605452 + 2.15093623885838*I
>>> sqrt(x).subs(x, real_root(a)).n()
0.809823827278194 - 0.e-25*I
>>> sqrt(x).subs(x, (a)).n()
0.809823827278194 - 0.e-25*I
>>> sqrt(x).subs(x, simplify(a)).n()
0.809823827278194 - 5.32999467690853e-25*I
>>> sqrt(x).subs(x, real_root(simplify(a))).n()
0.49864610868139 + 1.44572604257047*I
'''
eq = (sqrt(x) + sqrt(x + 1) + sqrt(1 - x) - 6*sqrt(5)/5)
ra = S('''-1484/375 - 4*(-1/2 + sqrt(3)*I/2)*(-12459439/52734375 +
114*sqrt(12657)/78125)**(1/3) - 172564/(140625*(-1/2 +
sqrt(3)*I/2)*(-12459439/52734375 + 114*sqrt(12657)/78125)**(1/3))''')
rb = S(4)/5
ans = solve(sqrt(x) + sqrt(x + 1) + sqrt(1 - x) - 6*sqrt(5)/5)
assert all(abs(eq.subs(x, i).n()) < 1e-10 for i in (ra, rb)) and \
len(ans) == 2 and \
set([i.n(chop=True) for i in ans]) == \
set([i.n(chop=True) for i in (ra, rb)])
raises(ValueError, lambda:
unrad(-root(x,3)**2 + 2**pi*root(x,3) - x + 2**pi))
raises(ValueError, lambda:
unrad(sqrt(x) + sqrt(x + 1) + sqrt(1 - sqrt(x)) + 3))
raises(ValueError, lambda:
unrad(sqrt(x) + (x + 1)**Rational(1, 3) + 2*sqrt(y)))
# same as last but consider only y
assert check(unrad(sqrt(x) + (x + 1)**Rational(1, 3) + 2*sqrt(y), y),
(4*y - (sqrt(x) + (x + 1)**(S(1)/3))**2, [], []))
assert check(unrad(sqrt(x/(1 - x)) + (x + 1)**Rational(1, 3)),
(x**3/(-x + 1)**3 - (x + 1)**2, [], [(-x + 1)**3]))
# same as last but consider only y; no y-containing denominators now
assert s_check(unrad(sqrt(x/(1 - x)) + 2*sqrt(y), y),
(x/(-x + 1) - 4*y, [], []))
assert check(unrad(sqrt(x)*sqrt(1 - x) + 2, x),
(x*(-x + 1) - 4, [], []))
# http://tutorial.math.lamar.edu/
# Classes/Alg/SolveRadicalEqns.aspx#Solve_Rad_Ex2_a
assert solve(Eq(x, sqrt(x + 6))) == [3]
assert solve(Eq(x + sqrt(x - 4), 4)) == [4]
assert solve(Eq(1, x + sqrt(2*x - 3))) == []
assert set(solve(Eq(sqrt(5*x + 6) - 2, x))) == set([-S(1), S(2)])
assert set(solve(Eq(sqrt(2*x - 1) - sqrt(x - 4), 2))) == set([S(5), S(13)])
assert solve(Eq(sqrt(x + 7) + 2, sqrt(3 - x))) == [-6]
# http://www.purplemath.com/modules/solverad.htm
assert solve((2*x - 5)**Rational(1, 3) - 3) == [16]
assert solve((x**3 - 3*x**2)**Rational(1, 3) + 1 - x) == []
assert set(solve(x + 1 - (x**4 + 4*x**3 - x)**Rational(1, 4))) == \
set([-S(1)/2, -S(1)/3])
assert set(solve(sqrt(2*x**2 - 7) - (3 - x))) == set([-S(8), S(2)])
assert solve(sqrt(2*x + 9) - sqrt(x + 1) - sqrt(x + 4)) == [0]
assert solve(sqrt(x + 4) + sqrt(2*x - 1) - 3*sqrt(x - 1)) == [5]
assert solve(sqrt(x)*sqrt(x - 7) - 12) == [16]
assert solve(sqrt(x - 3) + sqrt(x) - 3) == [4]
assert solve(sqrt(9*x**2 + 4) - (3*x + 2)) == [0]
assert solve(sqrt(x) - 2 - 5) == [49]
assert solve(sqrt(x - 3) - sqrt(x) - 3) == []
assert solve(sqrt(x - 1) - x + 7) == [10]
assert solve(sqrt(x - 2) - 5) == [27]
assert solve(sqrt(17*x - sqrt(x**2 - 5)) - 7) == [3]
assert solve(sqrt(x) - sqrt(x - 1) + sqrt(sqrt(x))) == []
# don't posify the expression in unrad and use _mexpand
z = sqrt(2*x + 1)/sqrt(x) - sqrt(2 + 1/x)
p = posify(z)[0]
assert solve(p) == []
assert solve(z) == []
assert solve(z + 6*I) == [-S(1)/11]
assert solve(p + 6*I) == []
eq = sqrt(2 + I) + 2*I
assert unrad(eq - x, x, all=True) == (x**4 + 4*x**2 + 8*x + 37, [], [])
ans = (81*x**8 - 2268*x**6 - 4536*x**5 + 22644*x**4 + 63216*x**3 -
31608*x**2 - 189648*x + 141358, [], [])
r = sqrt(sqrt(2)/3 + 7)
eq = sqrt(r) + r - x
assert unrad(eq, all=1)
r2 = sqrt(sqrt(2) + 21)/sqrt(3)
assert r != r2 and r.equals(r2)
assert unrad(eq - r + r2, all=True) == ans
@slow
def test_unrad_slow():
ans = solve(sqrt(x) + sqrt(x + 1) -
sqrt(1 - x) - sqrt(2 + x))
assert len(ans) == 1 and NS(ans[0])[:4] == '0.73'
# the fence optimization problem
# http://code.google.com/p/sympy/issues/detail?id=1694#c159
F = Symbol('F')
eq = F - (2*x + 2*y + sqrt(x**2 + y**2))
X = solve(eq, x, hint='minimal')[0]
Y = solve((x*y).subs(x, X).diff(y), y, simplify=False, minimal=True)
ans = 2*F/7 - sqrt(2)*F/14
assert any((a - ans).expand().is_zero for a in Y)
eq = S('''
-x + (1/2 - sqrt(3)*I/2)*(3*x**3/2 - x*(3*x**2 - 34)/2 + sqrt((-3*x**3
+ x*(3*x**2 - 34) + 90)**2/4 - 39304/27) - 45)**(1/3) + 34/(3*(1/2 -
sqrt(3)*I/2)*(3*x**3/2 - x*(3*x**2 - 34)/2 + sqrt((-3*x**3 + x*(3*x**2
- 34) + 90)**2/4 - 39304/27) - 45)**(1/3))''')
raises(NotImplementedError, lambda: solve(eq)) # not other code errors
def test__invert():
assert _invert(x - 2) == (2, x)
assert _invert(2) == (2, 0)
assert _invert(exp(1/x) - 3, x) == (1/log(3), x)
assert _invert(exp(1/x + a/x) - 3, x) == ((a + 1)/log(3), x)
assert _invert(a, x) == (a, 0)
def test_issue_1364():
assert solve(-a*x + 2*x*log(x), x) == [exp(a/2)]
assert solve(a/x + exp(x/2), x) == [2*LambertW(-a/2)]
assert solve(x**x) == []
assert solve(x**x - 2) == [exp(LambertW(log(2)))]
assert solve(((x - 3)*(x - 2))**((x - 3)*(x - 4))) == [2]
assert solve(
(a/x + exp(x/2)).diff(x), x) == [4*LambertW(sqrt(2)*sqrt(a)/4)]
def test_issue_2015():
a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r = symbols('a:r')
# there is no 'a' in the equation set but this is how the
# problem was originally posed
syms = a, b, c, f, h, k, n
eqs = [b + r/d - c/d,
c*(1/d + 1/e + 1/g) - f/g - r/d,
f*(1/g + 1/i + 1/j) - c/g - h/i,
h*(1/i + 1/l + 1/m) - f/i - k/m,
k*(1/m + 1/o + 1/p) - h/m - n/p,
n*(1/p + 1/q) - k/p]
assert len(solve(eqs, syms, manual=True, check=False, simplify=False)) == 1
def test_misc():
# make sure that the right variable is picked up in tsolve
raises(NotImplementedError, lambda: solve((exp(x) + 1)**x))
def test_issue_2750():
I1, I2, I3, I4, I5, I6 = symbols('I1:7')
dI1, dI4, dQ2, dQ4, Q2, Q4 = symbols('dI1,dI4,dQ2,dQ4,Q2,Q4')
e = (
I1 - I2 - I3,
I3 - I4 - I5,
I4 + I5 - I6,
-I1 + I2 + I6,
-2*I1 - 2*I3 - 2*I5 - 3*I6 - dI1/2 + 12,
-I4 + dQ4,
-I2 + dQ2,
2*I3 + 2*I5 + 3*I6 - Q2,
I4 - 2*I5 + 2*Q4 + dI4
)
ans = [{
dQ4: I3 - I5,
dI1: -4*I2 - 8*I3 - 4*I5 - 6*I6 + 24,
I4: I3 - I5,
dQ2: I2,
Q2: 2*I3 + 2*I5 + 3*I6,
I1: I2 + I3,
Q4: -I3/2 + 3*I5/2 - dI4/2}]
assert solve(e, I1, I4, Q2, Q4, dI1, dI4, dQ2, dQ4, manual=True) == ans
# the matrix solver (tested below) doesn't like this because it produces
# a zero row in the matrix. Is this related to issue 1452?
assert [ei.subs(
ans[0]) for ei in e] == [0, 0, I3 - I6, -I3 + I6, 0, 0, 0, 0, 0]
def test_2750_matrix():
'''Same as test_2750 but solved with the matrix solver.'''
I1, I2, I3, I4, I5, I6 = symbols('I1:7')
dI1, dI4, dQ2, dQ4, Q2, Q4 = symbols('dI1,dI4,dQ2,dQ4,Q2,Q4')
e = (
I1 - I2 - I3,
I3 - I4 - I5,
I4 + I5 - I6,
-I1 + I2 + I6,
-2*I1 - 2*I3 - 2*I5 - 3*I6 - dI1/2 + 12,
-I4 + dQ4,
-I2 + dQ2,
2*I3 + 2*I5 + 3*I6 - Q2,
I4 - 2*I5 + 2*Q4 + dI4
)
assert solve(e, I1, I4, Q2, Q4, dI1, dI4, dQ2, dQ4) == {
dI4: -I3 + 3*I5 - 2*Q4,
dI1: -4*I2 - 8*I3 - 4*I5 - 6*I6 + 24,
dQ2: I2,
I1: I2 + I3,
Q2: 2*I3 + 2*I5 + 3*I6,
dQ4: I3 - I5,
I4: I3 - I5}
def test_issue_2802():
f, g, h = map(Function, 'fgh')
a = Symbol('a')
D = Derivative(f(x), x)
G = Derivative(g(a), a)
assert solve(f(x) + f(x).diff(x), f(x)) == \
[-D]
assert solve(f(x) - 3, f(x)) == \
[3]
assert solve(f(x) - 3*f(x).diff(x), f(x)) == \
[3*D]
assert solve([f(x) - 3*f(x).diff(x)], f(x)) == \
{f(x): 3*D}
assert solve([f(x) - 3*f(x).diff(x), f(x)**2 - y + 4], f(x), y) == \
[{f(x): 3*D, y: 9*D**2 + 4}]
assert solve(-f(a)**2*g(a)**2 + f(a)**2*h(a)**2 + g(a).diff(a),
h(a), g(a), set=True) == \
([g(a)], set([
(-sqrt(h(a)**2 + G/f(a)**2),),
(sqrt(h(a)**2 + G/f(a)**2),)]))
args = [f(x).diff(x, 2)*(f(x) + g(x)) - g(x)**2 + 2, f(x), g(x)]
assert set(solve(*args)) == \
set([(-sqrt(2), sqrt(2)), (sqrt(2), -sqrt(2))])
eqs = [f(x)**2 + g(x) - 2*f(x).diff(x), g(x)**2 - 4]
assert solve(eqs, f(x), g(x), set=True) == \
([f(x), g(x)], set([
(-sqrt(2*D - 2), S(2)),
(sqrt(2*D - 2), S(2)),
(-sqrt(2*D + 2), -S(2)),
(sqrt(2*D + 2), -S(2))]))
# the underlying problem was in solve_linear that was not masking off
# anything but a Mul or Add; it now raises an error if it gets anything
# but a symbol and solve handles the substitutions necessary so solve_linear
# won't make this error
raises(
ValueError, lambda: solve_linear(f(x) + f(x).diff(x), symbols=[f(x)]))
assert solve_linear(f(x) + f(x).diff(x), symbols=[x]) == \
(f(x) + Derivative(f(x), x), 1)
assert solve_linear(f(x) + Integral(x, (x, y)), symbols=[x]) == \
(f(x) + Integral(x, (x, y)), 1)
assert solve_linear(f(x) + Integral(x, (x, y)) + x, symbols=[x]) == \
(x + f(x) + Integral(x, (x, y)), 1)
assert solve_linear(f(y) + Integral(x, (x, y)) + x, symbols=[x]) == \
(x, -f(y) - Integral(x, (x, y)))
assert solve_linear(x - f(x)/a + (f(x) - 1)/a, symbols=[x]) == \
(x, 1/a)
assert solve_linear(x + Derivative(2*x, x)) == \
(x, -2)
assert solve_linear(x + Integral(x, y), symbols=[x]) == \
(x, 0)
assert solve_linear(x + Integral(x, y) - 2, symbols=[x]) == \
(x, 2/(y + 1))
assert set(solve(x + exp(x)**2, exp(x))) == \
set([-sqrt(-x), sqrt(-x)])
assert solve(x + exp(x), x, implicit=True) == \
[-exp(x)]
assert solve(cos(x) - sin(x), x, implicit=True) == []
assert solve(x - sin(x), x, implicit=True) == \
[sin(x)]
assert solve(x**2 + x - 3, x, implicit=True) == \
[-x**2 + 3]
assert solve(x**2 + x - 3, x**2, implicit=True) == \
[-x + 3]
def test_issue_2813():
assert set(solve(x**2 - x - 0.1, rational=True)) == \
set([S(1)/2 + sqrt(35)/10, -sqrt(35)/10 + S(1)/2])
# [-0.0916079783099616, 1.09160797830996]
ans = solve(x**2 - x - 0.1, rational=False)
assert len(ans) == 2 and all(a.is_Number for a in ans)
ans = solve(x**2 - x - 0.1)
assert len(ans) == 2 and all(a.is_Number for a in ans)
def test_float_handling():
def test(e1, e2):
return len(e1.atoms(Float)) == len(e2.atoms(Float))
assert solve(x - 0.5, rational=True)[0].is_Rational
assert solve(x - 0.5, rational=False)[0].is_Float
assert solve(x - S.Half, rational=False)[0].is_Rational
assert solve(x - 0.5, rational=None)[0].is_Float
assert solve(x - S.Half, rational=None)[0].is_Rational
assert test(nfloat(1 + 2*x), 1.0 + 2.0*x)
for contain in [list, tuple, set]:
ans = nfloat(contain([1 + 2*x]))
assert type(ans) is contain and test(list(ans)[0], 1.0 + 2.0*x)
k, v = list(nfloat({2*x: [1 + 2*x]}).items())[0]
assert test(k, 2*x) and test(v[0], 1.0 + 2.0*x)
assert test(nfloat(cos(2*x)), cos(2.0*x))
assert test(nfloat(3*x**2), 3.0*x**2)
assert test(nfloat(3*x**2, exponent=True), 3.0*x**2.0)
assert test(nfloat(exp(2*x)), exp(2.0*x))
assert test(nfloat(x/3), x/3.0)
assert test(nfloat(x**4 + 2*x + cos(S(1)/3) + 1),
x**4 + 2.0*x + 1.94495694631474)
# don't call nfloat if there is no solution
tot = 100 + c + z + t
assert solve(((.7 + c)/tot - .6, (.2 + z)/tot - .3, t/tot - .1)) == []
def test_check_assumptions():
x = symbols('x', positive=True)
assert solve(x**2 - 1) == [1]
def test_solve_abs():
assert set(solve(abs(x - 7) - 8)) == set([-S(1), S(15)])
r = symbols('r', real=True)
raises(NotImplementedError, lambda: solve(2*abs(r) - abs(r - 1)))
def test_issue_2957():
assert solve(tanh(x + 3)*tanh(x - 3) - 1) == []
assert set([simplify(w) for w in solve(tanh(x - 1)*tanh(x + 1) + 1)]) == set([
-log(2)/2 + log(1 - I),
-log(2)/2 + log(-1 - I),
-log(2)/2 + log(1 + I),
-log(2)/2 + log(-1 + I),])
assert set([simplify(w) for w in solve((tanh(x + 3)*tanh(x - 3) + 1)**2)]) == set([
-log(2)/2 + log(1 - I),
-log(2)/2 + log(-1 - I),
-log(2)/2 + log(1 + I),
-log(2)/2 + log(-1 + I),])
def test_issue_2961():
x = Symbol('x')
absxm3 = Piecewise(
(x - 3, S(0) <= x - 3),
(3 - x, S(0) > x - 3)
)
y = Symbol('y')
assert solve(absxm3 - y, x) == [
Piecewise((-y + 3, S(0) > -y), (S.NaN, True)),
Piecewise((y + 3, S(0) <= y), (S.NaN, True))
]
y = Symbol('y', positive=True)
assert solve(absxm3 - y, x) == [-y + 3, y + 3]
def test_issue_2574():
eq = -x + exp(exp(LambertW(log(x)))*LambertW(log(x)))
assert checksol(eq, x, 2) is True
assert checksol(eq, x, 2, numerical=False) is None
def test_exclude():
R, C, Ri, Vout, V1, Vminus, Vplus, s = \
symbols('R, C, Ri, Vout, V1, Vminus, Vplus, s')
Rf = symbols('Rf', positive=True) # to eliminate Rf = 0 soln
eqs = [C*V1*s + Vplus*(-2*C*s - 1/R),
Vminus*(-1/Ri - 1/Rf) + Vout/Rf,
C*Vplus*s + V1*(-C*s - 1/R) + Vout/R,
-Vminus + Vplus]
assert solve(eqs, exclude=s*C*R) == [
{
Rf: Ri*(C*R*s + 1)**2/(C*R*s),
Vminus: Vplus,
V1: Vplus*(2*C*R*s + 1)/(C*R*s),
Vout: Vplus*(C**2*R**2*s**2 + 3*C*R*s + 1)/(C*R*s)},
{
Vplus: 0,
Vminus: 0,
V1: 0,
Vout: 0},
]
# TODO: Investigate why solution [0] is currently preferred over [1].
assert solve(eqs, exclude=[Vplus, s, C]) in [[{
Vminus: Vplus,
V1: Vout/2 + Vplus/2 + sqrt((Vout - 5*Vplus)*(Vout - Vplus))/2,
R: (Vout - 3*Vplus - sqrt(Vout**2 - 6*Vout*Vplus + 5*Vplus**2))/(2*C*Vplus*s),
Rf: Ri*(Vout - Vplus)/Vplus,
}, {
Vminus: Vplus,
V1: Vout/2 + Vplus/2 - sqrt((Vout - 5*Vplus)*(Vout - Vplus))/2,
R: (Vout - 3*Vplus + sqrt(Vout**2 - 6*Vout*Vplus + 5*Vplus**2))/(2*C*Vplus*s),
Rf: Ri*(Vout - Vplus)/Vplus,
}], [{
Vminus: Vplus,
Vout: (V1**2 - V1*Vplus - Vplus**2)/(V1 - 2*Vplus),
Rf: Ri*(V1 - Vplus)**2/(Vplus*(V1 - 2*Vplus)),
R: Vplus/(C*s*(V1 - 2*Vplus)),
}]]
def test_high_order_roots():
s = x**5 + 4*x**3 + 3*x**2 + S(7)/4
assert set(solve(s)) == set(Poly(s*4, domain='ZZ').all_roots())
def test_minsolve_linear_system():
def count(dic):
return len([x for x in dic.values() if x == 0])
assert count(solve([x + y + z, y + z + a + t], particular=True, quick=True)) \
== 3
assert count(solve([x + y + z, y + z + a + t], particular=True, quick=False)) \
== 3
assert count(solve([x + y + z, y + z + a], particular=True, quick=True)) == 1
assert count(solve([x + y + z, y + z + a], particular=True, quick=False)) == 2
def test_real_roots():
# cf. issue 3551
x = Symbol('x', real=True)
assert len(solve(x**5 + x**3 + 1)) == 1
@slow
def test_issue3429():
eqs = [
327600995*x**2 - 37869137*x + 1809975124*y**2 - 9998905626,
895613949*x**2 - 273830224*x*y + 530506983*y**2 - 10000000000]
assert len(solve(eqs, y, x)) == len(solve(eqs, y, x, manual=True)) == 4
def test_overdetermined():
eqs = [Abs(4*x - 7) - 5, Abs(3 - 8*x) - 1]
assert solve(eqs, x) == [(S.Half,)]
assert solve(eqs, x, manual=True) == [(S.Half,)]
assert solve(eqs, x, manual=True, check=False) == [(S.Half/2,), (S.Half,)]
def test_issue_3506():
x = symbols('x')
assert solve(4**(x/2) - 2**(x/3)) == [0]
# while the first one passed, this one failed
x = symbols('x', real=True)
assert solve(5**(x/2) - 2**(x/3)) == [0]
b = sqrt(6)*sqrt(log(2))/sqrt(log(5))
assert solve(5**(x/2) - 2**(3/x)) == [-b, b]
def test__ispow():
assert _ispow(x**2)
assert not _ispow(x)
assert not _ispow(True)
def test_issue_3545():
eq = -sqrt((m - q)**2 + (-m/(2*q) + S(1)/2)**2) + sqrt((-m**2/2 - sqrt(
4*m**4 - 4*m**2 + 8*m + 1)/4 - S(1)/4)**2 + (m**2/2 - m - sqrt(
4*m**4 - 4*m**2 + 8*m + 1)/4 - S(1)/4)**2)
assert solve(eq, q) == [
m**2/2 - sqrt(4*m**4 - 4*m**2 + 8*m + 1)/4 - S(1)/4,
m**2/2 + sqrt(4*m**4 - 4*m**2 + 8*m + 1)/4 - S(1)/4]
def test_issue_3653():
assert solve([a**2 + a, a - b], [a, b]) == [(-1, -1), (0, 0)]
assert solve([a**2 + a*c, a - b], [a, b]) == [(0, 0), (-c, -c)]
def test_issue_3693():
assert solve(x*(x - 1)**2*(x + 1)*(x**6 - x + 1)) == [
-1, 0, 1, RootOf(x**6 - x + 1, 0), RootOf(x**6 - x + 1, 1),
RootOf(x**6 - x + 1, 2), RootOf(x**6 - x + 1, 3), RootOf(x**6 - x + 1, 4),
RootOf(x**6 - x + 1, 5)]
def test_issues_3720_3721_3722_3149():
# 3722
x, y = symbols('x y')
assert solve(abs(x + 3) - 2*abs(x - 3)) == [1, 9]
assert solve([abs(x) - 2, arg(x) - pi], x) == [
{re(x): -2, x: -2, im(x): 0}, {re(x): 2, x: 2, im(x): 0}]
assert solve([re(x) - 1, im(x) - 2], x) == [
{re(x): 1, x: 1 + 2*I, im(x): 2}]
w = symbols('w', integer=True)
assert solve(2*x**w - 4*y**w, w) == solve((x/y)**w - 2, w)
x, y = symbols('x y', real=True)
assert solve(x + y*I + 3) == {y: 0, x: -3}
# github issue 2642
assert solve(x*(1 + I)) == [0]
x, y = symbols('x y', imaginary=True)
assert solve(x + y*I + 3 + 2*I) == {x: -2*I, y: 3*I}
x = symbols('x', real=True)
assert solve(x + y + 3 + 2*I) == {x: -3, y: -2*I}
# issue 3149
f = Function('f')
assert solve(f(x + 1) - f(2*x - 1)) == [2]
assert solve(log(x + 1) - log(2*x - 1)) == [2]
x = symbols('x')
assert solve(2**x + 4**x) == [I*pi/log(2)]
def test_issue_3890():
f = Function('f')
assert solve(Eq(-f(x), Piecewise((1, x > 0), (0, True))), f(x)) == \
[Piecewise((-1, x > 0), (0, True))]
def test_lambert_multivariate():
from sympy.abc import a, x, y
from sympy.solvers.bivariate import _filtered_gens, _lambert, _solve_lambert
assert _filtered_gens(Poly(x + 1/x + exp(x) + y), x) == set([x, exp(x)])
assert _lambert(x, x) == []
assert solve((x**2 - 2*x + 1).subs(x, log(x) + 3*x)) == [LambertW(3*S.Exp1)/3]
assert solve((x**2 - 2*x + 1).subs(x, (log(x) + 3*x)**2 - 1)) == \
[LambertW(3*exp(-sqrt(2)))/3, LambertW(3*exp(sqrt(2)))/3]
assert solve((x**2 - 2*x - 2).subs(x, log(x) + 3*x)) == \
[LambertW(3*exp(1 + sqrt(3)))/3, LambertW(3*exp(-sqrt(3) + 1))/3]
assert solve(x*log(x) + 3*x + 1, x) == [exp(-3 + LambertW(-exp(3)))]
eq = (x*exp(x) - 3).subs(x, x*exp(x))
assert solve(eq) == [LambertW(3*exp(-LambertW(3)))]
# coverage test
raises(NotImplementedError, lambda: solve(x - sin(x)*log(y - x), x))
# if sign is unknown then only this one solution is obtained
assert solve(3*log(a**(3*x + 5)) + a**(3*x + 5), x) == [
-((log(a**5) + LambertW(S(1)/3))/(3*log(a)))] # tested numerically
p = symbols('p', positive=True)
assert solve(3*log(p**(3*x + 5)) + p**(3*x + 5), x) == [
log((-3**(S(1)/3) - 3**(S(5)/6)*I)*LambertW(S(1)/3)**(S(1)/3)/(2*p**(S(5)/3)))/log(p),
log((-3**(S(1)/3) + 3**(S(5)/6)*I)*LambertW(S(1)/3)**(S(1)/3)/(2*p**(S(5)/3)))/log(p),
log((3*LambertW(S(1)/3)/p**5)**(1/(3*log(p)))),] # checked numerically
# check collection
assert solve(3*log(a**(3*x + 5)) + b*log(a**(3*x + 5)) + a**(3*x + 5), x) == [
-((log(a**5) + LambertW(1/(b + 3)))/(3*log(a)))]
eq = 4*2**(2*p + 3) - 2*p - 3
assert _solve_lambert(eq, p, _filtered_gens(Poly(eq), p)) == [
-S(3)/2 - LambertW(-4*log(2))/(2*log(2))]
# issue 1172
assert solve((a/x + exp(x/2)).diff(x, 2), x) == [
6*LambertW((-1)**(S(1)/3)*a**(S(1)/3)/3)]
assert solve((log(x) + x).subs(x, x**2 + 1)) == [
-I*sqrt(-LambertW(1) + 1), sqrt(-1 + LambertW(1))]
# these only give one of the solutions (see XFAIL below)
assert solve(x**3 - 3**x, x) == [-3/log(3)*LambertW(-log(3)/3)]
# replacing 3 with 2 in the above solution gives 2
assert solve(x**2 - 2**x, x) == [2]
assert solve(-x**2 + 2**x, x) == [2]
assert solve(3**cos(x) - cos(x)**3) == [
acos(-3*LambertW(-log(3)/3)/log(3))]
@XFAIL
def test_other_lambert():
from sympy.abc import x
assert solve(3*sin(x) - x*sin(3), x) == [3]
assert set(solve(3*log(x) - x*log(3))) == set(
[3, -3*LambertW(-log(3)/3)/log(3)])
a = S(6)/5
assert set(solve(x**a - a**x)) == set(
[a, -a*LambertW(-log(a)/a)/log(a)])
assert set(solve(3**cos(x) - cos(x)**3)) == set(
[acos(3), acos(-3*LambertW(-log(3)/3)/log(3))])
assert set(solve(x**2 - 2**x)) == set(
[2, -2/log(2)*LambertW(log(2)/2)])
def test_rewrite_trig():
assert solve(sin(x) + tan(x)) == [0, 2*pi]
assert solve(sin(x) + sec(x)) == [
-2*atan(-S.Half + sqrt(2 - 2*sqrt(3)*I)/2 + sqrt(3)*I/2),
2*atan(S.Half - sqrt(3)*I/2 + sqrt(2 - 2*sqrt(3)*I)/2),
2*atan(S.Half - sqrt(2 + 2*sqrt(3)*I)/2 + sqrt(3)*I/2),
2*atan(S.Half + sqrt(2 + 2*sqrt(3)*I)/2 + sqrt(3)*I/2)]
assert solve(sinh(x) + tanh(x)) == [0, I*pi]
@XFAIL
def test_rewrite_trigh():
# if this import passes then the test below should also pass
from sympy import sech
assert solve(sinh(x) + sech(x)) == [
2*atanh(-S.Half + sqrt(5)/2 - sqrt(-2*sqrt(5) + 2)/2),
2*atanh(-S.Half + sqrt(5)/2 + sqrt(-2*sqrt(5) + 2)/2),
2*atanh(-sqrt(5)/2 - S.Half + sqrt(2 + 2*sqrt(5))/2),
2*atanh(-sqrt(2 + 2*sqrt(5))/2 - sqrt(5)/2 - S.Half)]
def test_uselogcombine():
eq = z - log(x) + log(y/(x*(-1 + y**2/x**2)))
assert solve(eq, x, force=True) == [-sqrt(y*(y - exp(z))), sqrt(y*(y - exp(z)))]
assert solve(log(x + 3) + log(1 + 3/x) - 3) == [
-3 + sqrt(-12 + exp(3))*exp(S(3)/2)/2 + exp(3)/2,
-sqrt(-12 + exp(3))*exp(S(3)/2)/2 - 3 + exp(3)/2]
def test_atan2():
assert solve(atan2(x, 2) - pi/3, x) == [2*sqrt(3)]
def test_errorinverses():
assert solve(erf(x) - y, x) == [erfinv(y)]
assert solve(erfinv(x) - y, x) == [erf(y)]
assert solve(erfc(x) - y, x) == [erfcinv(y)]
assert solve(erfcinv(x) - y, x) == [erfc(y)]
def test_misc2():
# shouldn't generate a GeneratorsNeeded error in _tsolve when the NaN is generated
# for eq_down. Actual answers, as determined numerically, are approx. +/- 0.83
assert solve(sinh(x)*sinh(sinh(x)) + cosh(x)*cosh(sinh(x)) - 3) is not None
# watch out for recursive loop in tsolve
raises(NotImplementedError, lambda: solve((x+2)**y*x-3,x))
def test_gh2725():
R = Symbol('R')
eq = sqrt(2)*R*sqrt(1/(R + 1)) + (R + 1)*(sqrt(2)*sqrt(1/(R + 1)) - 1)
sol = solve(eq, R, set=True)[1]
assert sol == set([(S(5)/3 + 40/(3*(251 + 3*sqrt(111)*I)**(S(1)/3)) +
(251 + 3*sqrt(111)*I)**(S(1)/3)/3,), ((-160 + (1 +
sqrt(3)*I)*(10 - (1 + sqrt(3)*I)*(251 +
3*sqrt(111)*I)**(S(1)/3))*(251 +
3*sqrt(111)*I)**(S(1)/3))/Mul(6, (1 +
sqrt(3)*I), (251 + 3*sqrt(111)*I)**(S(1)/3),
evaluate=False),)])
def test_issue_2015_3512():
# See that it doesn't hang; this solves in about 2 seconds.
# Also check that the solution is relatively small.
# Note: the system in issue 3512 solves in about 5 seconds and has
# an op-count of 138336 (with simplify=False).
b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r = symbols('b:r')
eqs = Matrix([
[b - c/d + r/d], [c*(1/g + 1/e + 1/d) - f/g - r/d],
[-c/g + f*(1/j + 1/i + 1/g) - h/i], [-f/i + h*(1/m + 1/l + 1/i) - k/m],
[-h/m + k*(1/p + 1/o + 1/m) - n/p], [-k/p + n*(1/q + 1/p)]])
v = Matrix([f, h, k, n, b, c])
ans = solve(list(eqs), list(v), simplify=False)
# If time is taken to simplify then the 2617 below becomes
# 1168 and the time is about 50 seconds instead of 2.
assert sum([s.count_ops() for s in ans.values()]) <= 2617
def test_det_quick():
m = Matrix(3, 3, symbols('a:9'))
assert m.det() == det_quick(m) # calls det_perm
m[0, 0] = 1
assert m.det() == det_quick(m) # calls det_minor
m = Matrix(3, 3, list(range(9)))
assert m.det() == det_quick(m) # defaults to .det()
# make sure they work with Sparse
s = SparseMatrix(2, 2, (1, 2, 1, 4))
assert det_perm(s) == det_minor(s) == s.det()
| gpl-3.0 | -7,980,767,662,230,217,000 | 37.597232 | 94 | 0.481523 | false |
treehopper-electronics/treehopper-sdk | Python/treehopper/libraries/sensors/inertial/bno055.py | 1 | 4767 | from time import sleep
from typing import List
from treehopper.api import I2C
from treehopper.libraries import SMBusDevice
from treehopper.libraries.sensors.inertial.bno055_registers import Bno055Registers, OperatingModes, PowerModes
from treehopper.libraries.sensors.inertial import Accelerometer, Gyroscope
from treehopper.libraries.sensors.magnetic.magnetometer import Magnetometer
from treehopper.libraries.sensors.temperature import TemperatureSensor
class Bno055(Accelerometer, Gyroscope, Magnetometer, TemperatureSensor):
"""Bosch BNO055 9-axis IMU with absolute orientation output"""
@staticmethod
def probe(i2c: I2C, rate=100) -> List['Bno055']:
devs = [] # type: List['Bno055']
try:
dev = SMBusDevice(0x28, i2c, rate)
who_am_i = dev.read_byte_data(0x00)
if who_am_i == 0xa0:
devs.append(Bno055(i2c, False, rate))
except RuntimeError:
pass
try:
dev = SMBusDevice(0x29, i2c, rate)
who_am_i = dev.read_byte_data(0x00)
if who_am_i == 0xa0:
devs.append(Bno055(i2c, True, rate))  # alternate address 0x29
except RuntimeError:
pass
return devs
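# Hedged usage sketch (not part of the original file): `board` is assumed to be
# a connected Treehopper board exposing an `i2c` bus. probe() returns one driver
# instance per responding address (0x28 default, 0x29 alternate).
#
#     imus = Bno055.probe(board.i2c)
#     for imu in imus:
#         imu.update()
#         print(imu.quaternion, imu.eular_angles)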
def __init__(self, i2c: I2C, alt_address=False, rate=100):
super().__init__()
self._linear_acceleration = [0, 0, 0]
self._quaternion = [0, 0, 0, 0]
self._gravity = [0, 0, 0]
self._eular_angles = [0, 0, 0]
if alt_address:
dev = SMBusDevice(0x29, i2c, rate)
else:
dev = SMBusDevice(0x28, i2c, rate)
self._registers = Bno055Registers(dev)
self._registers.operatingMode.operatingMode = OperatingModes.ConfigMode
self._registers.operatingMode.write()
self._registers.sysTrigger.resetSys = 1
self._registers.sysTrigger.write()
self._registers.sysTrigger.resetSys = 0
dev_id = 0
while dev_id != 0xA0:
try:
self._registers.chipId.read()
dev_id = self._registers.chipId.value
except RuntimeError:
pass
sleep(0.05)
self._registers.powerMode.powerMode = PowerModes.Normal
self._registers.powerMode.write()
sleep(0.01)
self._registers.sysTrigger.selfTest = 0
self._registers.sysTrigger.write()
sleep(0.01)
self._registers.operatingMode.operatingMode = OperatingModes.NineDegreesOfFreedom
self._registers.operatingMode.write()
sleep(0.02)
@property
def linear_acceleration(self):
if self.auto_update_when_property_read:
self.update()
return self._linear_acceleration
@property
def gravity(self):
if self.auto_update_when_property_read:
self.update()
return self._gravity
@property
def eular_angles(self):
if self.auto_update_when_property_read:
self.update()
return self._eular_angles
@property
def quaternion(self):
if self.auto_update_when_property_read:
self.update()
return self._quaternion
def update(self):
self._registers.readRange(self._registers.accelX, self._registers.temp)
self._accelerometer = [self._registers.accelX.value / 16,
self._registers.accelY.value / 16,
self._registers.accelZ.value / 16]
self._magnetometer = [self._registers.magnetometerX.value / 16,
self._registers.magnetometerY.value / 16,
self._registers.magnetometerZ.value / 16]
self._gyroscope = [self._registers.gyroX.value / 16,
self._registers.gyroY.value / 16,
self._registers.gyroZ.value / 16]
self._linear_acceleration = [self._registers.linX.value / 100,
self._registers.linY.value / 100,
self._registers.linZ.value / 100]
self._gravity = [self._registers.gravX.value / 100,
self._registers.gravY.value / 100,
self._registers.gravZ.value / 100]
self._eular_angles = [self._registers.eulPitch.value / 100,
self._registers.eulRoll.value / 100,
self._registers.eulHeading.value / 100]
self._quaternion = [self._registers.quaW.value / 16384,
self._registers.quaX.value / 16384,
self._registers.quaY.value / 16384,
self._registers.quaZ.value / 16384]
self._celsius = self._registers.temp.value | mit | 2,991,752,316,976,240,000 | 34.849624 | 110 | 0.578561 | false |
germandutchwindtunnels/nettools | Cisco.py | 1 | 23574 | #!/usr/bin/env python
#
# Copyright (C) 2016-2017 DNW German-Dutch Wind Tunnels
#
# This file is part of nettools.
# Nettools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Nettools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with nettools. If not, see <http://www.gnu.org/licenses/>.
""" This is the module defining the CiscoTelnetSession class """
from telnetlib import Telnet
from sets import Set
import multiprocessing
import re
import time
import json
import sys
import socket
import os
import pprint
class CiscoTelnetSession(object):
""" This class provides the interface to a Cisco router/switch over Telnet """
regex_protocol = '(?P<protocol>Internet)'
regex_ip = '(?P<ip>[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})'
regex_age = '(?P<age>[0-9\-]+)'
regex_arptype = '(?P<arptype>ARPA)'
regex_vlanid = '(?P<vlanid>([0-9]+|unassigned|trunk|dynamic))'
regex_vlanname = '(?P<vlanname>[a-zA-Z][0-9a-zA-Z-_]*)'
regex_vlanstatus = '(?P<vlanstatus>[a-z/]+)'
regex_ports = '(?P<ports>[a-zA-Z0-9, /]*)'
regex_macaddress = '(?P<macaddress>[0-9a-f\.]+)'
regex_macaddress_type = '(?P<macaddress_type>(STATIC|DYNAMIC))'
regex_port = '(?P<port>[a-zA-Z0-9/]+)'
regex_whitespace = '\s+'
regex_optionalwhitespace = '\s*'
regex_deviceid = '(?P<deviceid>[.0-9A-Za-z-]+)'
regex_lldp_deviceid = '(?P<deviceid>[.0-9A-Za-z-]{1,20})'
regex_interface = '(?P<interface>((Gi|Fa|Te)[a-zA-Z]*\s*[0-9]/[0-9](/[0-9]{1,2})?)|(vlan) [0-9]+)'
regex_portid = regex_interface.replace("interface", "portid")
regex_holdtime = '(?P<holdtime>[0-9]+)'
regex_capabilities = '(?P<capabilities>([RTBSHIrP],?\s?)+)'
regex_platform = '(?P<platform>[0-9a-zA-Z-]+)'
regex_string = "[0-9a-zA-Z]+"
regex_patchid = '(?P<patchid>[a-z0-9_]+(\-|\.)[a-z0-9]+(\-|\.)[0-9]+[a-z]?)'
regex_vlanconfig = 'switchport access vlan ' + regex_vlanid.replace("vlanid", "vlanconfig")
regex_monitor_session = 'monitor session (?P<monitor_session>[0-9]+)'
regex_monitor_srcdst = '(?P<src_dst>(source|destination))\s*(remote|interface)\s*'
regex_fan = "FAN is (?P<FAN>[A-Z]+)"
regex_temperature = "TEMPERATURE is (?P<TEMPSTATUS>[A-Z]+)"
regex_temperature_value = "(Temperature Value: (?P<TEMP>[0-9]+) Degree Celsius)?"
regex_temperature_state = "(Temperature State: (?P<TEMPCOLOR>[A-Z]+))?"
regex_power_state = "Built-in[\s+] (?P<PWR>[A-Z]+)"
newline = "\n"
character_time_spacing_seconds = 0.1
line_time_spacing_seconds = 0.1
@staticmethod
def fix_interfacename(interface_name):
""" Fix common changes in interface naming. GigabitEthernet vs Gi """
ret = interface_name.replace("GigabitEthernet", "Gi")
ret = ret.replace("FastEthernet", "Fa")
ret = ret.replace("TenGigabitEthernet", "Te")
return ret
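# Example (not in the original): fix_interfacename("GigabitEthernet1/0/1") -> "Gi1/0/1"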
def __init__(self):
# Info for connecting and telnet
self.host = ""
self.port = 0
self.username = ""
self.password = ""
self.session = 0
self.prompt = "#"
self.response_timeout = 15
def __del__(self):
# self.session.write("exit\n")
self.session.close()
def write_command(self, commandstr):
""" Write a command to the peer """
# self.session.write(commandstr)
commandstr_len = len(commandstr)
for i in range(0, commandstr_len):
self.session.write(commandstr[i])
time.sleep(self.character_time_spacing_seconds)
if commandstr[i] == '\n':
time.sleep(self.line_time_spacing_seconds)
def execute_command_lowlevel(self, command, timeout=None):
""" Execute a command and return the result """
if timeout is None:
timeout = self.response_timeout
commandstr = command + self.newline # .strip() + self.newline
self.write_command(commandstr)
output = self.session.read_until(self.prompt, timeout)
ret = output[:-len(self.prompt)]
# print "%s: '%s'" % (command, ret)
return ret
def execute_command(self, command, timeout=None):
""" Execute a command on the Cisco switch """
retries_remaining = 3
while retries_remaining > 0:
try:
return self.execute_command_lowlevel(command, timeout)
except EOFError:
retries_remaining = retries_remaining - 1
print "Got EOFError, reconnecting..."
self.connect_and_login()
def connect_and_login(self):
""" Establish a Telnet connection and perform a login """
self.session = Telnet()
try:
self.session.open(self.host, self.port, self.response_timeout)
except socket.timeout:
return False
if not self.login(self.username, self.password):
return False
try:
self.execute_command_lowlevel("terminal length 0")
self.execute_command_lowlevel("terminal width 0")
except EOFError:
return False
return True
def login(self, username, password):
""" Log in at the Cisco machine """
output = self.session.read_until(":", self.response_timeout)
if output.find("Username:") != -1:
self.session.write(username + self.newline)
self.session.read_until("Password:", self.response_timeout)
self.session.write(password + self.newline)
pass_response = self.session.read_until(self.prompt, self.response_timeout)
if self.prompt not in pass_response:
return False
else:
self.session.close()
return False
return True
def open(self, host, port, username, password):
""" Open a connection to a Cisco router/switch """
self.host = str(host) # In case we receive a Unicode string
self.port = port
self.prompt = self.host[:self.host.find(".")] + "#"
self.username = username
self.password = password
connect_login_result = self.connect_and_login()
return connect_login_result
def close(self):
""" Close the connection to the Cisco router/switch """
self.execute_command("exit")
def filter_output(self, output, regex):
""" Filter output from a command """
result = {}
result_list = []
if isinstance(output, str):
lines = [output]
else:
lines = output
for line in lines:
iterator = re.finditer(regex, line)
try:
while True:
cur = iterator.next()
result = cur.groupdict()
result['hostname'] = self.host
result_list.append(result)
except StopIteration:
pass
return result_list
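# Example (not in the original): with regex_vlanid + regex_whitespace + regex_vlanname,
# a line like "10 mgmt active" yields
# [{'vlanid': '10', 'vlanname': 'mgmt', 'hostname': <this switch's host>}].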
def command_filter(self, command, regex, timeout=None):
""" Execute a command and regex filter the output """
output = self.execute_command(command, timeout)
result_list = self.filter_output(output, regex)
return result_list
def show_mac_address_table(self):
""" Get a list of mac addresses known to the device, with associated port, type and vlanid """
command = "show mac address-table"
regex = CiscoTelnetSession.regex_whitespace + \
CiscoTelnetSession.regex_vlanid + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_macaddress + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_macaddress_type + \
CiscoTelnetSession.regex_whitespace + CiscoTelnetSession.regex_port
return self.command_filter(command, regex)
def show_vlan(self):
""" Return a list of VLANs,status and assigned ports """
command = "show vlan brief"
regex = CiscoTelnetSession.regex_vlanid + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_vlanname + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_vlanstatus + CiscoTelnetSession.regex_whitespace
# regex += CiscoTelnetSession.regex_ports
return self.command_filter(command, regex)
def show_neighbors(self):
""" Return a list of Cisco Discovery Protocol neighbors """
command = "show cdp neighbors"
regex = CiscoTelnetSession.regex_deviceid + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_interface + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_holdtime + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_capabilities + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_platform + CiscoTelnetSession.regex_optionalwhitespace
regex += CiscoTelnetSession.regex_portid
ret = self.command_filter(command, regex)
return ret
def show_health(self):
'''Return fan and temperature status of the switch'''
command = "show env all"
regex = CiscoTelnetSession.regex_fan + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_temperature + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_temperature_value + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_temperature_state
ret = self.command_filter(command, regex)
return ret
def show_interface_vlan(self):
""" Return a list of ports and their VLAN assignment """
command = "show interface status"
regex = CiscoTelnetSession.regex_interface + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_patchid + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_string + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_vlanid
return self.command_filter(command, regex)
def show_arp(self):
""" Request the ARP table of the switch """
command = "show arp"
regex = CiscoTelnetSession.regex_protocol + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_ip + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_age + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_macaddress + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_arptype + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_vlanname
return self.command_filter(command, regex)
def upload_file_tftp(self, src_filename, host, dest_filename):
'''Upload a file through tftp'''
regex = '(?P<bytes>[0-9]+)\sbytes\s'
command = "copy " + src_filename \
+ " tftp://" + host + "/" + dest_filename + self.newline \
+ self.newline # + self.newline# + "#dummy suffix"
command = command.replace("HOSTNAME", self.host)
# print self.host + ": command='" + command + "'"
output = self.command_filter(command, regex, 60)
#output = self.execute_command(command, 60)
#result_list = self.filter_output(output, regex)
ret = "-1"
# print self.host + ": output=\n'" + output + "'"
if len(output) > 0:
ret = output[0]['bytes']
return ret
def save_config(self):
'''Copy running config to startup config'''
return self.execute_command("copy run start" + self.newline)
def add_user(self, username, password, privilege_level=15):
'''Add a user'''
cmd = "config terminal" + self.newline \
+ "no username " + str(username) + self.newline \
+ "username " + str(username) \
+ " privilege " + str(privilege_level) \
+ " secret " + str(password) + self.newline + "end"
ret = self.execute_command(cmd)
return ret
def enable_telnet_login(self):
'''Force login on telnet'''
cmd = "config terminal" + self.newline \
+ "line vty 0 4" + self.newline \
+ "login local" + self.newline \
+ "end" + self.newline
return self.execute_command(cmd)
def show_lldp_neighbors(self):
'''Show LLDP neighbors'''
command = "show lldp neighbors"
regex = CiscoTelnetSession.regex_lldp_deviceid + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_interface + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_holdtime + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_capabilities + CiscoTelnetSession.regex_whitespace
regex += CiscoTelnetSession.regex_portid
return self.command_filter(command, regex)
def show_lldp_neighbor_detail(self, neighbor):
'''Show details of an LLDP neighbor'''
command = "show lldp neighbor " + neighbor + " detail"
output = self.execute_command(command)
splitted_output = output.split('\r\n')
ret = {}
for line in splitted_output:
colon = ": "
colonpos = line.find(colon)
if colonpos == -1:
continue
key_end = colonpos
value_start = colonpos + len(colon)
key = line[:key_end].lstrip()
value = line[value_start:].rstrip()
ret[key] = value
return ret
def set_single_interface_description(self, interface, description):
'''Produce the command to set the description of a single interface'''
command = "interface " + interface + self.newline
command += "description " + description + self.newline
command += "exit" + self.newline
return command
def set_interface_description(self, interface, description):
'''Set the description of an interface'''
command = "config t" + self.newline
command += self.set_single_interface_description(interface, description)
command += "end" + self.newline
output = self.execute_command(command)
return output
def set_interfaces_descriptions(self, interfaces_descriptions):
'''Set the description of a list of interfaces'''
command = "config t" + self.newline
for interface in interfaces_descriptions.iterkeys():
description = interfaces_descriptions[interface]
command += self.set_single_interface_description(interface, description)
command += "end" + self.newline
output = self.execute_command(command)
return output
def set_single_interface_vlan(self, interface, vlanid):
'''Produce the command to set the VLAN id for a singe interface'''
command = "interface " + interface + self.newline
command += "shutdown" + self.newline
command += "switchport mode access" + self.newline
command += "switchport access vlan " + vlanid + self.newline
command += "no shutdown" + self.newline
command += "exit" + self.newline
return command
def set_single_interface_voice_vlan(self, interface, voice_vlanid):
'''Produce the command to set the Voice VLAN id for a single interface'''
command = "interface " + interface + self.newline
command += "shutdown" + self.newline
command += "switchport mode access" + self.newline
if voice_vlanid is None:
command += "no switchport voice vlan" + self.newline
else:
command += "switchport voice vlan " + voice_vlanid + self.newline
command += "no shutdown" + self.newline
command += "exit" + self.newline
return command
def set_interface_vlan(self, interface, vlanid):
'''Set the VLAN ID of an interface'''
command = ""
command += "config t" + self.newline
command += self.set_single_interface_vlan(interface, vlanid)
command += "end" + self.newline
output = self.execute_command(command)
return output
def set_interface_vlan_voice_vlan(self, interface, vlanid, voice_vlanid):
'''Set the VLAN ID and Voice VLAN ID of an interface'''
command = ""
command += "config t" + self.newline
command += self.set_single_interface_vlan(interface, vlanid)
command += self.set_single_interface_voice_vlan(interface, voice_vlanid)
command += "end" + self.newline
output = self.execute_command(command)
return output
def set_single_interface_trunk(self, interface):
'''Produce the command to set a single interface to mode trunk'''
command = "interface " + interface + self.newline
command += "shutdown" + self.newline
command += "switchport trunk encap dot1q" + self.newline
command += "switchport mode trunk" + self.newline
command += "no shutdown" + self.newline
command += "exit" + self.newline
return command
def set_interface_trunk(self, interface):
""" Set the interface to 802.1q trunk mode """
command = ""
command += "config t" + self.newline
command += self.set_single_interface_trunk(interface)
command += "end" + self.newline
output = self.execute_command(command)
return output
def get_interface_vlan_setting(self):
""" Get the vlan settings for all interfaces """
regex = "interface " + CiscoTelnetSession.regex_interface
regex += CiscoTelnetSession.regex_whitespace + CiscoTelnetSession.regex_vlanconfig
command = "show run | inc (interface)|switchport access vlan" # inc can handle regex!
output = self.command_filter(command, regex)
return output
def get_interface_status_and_setting(self):
""" Get both status and settings for all interfaces """
port_status = self.show_interface_vlan()
port_setting = self.get_interface_vlan_setting()
for port in port_status:
hostname = port["hostname"]
interface = port["interface"]
vlansetting = [x["vlanconfig"] for x in port_setting if x["hostname"] ==
hostname and CiscoTelnetSession.fix_interfacename(x["interface"]) == interface]
try:
port["vlanconfig"] = vlansetting[0]
except IndexError:
pass
return port_status
def clear_remote_span(self, remote_span_session_number):
""" Clear the remote SPAN session """
command = "conf t\nno monitor session %d\nend" % remote_span_session_number
output = self.execute_command(command)
return output
def remote_span(self, session_number, source, destination):
""" Create a remote SPAN session """
command = "conf t\nmonitor session %d source %s\n" % (
session_number, source) # source and destionation include a prefix like "interface" or "vlan"
command += "monitor session %d destination %s\nend\n" % (session_number, destination)
output = self.execute_command(command)
return output
def show_span(self):
""" Show the active SPAN sessions on this switch """
regex = CiscoTelnetSession.regex_monitor_session + ' '
regex += CiscoTelnetSession.regex_monitor_srcdst
regex += CiscoTelnetSession.regex_interface
command = "show run | inc monitor session"
output = self.command_filter(command, regex)
return output
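# Hedged usage sketch (not part of the original module): host name, port and
# credentials below are placeholders. Shows the typical open/query/close cycle
# of CiscoTelnetSession.
def _example_show_vlans(host="switch1.example.com", username="admin", password="secret"):
    '''Illustrative only: dump the VLAN table of a single switch.'''
    session = CiscoTelnetSession()
    if not session.open(host, 23, username, password):
        return []
    vlans = session.show_vlan()
    for vlan in vlans:
        print vlan['vlanid'], vlan['vlanname'], vlan['vlanstatus']
    session.close()
    return vlans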
class CiscoSet(object):
""" This class represents a set of Cisco switches, connected in a network """
def __init__(self, username, password, start_device, port):
self.username = username
self.password = password
self.start_device = start_device
self.port = port
self.seen = {start_device}
self.blacklist = []
def get_serialize_filename(self):
""" Get the filename to serialize this set to """
filename = "discover-%s.json" % self.start_device
return filename
def load(self):
""" Load from file """
filename = self.get_serialize_filename()
seen = self.seen
try:
with open(filename, "r") as fd:
json_contents = fd.read()
json_decoded = json.loads(json_contents)
self.seen = set(json_decoded)
except IOError:
# Doesn't matter, we'll create it on save
pass
except ValueError:
# Restore backup of seen when we encounter problems during decoding
self.seen = seen
def save(self):
""" Save to file """
filename = self.get_serialize_filename()
json_contents = json.dumps(list(self.seen))
with open(filename, "w+") as fd:
fd.write(json_contents)
def set_blacklist(self, blacklist):
""" Don't connect to these hosts """
self.blacklist = blacklist
def discover_devices(self):
'''Discover all networking devices, using a depth-first search.'''
self.load() # Attempt to bootstrap using a saved json file
last_count = 0
while last_count != len(self.seen):
last_count = len(self.seen)
outputs = self.execute_on_all(CiscoTelnetSession.show_neighbors)
for output in outputs:
self.seen.add(output['deviceid'])
print "Seen: " + pprint.pformat(self.seen)
self.save() # Save what we've found for the next time
def execute_on_all(self, command, *args):
""" Execute command on all devices """
cpu_count = 25 # multiprocessing.cpu_count()
command_name = command.__name__
print "Process count %d" % cpu_count
pool = multiprocessing.Pool(processes=cpu_count)
results = [
pool.apply_async(
execute_on_device,
(host,
self.port,
self.username,
self.password,
command_name) +
args) for host in self.seen if host not in self.blacklist]
ret = []
for res in results:
try:
ret = ret + res.get()
except TypeError:
ret = ret + [res.get()]
return ret
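# Hedged usage sketch (not part of the original module): seed host and
# credentials are placeholders. discover_devices() walks "show cdp neighbors"
# until the set of seen devices stops growing; execute_on_all then fans the
# query out over a process pool.
def _example_discover_macs(seed="switch1.example.com", username="admin", password="secret"):
    '''Illustrative only: collect the MAC table of every discovered switch.'''
    fleet = CiscoSet(username, password, seed, 23)
    fleet.discover_devices()
    return fleet.execute_on_all(CiscoTelnetSession.show_mac_address_table)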
def uniq(seq):
"""Remove duplicates from list"""
s = Set(seq)
unique = list(s)
unique_sorted = sorted(unique)
return unique_sorted
def execute_on_device(hostname, port, username, password, command_name, *args):
""" Helper function for CiscoSet.discover_devices """
device = CiscoTelnetSession()
open_result = device.open(hostname, port, username, password)
# object_functions = dir(device)
command = getattr(device, command_name, None)
if command is None:
sys.stderr.write(
"execute_on_device: failed to look up function %s in CiscoTelnetSession class\n" %
command_name)
return None
ret = []
if open_result:
ret = command(*args)
else:
sys.stderr.write("execute_on_device: failed to connect to " + hostname + "\n")
return ret
| gpl-2.0 | -7,445,072,861,298,987,000 | 40.213287 | 106 | 0.61852 | false |
placher/pokeproject2 | devfiles/testprojectileimpact.py | 1 | 3561 | import sys
import pygame
import os
import inspect
from pygame.locals import *
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0,parentdir)
from scripts import player
from scripts import background
from scripts import projectile
class GameSpace:
''' Game Space Controller '''
def main(self):
''' ---------- Initialize Game Space ---------- '''
# initialize pygame enviroment
pygame.init()
# size of the screen
self.size = self.width, self.height = 960, 720
# define base color
self.gray = 128, 128, 128
# initialize display
self.screen = pygame.display.set_mode(self.size)
# initialize sprite movement speed
self.moveSpeed = 1
''' ---------- Initialize Game Objects ---------- '''
# background image
self.background = background.Background()
# player character
self.player = player.Player(1, self.size, self.moveSpeed)
# player 2 character
self.enemy = player.Player(2, self.size, self.moveSpeed)
self.enemy.rect = self.enemy.rect.move((300, 300))
self.enemy.lastDirection = "Up"
# player projectiles
self.projectiles = []
for i in range(8):
self.projectiles.append(projectile.Projectile(1, self.size, 2*self.moveSpeed))
# next projectile
self.nextProjectile = 0
# game clock
self.clock = pygame.time.Clock()
# sprite groups
self.playerSprites = pygame.sprite.RenderPlain((self.player))
self.enemySprites = pygame.sprite.RenderPlain((self.enemy))
self.playerProjectiles = pygame.sprite.RenderPlain(*self.projectiles)
''' ---------- Initiate Game Loop ---------- '''
# continue loop until game over
cont = True
while (cont):
''' ---------- Tick Speed Regulation ---------- '''
# update only 60 times per second
self.clock.tick(60)
''' ---------- Read User Inputs ---------- '''
for event in pygame.event.get():
if event.type == QUIT:
sys.exit()
elif event.type == KEYDOWN and event.key == K_SPACE:
# player attack animation
self.player.attack()
# fire next projectile
self.projectiles[self.nextProjectile].fire(self.player.rect.center, self.player.lastDirection)
# increment projectile counter
self.nextProjectile += 1
if self.nextProjectile == len(self.projectiles):
self.nextProjectile = 0
elif event.type == KEYDOWN:
self.player.keyPressed(event)
elif event.type == KEYUP:
self.player.keyReleased(event)
''' ---------- Call Tick (update) on Game Objects ---------- '''
# update sprites
self.playerSprites.update()
self.playerProjectiles.update()
self.enemySprites.update()
# check for collisions
for impact in pygame.sprite.groupcollide(self.playerProjectiles, self.enemySprites, False, False).keys():
impact.hitSomething()
if (self.enemy.hit() == 0):
# enemy defeated
print("\nYou Win!!\n")
cont = False
''' ---------- Update Screen ---------- '''
# clear screen
self.screen.fill(self.gray)
# draw background
self.screen.blit(self.background.image, self.background.rect)
# render all game objects
self.playerSprites.draw(self.screen)
self.playerProjectiles.draw(self.screen)
self.enemySprites.draw(self.screen)
# flip renderer
pygame.display.flip()
if __name__ == '__main__':
gs = GameSpace()
gs.main()
| gpl-3.0 | -4,463,376,372,307,577,300 | 29.965217 | 236 | 0.660769 | false |
yotamr/backslash-python | backslash/test.py | 1 | 1976 | from sentinels import NOTHING
from .api_object import APIObject
from .lazy_query import LazyQuery
class Test(APIObject):
def report_end(self, duration=NOTHING):
self.client.api.call_function('report_test_end', {'id': self.id, 'duration': duration})
def mark_skipped(self):
self.client.api.call_function('report_test_skipped', {'id': self.id})
def mark_interrupted(self):
self.client.api.call_function('report_test_interrupted', {'id': self.id})
def add_error(self):
return self.client.api.call_function('add_test_error', {'id': self.id})
def add_failure(self):
return self.client.api.call_function('add_test_failure', {'id': self.id})
def add_metadata(self, metadata):
return self.client.api.call_function('add_test_metadata', {'id': self.id, 'metadata': metadata})
def set_conclusion(self, conclusion):
return self.client.api.call_function('set_test_conclusion', {'id': self.id, 'conclusion': conclusion})
def add_error_data(self, exception, exception_type, traceback, timestamp=NOTHING):
return self.client.api.call_function('add_test_error_data', {'id': self.id,
'exception': exception,
'exception_type': exception_type,
'traceback': traceback,
'timestamp': timestamp
})
def edit_status(self, status):
return self.client.api.call_function('edit_test_status', {'id': self.id, 'status': status})
def query_errors(self):
"""Queries tests of the current session
:rtype: A lazy query object
"""
return LazyQuery(self.client, '/rest/errors', query_params={'test_id': self.id})
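# Hedged usage sketch (not part of the original file): how a Test object is
# obtained and the 'FAILURE' conclusion string are placeholders; the server
# defines the accepted conclusion values.
def _example_report(test):
    """Illustrative only: record a failure and close the test."""
    test.add_failure()
    test.set_conclusion('FAILURE')
    test.report_end(duration=12.5)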
| bsd-3-clause | 8,275,788,919,714,986,000 | 42.911111 | 110 | 0.552632 | false |
mensi/gittornado | gittornado/iowrapper.py | 1 | 16438 | # -*- coding: utf-8 -*-
#
# Copyright 2011 Manuel Stocker <[email protected]>
#
# This file is part of GitTornado.
#
# GitTornado is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GitTornado is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GitTornado. If not, see http://www.gnu.org/licenses
import subprocess
import zlib
import os
import os.path
import tornado.ioloop
import tornado.web
from gittornado.util import get_date_header
import logging
logger = logging.getLogger(__name__)
class FileWrapper(object):
"""Wraps a file and communicates with HTTP client"""
def __init__(self, request, filename, headers={}):
self.request = request
self.headers = headers.copy()
try:
self.file = open(filename, 'rb')
filesize = os.path.getsize(filename)
except (IOError, OSError):
raise tornado.web.HTTPError(500, 'Unable to open file')
self.headers.update({'Date': get_date_header(), 'Content-Length': str(filesize)})
self.request.write('HTTP/1.1 200 OK\r\n' + '\r\n'.join([ k + ': ' + v for k, v in self.headers.items()]) + '\r\n\r\n')
self.write_chunk()
def write_chunk(self):
data = self.file.read(8192)
if data == '':
# EOF
self.file.close()
self.request.finish()
return
# write data to client and continue when data has been written
self.request.write(data, self.write_chunk)
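# Hedged usage sketch (not part of the original module): the path and header
# values are placeholders. The wrapper owns the connection once constructed and
# calls request.finish() itself after the last chunk has been written.
def _example_serve_file(request):
    '''Illustrative only: stream a static file back to an HTTP client.'''
    FileWrapper(request, '/srv/repo.git/objects/info/packs',
                {'Content-Type': 'text/plain; charset=utf-8'})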
class ProcessWrapper(object):
"""Wraps a subprocess and communicates with HTTP client
Supports gzip compression and chunked transfer encoding
"""
reading_chunks = False
got_chunk = False
headers_sent = False
got_request = False
sent_chunks = False
number_of_8k_chunks_sent = 0
gzip_decompressor = None
gzip_header_seen = False
process_input_buffer = ''
output_prelude = ''
def __init__(self, request, command, headers, output_prelude=''):
"""Wrap a subprocess
:param request: tornado request object
:param command: command to be given to subprocess.Popen
:param headers: headers to be included on success
:param output_prelude: data to send before the output of the process
"""
self.request = request
self.headers = headers
self.output_prelude = output_prelude
# invoke process
self.process = subprocess.Popen(command, stdin=subprocess.PIPE, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
# check return status
if self.process.poll() is not None:
raise tornado.web.HTTPError(500, 'subprocess returned prematurely')
# get fds
self.fd_stdout = self.process.stdout.fileno()
self.fd_stderr = self.process.stderr.fileno()
self.fd_stdin = self.process.stdin.fileno()
# register with ioloop
self.ioloop = tornado.ioloop.IOLoop.instance()
self.ioloop.add_handler(self.fd_stdout, self._handle_stdout_event, self.ioloop.READ | self.ioloop.ERROR)
self.ioloop.add_handler(self.fd_stderr, self._handle_stderr_event, self.ioloop.READ | self.ioloop.ERROR)
self.ioloop.add_handler(self.fd_stdin, self._handle_stdin_event, self.ioloop.WRITE | self.ioloop.ERROR)
# is it gzipped? If yes, we initialize a zlib decompressobj
if 'gzip' in request.headers.get('Content-Encoding', '').lower(): # HTTP/1.1 RFC says value is case-insensitive
logger.debug("Gzipped request. Initializing decompressor.")
self.gzip_decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS) # skip the gzip header
if self.request.method == 'POST':
# Handle chunked encoding
if request.headers.get('Expect', None) == '100-continue' and request.headers.get('Transfer-Encoding', None) == 'chunked':
logger.debug('Request uses chunked transfer encoding. Sending 100 Continue.')
self.httpstream = self.request.connection.stream
self.request.write("HTTP/1.1 100 (Continue)\r\n\r\n")
self.read_chunks()
else:
logger.debug('Got complete request')
if self.gzip_decompressor:
assert request.body[:2] == '\x1f\x8b', "gzip header"
self.process_input_buffer = self.gzip_decompressor.decompress(request.body)
else:
self.process_input_buffer = request.body
self.got_request = True
else:
logger.debug("Method %s has no input", self.request.method)
self.got_request = True
def read_chunks(self):
"""Read chunks from the HTTP client"""
if self.reading_chunks and self.got_chunk:
# we got on the fast-path and directly read from the buffer.
# if we continue to recurse, this is going to blow up the stack.
# so instead return
#
# NOTE: This actually is unnecessary as long as tornado guarantees that
# ioloop.add_callback always gets dispatched via the main io loop
# and they don't introduce a fast-path similar to read_XY
logger.debug("Fast-Path detected, returning...")
return
while not self.got_request:
self.reading_chunks = True
self.got_chunk = False
# chunk starts with length, so read it. This will then subsequently also read the chunk
self.httpstream.read_until("\r\n", self._chunk_length)
self.reading_chunks = False
if self.got_chunk:
# the previous read hit the fast path and read from the buffer
# instead of going through the main polling loop. This means we
# should iteratively issue the next request
logger.debug("Fast-Path detected, iterating...")
continue
else:
break
# if we arrive here, we read the complete request or
# the ioloop has scheduled another call to read_chunks
return
def _chunk_length(self, data):
"""Received the chunk length"""
assert data[-2:] == "\r\n", "CRLF"
        length = data[:-2].split(';')[0]  # cut off optional chunk-extension parameters
length = int(length.strip(), 16) # length is in hex
if length:
logger.debug('Got chunk length: %d', length)
self.httpstream.read_bytes(length + 2, self._chunk_data)
else:
logger.debug('Got last chunk (size 0)')
self.got_request = True
# enable input write event so the handler can finish things up
# when it has written all pending data
self.ioloop.update_handler(self.fd_stdin, self.ioloop.WRITE | self.ioloop.ERROR)
def _chunk_data(self, data):
"""Received chunk data"""
assert data[-2:] == "\r\n", "CRLF"
if self.gzip_decompressor:
if not self.gzip_header_seen:
assert data[:2] == '\x1f\x8b', "gzip header"
self.gzip_header_seen = True
self.process_input_buffer += self.gzip_decompressor.decompress(data[:-2])
else:
self.process_input_buffer += data[:-2]
self.got_chunk = True
if self.process_input_buffer:
# since we now have data in the buffer, enable write events again
logger.debug('Got data in buffer, interested in writing to process again')
self.ioloop.update_handler(self.fd_stdin, self.ioloop.WRITE | self.ioloop.ERROR)
# do NOT call read_chunks directly. This is to give git a chance to consume input.
# we don't want to grow the buffer unnecessarily.
# Additionally, this should mitigate the stack explosion mentioned in read_chunks
self.ioloop.add_callback(self.read_chunks)
def _handle_stdin_event(self, fd, events):
"""Eventhandler for stdin"""
assert fd == self.fd_stdin
if events & self.ioloop.ERROR:
# An error at the end is expected since tornado maps HUP to ERROR
logger.debug('Error on stdin')
# ensure pipe is closed
if not self.process.stdin.closed:
self.process.stdin.close()
# remove handler
self.ioloop.remove_handler(self.fd_stdin)
# if all fds are closed, we can finish
return self._graceful_finish()
# got data ready
logger.debug('stdin ready for write')
if self.process_input_buffer:
count = os.write(fd, self.process_input_buffer)
logger.debug('Wrote first %d bytes of %d total', count, len(self.process_input_buffer))
self.process_input_buffer = self.process_input_buffer[count:]
if not self.process_input_buffer:
# consumed everything in the buffer
if self.got_request:
# we got the request and wrote everything to the process
# this means we can close stdin and stop handling events
# for it
logger.debug('Got complete request, closing stdin')
self.process.stdin.close()
self.ioloop.remove_handler(fd)
else:
# There is more data bound to come from the client
# so just disable write events for the moment until
# we got more to write
logger.debug('Not interested in write events on stdin anymore')
self.ioloop.update_handler(fd, self.ioloop.ERROR)
def _handle_stdout_event(self, fd, events):
"""Eventhandler for stdout"""
assert fd == self.fd_stdout
if events & self.ioloop.READ:
# got data ready to read
data = ''
# Now basically we have two cases: either the client supports
# HTTP/1.1 in which case we can stream the answer in chunked mode
# in HTTP/1.0 we need to send a content-length and thus buffer the complete output
if self.request.supports_http_1_1():
if not self.headers_sent:
self.sent_chunks = True
self.headers.update({'Date': get_date_header(), 'Transfer-Encoding': 'chunked'})
data = 'HTTP/1.1 200 OK\r\n' + '\r\n'.join([ k + ': ' + v for k, v in self.headers.items()]) + '\r\n\r\n'
if self.output_prelude:
data += hex(len(self.output_prelude))[2:] + "\r\n" # cut off 0x
data += self.output_prelude + "\r\n"
self.headers_sent = True
payload = os.read(fd, 8192)
if events & self.ioloop.ERROR: # there might be data remaining in the buffer if we got HUP, get it all
remainder = True
while remainder != '': # until EOF
remainder = os.read(fd, 8192)
payload += remainder
data += hex(len(payload))[2:] + "\r\n" # cut off 0x
data += payload + "\r\n"
else:
if not self.headers_sent:
# Use the over-eager blocking read that will get everything until we hit EOF
# this might actually be somewhat dangerous as noted in the subprocess documentation
# and lead to a deadlock. This is only a legacy mode for HTTP/1.0 clients anyway,
# so we might want to remove it entirely anyways
payload = self.process.stdout.read()
self.headers.update({'Date': get_date_header(), 'Content-Length': str(len(payload))})
data = 'HTTP/1.0 200 OK\r\n' + '\r\n'.join([ k + ': ' + v for k, v in self.headers.items()]) + '\r\n\r\n'
self.headers_sent = True
data += self.output_prelude + payload
else:
# this is actually somewhat illegal as it messes with content-length but
# it shouldn't happen anyways, as the read above should have read anything
# python docs say this can happen on ttys...
logger.error("This should not happen")
data = self.process.stdout.read()
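            # 8200 = 6-byte chunk header ("2000\r\n") + 8192 payload bytes + trailing CRLF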
if len(data) == 8200:
self.number_of_8k_chunks_sent += 1
else:
if self.number_of_8k_chunks_sent > 0:
logger.debug('Sent %d * 8192 bytes', self.number_of_8k_chunks_sent)
self.number_of_8k_chunks_sent = 0
logger.debug('Sending stdout to client %d bytes: %r', len(data), data[:20])
self.request.write(data)
# now we can also have an error. This is because tornado maps HUP onto error
# therefore, no elif here!
if events & self.ioloop.ERROR:
logger.debug('Error on stdout')
# ensure file is closed
if not self.process.stdout.closed:
self.process.stdout.close()
# remove handler
self.ioloop.remove_handler(self.fd_stdout)
# if all fds are closed, we can finish
return self._graceful_finish()
def _handle_stderr_event(self, fd, events):
"""Eventhandler for stderr"""
assert fd == self.fd_stderr
if events & self.ioloop.READ:
# got data ready
if not self.headers_sent:
payload = self.process.stderr.read()
data = 'HTTP/1.1 500 Internal Server Error\r\nDate: %s\r\nContent-Length: %d\r\n\r\n' % (get_date_header(), len(payload))
self.headers_sent = True
data += payload
else:
# see stdout
logger.error("This should not happen (stderr)")
data = self.process.stderr.read()
logger.debug('Sending stderr to client: %r', data)
self.request.write(data)
if events & self.ioloop.ERROR:
logger.debug('Error on stderr')
# ensure file is closed
if not self.process.stderr.closed:
self.process.stderr.close()
# remove handler
self.ioloop.remove_handler(self.fd_stderr)
# if all fds are closed, we can finish
return self._graceful_finish()
def _graceful_finish(self):
"""Detect if process has closed pipes and we can finish"""
if not self.process.stdout.closed or not self.process.stderr.closed:
return # stdout/stderr still open
if not self.process.stdin.closed:
self.process.stdin.close()
if self.number_of_8k_chunks_sent > 0:
logger.debug('Sent %d * 8k chunks', self.number_of_8k_chunks_sent)
logger.debug("Finishing up. Process poll: %r", self.process.poll())
if not self.headers_sent:
retval = self.process.poll()
if retval != 0:
logger.warning("Empty response. Git return value: " + str(retval))
payload = "Did not produce any data. Errorcode: " + str(retval)
data = 'HTTP/1.1 500 Internal Server Error\r\nDate: %s\r\nContent-Length: %d\r\n\r\n' % (get_date_header(), len(payload))
self.headers_sent = True
data += payload
self.request.write(data)
else:
data = 'HTTP/1.1 200 Ok\r\nDate: %s\r\nContent-Length: 0\r\n\r\n' % get_date_header()
self.headers_sent = True
self.request.write(data)
# if we are in chunked mode, send end chunk with length 0
elif self.sent_chunks:
logger.debug("End chunk")
self.request.write("0\r\n")
            # trailers (additional headers) could be sent here before the final CRLF
self.request.write("\r\n")
self.request.finish()
| gpl-3.0 | 4,464,436,423,476,009,500 | 40.510101 | 137 | 0.582309 | false |
arokem/scipy | scipy/fft/_pocketfft/tests/test_basic.py | 1 | 35101 | # Created by Pearu Peterson, September 2002
from __future__ import division, print_function, absolute_import
from numpy.testing import (assert_, assert_equal, assert_array_almost_equal,
assert_array_almost_equal_nulp, assert_array_less,
assert_allclose)
import pytest
from pytest import raises as assert_raises
from scipy.fft._pocketfft import (ifft, fft, fftn, ifftn,
rfft, irfft, rfftn, irfftn, fft2)
from numpy import (arange, add, array, asarray, zeros, dot, exp, pi,
swapaxes, cdouble)
import numpy as np
import numpy.fft
from numpy.random import rand
# "large" composite numbers supported by FFT._PYPOCKETFFT
LARGE_COMPOSITE_SIZES = [
2**13,
2**5 * 3**5,
2**3 * 3**3 * 5**2,
]
SMALL_COMPOSITE_SIZES = [
2,
2*3*5,
2*2*3*3,
]
# prime
LARGE_PRIME_SIZES = [
2011
]
SMALL_PRIME_SIZES = [
29
]
def _assert_close_in_norm(x, y, rtol, size, rdt):
# helper function for testing
err_msg = "size: %s rdt: %s" % (size, rdt)
assert_array_less(np.linalg.norm(x - y), rtol*np.linalg.norm(x), err_msg)
def random(size):
return rand(*size)
def get_mat(n):
data = arange(n)
data = add.outer(data, data)
return data
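# Reference O(n**2) DFT implementations used to validate the FFT results below:
#   forward:  y[k] = sum_m x[m] * exp(-2j*pi*k*m/n)
#   inverse:  y[k] = (1/n) * sum_m x[m] * exp(+2j*pi*k*m/n)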
def direct_dft(x):
x = asarray(x)
n = len(x)
y = zeros(n, dtype=cdouble)
w = -arange(n)*(2j*pi/n)
for i in range(n):
y[i] = dot(exp(i*w), x)
return y
def direct_idft(x):
x = asarray(x)
n = len(x)
y = zeros(n, dtype=cdouble)
w = arange(n)*(2j*pi/n)
for i in range(n):
y[i] = dot(exp(i*w), x)/n
return y
def direct_dftn(x):
x = asarray(x)
for axis in range(len(x.shape)):
x = fft(x, axis=axis)
return x
def direct_idftn(x):
x = asarray(x)
for axis in range(len(x.shape)):
x = ifft(x, axis=axis)
return x
def direct_rdft(x):
x = asarray(x)
n = len(x)
w = -arange(n)*(2j*pi/n)
y = zeros(n//2+1, dtype=cdouble)
for i in range(n//2+1):
y[i] = dot(exp(i*w), x)
return y
def direct_irdft(x, n):
x = asarray(x)
x1 = zeros(n, dtype=cdouble)
for i in range(n//2+1):
x1[i] = x[i]
if i > 0 and 2*i < n:
x1[n-i] = np.conj(x[i])
return direct_idft(x1).real
def direct_rdftn(x):
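    # real-input FFT over the last axis, then complex FFTs over all remaining axes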
return fftn(rfft(x), axes=range(x.ndim - 1))
class _TestFFTBase(object):
def setup_method(self):
self.cdt = None
self.rdt = None
np.random.seed(1234)
def test_definition(self):
x = np.array([1,2,3,4+1j,1,2,3,4+2j], dtype=self.cdt)
y = fft(x)
assert_equal(y.dtype, self.cdt)
y1 = direct_dft(x)
assert_array_almost_equal(y,y1)
x = np.array([1,2,3,4+0j,5], dtype=self.cdt)
assert_array_almost_equal(fft(x),direct_dft(x))
def test_n_argument_real(self):
x1 = np.array([1,2,3,4], dtype=self.rdt)
x2 = np.array([1,2,3,4], dtype=self.rdt)
y = fft([x1,x2],n=4)
assert_equal(y.dtype, self.cdt)
assert_equal(y.shape,(2,4))
assert_array_almost_equal(y[0],direct_dft(x1))
assert_array_almost_equal(y[1],direct_dft(x2))
def _test_n_argument_complex(self):
x1 = np.array([1,2,3,4+1j], dtype=self.cdt)
x2 = np.array([1,2,3,4+1j], dtype=self.cdt)
y = fft([x1,x2],n=4)
assert_equal(y.dtype, self.cdt)
assert_equal(y.shape,(2,4))
assert_array_almost_equal(y[0],direct_dft(x1))
assert_array_almost_equal(y[1],direct_dft(x2))
def test_djbfft(self):
for i in range(2,14):
n = 2**i
x = np.arange(n)
y = fft(x.astype(complex))
y2 = numpy.fft.fft(x)
assert_array_almost_equal(y,y2)
y = fft(x)
assert_array_almost_equal(y,y2)
def test_invalid_sizes(self):
assert_raises(ValueError, fft, [])
assert_raises(ValueError, fft, [[1,1],[2,2]], -5)
class TestLongDoubleFFT(_TestFFTBase):
def setup_method(self):
self.cdt = np.longcomplex
self.rdt = np.longdouble
class TestDoubleFFT(_TestFFTBase):
def setup_method(self):
self.cdt = np.cdouble
self.rdt = np.double
class TestSingleFFT(_TestFFTBase):
def setup_method(self):
self.cdt = np.complex64
self.rdt = np.float32
class TestFloat16FFT(object):
def test_1_argument_real(self):
x1 = np.array([1, 2, 3, 4], dtype=np.float16)
y = fft(x1, n=4)
assert_equal(y.dtype, np.complex64)
assert_equal(y.shape, (4, ))
assert_array_almost_equal(y, direct_dft(x1.astype(np.float32)))
def test_n_argument_real(self):
x1 = np.array([1, 2, 3, 4], dtype=np.float16)
x2 = np.array([1, 2, 3, 4], dtype=np.float16)
y = fft([x1, x2], n=4)
assert_equal(y.dtype, np.complex64)
assert_equal(y.shape, (2, 4))
assert_array_almost_equal(y[0], direct_dft(x1.astype(np.float32)))
assert_array_almost_equal(y[1], direct_dft(x2.astype(np.float32)))
class _TestIFFTBase(object):
def setup_method(self):
np.random.seed(1234)
def test_definition(self):
x = np.array([1,2,3,4+1j,1,2,3,4+2j], self.cdt)
y = ifft(x)
y1 = direct_idft(x)
assert_equal(y.dtype, self.cdt)
assert_array_almost_equal(y,y1)
x = np.array([1,2,3,4+0j,5], self.cdt)
assert_array_almost_equal(ifft(x),direct_idft(x))
def test_definition_real(self):
x = np.array([1,2,3,4,1,2,3,4], self.rdt)
y = ifft(x)
assert_equal(y.dtype, self.cdt)
y1 = direct_idft(x)
assert_array_almost_equal(y,y1)
x = np.array([1,2,3,4,5], dtype=self.rdt)
assert_equal(y.dtype, self.cdt)
assert_array_almost_equal(ifft(x),direct_idft(x))
def test_djbfft(self):
for i in range(2,14):
n = 2**i
x = np.arange(n)
y = ifft(x.astype(self.cdt))
y2 = numpy.fft.ifft(x)
assert_allclose(y,y2, rtol=self.rtol, atol=self.atol)
y = ifft(x)
assert_allclose(y,y2, rtol=self.rtol, atol=self.atol)
def test_random_complex(self):
for size in [1,51,111,100,200,64,128,256,1024]:
x = random([size]).astype(self.cdt)
x = random([size]).astype(self.cdt) + 1j*x
y1 = ifft(fft(x))
y2 = fft(ifft(x))
assert_equal(y1.dtype, self.cdt)
assert_equal(y2.dtype, self.cdt)
assert_array_almost_equal(y1, x)
assert_array_almost_equal(y2, x)
def test_random_real(self):
for size in [1,51,111,100,200,64,128,256,1024]:
x = random([size]).astype(self.rdt)
y1 = ifft(fft(x))
y2 = fft(ifft(x))
assert_equal(y1.dtype, self.cdt)
assert_equal(y2.dtype, self.cdt)
assert_array_almost_equal(y1, x)
assert_array_almost_equal(y2, x)
def test_size_accuracy(self):
# Sanity check for the accuracy for prime and non-prime sized inputs
for size in LARGE_COMPOSITE_SIZES + LARGE_PRIME_SIZES:
np.random.seed(1234)
x = np.random.rand(size).astype(self.rdt)
y = ifft(fft(x))
_assert_close_in_norm(x, y, self.rtol, size, self.rdt)
y = fft(ifft(x))
_assert_close_in_norm(x, y, self.rtol, size, self.rdt)
x = (x + 1j*np.random.rand(size)).astype(self.cdt)
y = ifft(fft(x))
_assert_close_in_norm(x, y, self.rtol, size, self.rdt)
y = fft(ifft(x))
_assert_close_in_norm(x, y, self.rtol, size, self.rdt)
def test_invalid_sizes(self):
assert_raises(ValueError, ifft, [])
assert_raises(ValueError, ifft, [[1,1],[2,2]], -5)
@pytest.mark.skipif(np.longdouble is np.float64,
reason="Long double is aliased to double")
class TestLongDoubleIFFT(_TestIFFTBase):
def setup_method(self):
self.cdt = np.longcomplex
self.rdt = np.longdouble
self.rtol = 1e-10
self.atol = 1e-10
class TestDoubleIFFT(_TestIFFTBase):
def setup_method(self):
self.cdt = np.cdouble
self.rdt = np.double
self.rtol = 1e-10
self.atol = 1e-10
class TestSingleIFFT(_TestIFFTBase):
def setup_method(self):
self.cdt = np.complex64
self.rdt = np.float32
self.rtol = 1e-5
self.atol = 1e-4
class _TestRFFTBase(object):
def setup_method(self):
np.random.seed(1234)
def test_definition(self):
for t in [[1, 2, 3, 4, 1, 2, 3, 4], [1, 2, 3, 4, 1, 2, 3, 4, 5]]:
x = np.array(t, dtype=self.rdt)
y = rfft(x)
y1 = direct_rdft(x)
assert_array_almost_equal(y,y1)
assert_equal(y.dtype, self.cdt)
def test_djbfft(self):
for i in range(2,14):
n = 2**i
x = np.arange(n)
y1 = np.fft.rfft(x)
y = rfft(x)
assert_array_almost_equal(y,y1)
def test_invalid_sizes(self):
assert_raises(ValueError, rfft, [])
assert_raises(ValueError, rfft, [[1,1],[2,2]], -5)
def test_complex_input(self):
x = np.zeros(10, dtype=self.cdt)
with assert_raises(TypeError, match="x must be a real sequence"):
rfft(x)
# See gh-5790
class MockSeries(object):
def __init__(self, data):
self.data = np.asarray(data)
def __getattr__(self, item):
try:
return getattr(self.data, item)
except AttributeError:
raise AttributeError(("'MockSeries' object "
"has no attribute '{attr}'".
format(attr=item)))
def test_non_ndarray_with_dtype(self):
x = np.array([1., 2., 3., 4., 5.])
xs = _TestRFFTBase.MockSeries(x)
expected = [1, 2, 3, 4, 5]
rfft(xs)
# Data should not have been overwritten
assert_equal(x, expected)
assert_equal(xs.data, expected)
@pytest.mark.skipif(np.longfloat is np.float64,
reason="Long double is aliased to double")
class TestRFFTLongDouble(_TestRFFTBase):
def setup_method(self):
self.cdt = np.longcomplex
self.rdt = np.longfloat
class TestRFFTDouble(_TestRFFTBase):
def setup_method(self):
self.cdt = np.cdouble
self.rdt = np.double
class TestRFFTSingle(_TestRFFTBase):
def setup_method(self):
self.cdt = np.complex64
self.rdt = np.float32
class _TestIRFFTBase(object):
def setup_method(self):
np.random.seed(1234)
def test_definition(self):
x1 = [1,2+3j,4+1j,1+2j,3+4j]
x1_1 = [1,2+3j,4+1j,2+3j,4,2-3j,4-1j,2-3j]
x1 = x1_1[:5]
x2_1 = [1,2+3j,4+1j,2+3j,4+5j,4-5j,2-3j,4-1j,2-3j]
x2 = x2_1[:5]
def _test(x, xr):
y = irfft(np.array(x, dtype=self.cdt), n=len(xr))
y1 = direct_irdft(x, len(xr))
assert_equal(y.dtype, self.rdt)
assert_array_almost_equal(y,y1, decimal=self.ndec)
assert_array_almost_equal(y,ifft(xr), decimal=self.ndec)
_test(x1, x1_1)
_test(x2, x2_1)
def test_djbfft(self):
for i in range(2,14):
n = 2**i
x = np.arange(-1, n, 2) + 1j * np.arange(0, n+1, 2)
x[0] = 0
if n % 2 == 0:
x[-1] = np.real(x[-1])
y1 = np.fft.irfft(x)
y = irfft(x)
assert_array_almost_equal(y,y1)
def test_random_real(self):
for size in [1,51,111,100,200,64,128,256,1024]:
x = random([size]).astype(self.rdt)
y1 = irfft(rfft(x), n=size)
y2 = rfft(irfft(x, n=(size*2-1)))
assert_equal(y1.dtype, self.rdt)
assert_equal(y2.dtype, self.cdt)
assert_array_almost_equal(y1, x, decimal=self.ndec,
err_msg="size=%d" % size)
assert_array_almost_equal(y2, x, decimal=self.ndec,
err_msg="size=%d" % size)
def test_size_accuracy(self):
# Sanity check for the accuracy for prime and non-prime sized inputs
if self.rdt == np.float32:
rtol = 1e-5
elif self.rdt == np.float64:
rtol = 1e-10
for size in LARGE_COMPOSITE_SIZES + LARGE_PRIME_SIZES:
np.random.seed(1234)
x = np.random.rand(size).astype(self.rdt)
y = irfft(rfft(x), len(x))
_assert_close_in_norm(x, y, rtol, size, self.rdt)
y = rfft(irfft(x, 2 * len(x) - 1))
_assert_close_in_norm(x, y, rtol, size, self.rdt)
def test_invalid_sizes(self):
assert_raises(ValueError, irfft, [])
assert_raises(ValueError, irfft, [[1,1],[2,2]], -5)
# self.ndec is bogus; we should have an assert_array_approx_equal that checks the
# number of significant digits
@pytest.mark.skipif(np.longfloat is np.float64,
reason="Long double is aliased to double")
class TestIRFFTLongDouble(_TestIRFFTBase):
def setup_method(self):
self.cdt = np.cdouble
self.rdt = np.double
self.ndec = 14
class TestIRFFTDouble(_TestIRFFTBase):
def setup_method(self):
self.cdt = np.cdouble
self.rdt = np.double
self.ndec = 14
class TestIRFFTSingle(_TestIRFFTBase):
def setup_method(self):
self.cdt = np.complex64
self.rdt = np.float32
self.ndec = 5
class Testfft2(object):
def setup_method(self):
np.random.seed(1234)
def test_regression_244(self):
"""FFT returns wrong result with axes parameter."""
# fftn (and hence fft2) used to break when both axes and shape were
# used
x = numpy.ones((4, 4, 2))
y = fft2(x, s=(8, 8), axes=(-3, -2))
y_r = numpy.fft.fftn(x, s=(8, 8), axes=(-3, -2))
assert_array_almost_equal(y, y_r)
def test_invalid_sizes(self):
assert_raises(ValueError, fft2, [[]])
assert_raises(ValueError, fft2, [[1, 1], [2, 2]], (4, -3))
class TestFftnSingle(object):
def setup_method(self):
np.random.seed(1234)
def test_definition(self):
x = [[1, 2, 3],
[4, 5, 6],
[7, 8, 9]]
y = fftn(np.array(x, np.float32))
assert_(y.dtype == np.complex64,
msg="double precision output with single precision")
y_r = np.array(fftn(x), np.complex64)
assert_array_almost_equal_nulp(y, y_r)
@pytest.mark.parametrize('size', SMALL_COMPOSITE_SIZES + SMALL_PRIME_SIZES)
def test_size_accuracy_small(self, size):
x = np.random.rand(size, size) + 1j*np.random.rand(size, size)
y1 = fftn(x.real.astype(np.float32))
y2 = fftn(x.real.astype(np.float64)).astype(np.complex64)
assert_equal(y1.dtype, np.complex64)
assert_array_almost_equal_nulp(y1, y2, 2000)
@pytest.mark.parametrize('size', LARGE_COMPOSITE_SIZES + LARGE_PRIME_SIZES)
def test_size_accuracy_large(self, size):
x = np.random.rand(size, 3) + 1j*np.random.rand(size, 3)
y1 = fftn(x.real.astype(np.float32))
y2 = fftn(x.real.astype(np.float64)).astype(np.complex64)
assert_equal(y1.dtype, np.complex64)
assert_array_almost_equal_nulp(y1, y2, 2000)
def test_definition_float16(self):
x = [[1, 2, 3],
[4, 5, 6],
[7, 8, 9]]
y = fftn(np.array(x, np.float16))
assert_equal(y.dtype, np.complex64)
y_r = np.array(fftn(x), np.complex64)
assert_array_almost_equal_nulp(y, y_r)
@pytest.mark.parametrize('size', SMALL_COMPOSITE_SIZES + SMALL_PRIME_SIZES)
def test_float16_input_small(self, size):
x = np.random.rand(size, size) + 1j*np.random.rand(size, size)
y1 = fftn(x.real.astype(np.float16))
y2 = fftn(x.real.astype(np.float64)).astype(np.complex64)
assert_equal(y1.dtype, np.complex64)
assert_array_almost_equal_nulp(y1, y2, 5e5)
@pytest.mark.parametrize('size', LARGE_COMPOSITE_SIZES + LARGE_PRIME_SIZES)
def test_float16_input_large(self, size):
x = np.random.rand(size, 3) + 1j*np.random.rand(size, 3)
y1 = fftn(x.real.astype(np.float16))
y2 = fftn(x.real.astype(np.float64)).astype(np.complex64)
assert_equal(y1.dtype, np.complex64)
assert_array_almost_equal_nulp(y1, y2, 2e6)
class TestFftn(object):
def setup_method(self):
np.random.seed(1234)
def test_definition(self):
x = [[1, 2, 3],
[4, 5, 6],
[7, 8, 9]]
y = fftn(x)
assert_array_almost_equal(y, direct_dftn(x))
x = random((20, 26))
assert_array_almost_equal(fftn(x), direct_dftn(x))
x = random((5, 4, 3, 20))
assert_array_almost_equal(fftn(x), direct_dftn(x))
def test_axes_argument(self):
# plane == ji_plane, x== kji_space
plane1 = [[1, 2, 3],
[4, 5, 6],
[7, 8, 9]]
plane2 = [[10, 11, 12],
[13, 14, 15],
[16, 17, 18]]
plane3 = [[19, 20, 21],
[22, 23, 24],
[25, 26, 27]]
ki_plane1 = [[1, 2, 3],
[10, 11, 12],
[19, 20, 21]]
ki_plane2 = [[4, 5, 6],
[13, 14, 15],
[22, 23, 24]]
ki_plane3 = [[7, 8, 9],
[16, 17, 18],
[25, 26, 27]]
jk_plane1 = [[1, 10, 19],
[4, 13, 22],
[7, 16, 25]]
jk_plane2 = [[2, 11, 20],
[5, 14, 23],
[8, 17, 26]]
jk_plane3 = [[3, 12, 21],
[6, 15, 24],
[9, 18, 27]]
kj_plane1 = [[1, 4, 7],
[10, 13, 16], [19, 22, 25]]
kj_plane2 = [[2, 5, 8],
[11, 14, 17], [20, 23, 26]]
kj_plane3 = [[3, 6, 9],
[12, 15, 18], [21, 24, 27]]
ij_plane1 = [[1, 4, 7],
[2, 5, 8],
[3, 6, 9]]
ij_plane2 = [[10, 13, 16],
[11, 14, 17],
[12, 15, 18]]
ij_plane3 = [[19, 22, 25],
[20, 23, 26],
[21, 24, 27]]
ik_plane1 = [[1, 10, 19],
[2, 11, 20],
[3, 12, 21]]
ik_plane2 = [[4, 13, 22],
[5, 14, 23],
[6, 15, 24]]
ik_plane3 = [[7, 16, 25],
[8, 17, 26],
[9, 18, 27]]
ijk_space = [jk_plane1, jk_plane2, jk_plane3]
ikj_space = [kj_plane1, kj_plane2, kj_plane3]
jik_space = [ik_plane1, ik_plane2, ik_plane3]
jki_space = [ki_plane1, ki_plane2, ki_plane3]
kij_space = [ij_plane1, ij_plane2, ij_plane3]
x = array([plane1, plane2, plane3])
assert_array_almost_equal(fftn(x),
fftn(x, axes=(-3, -2, -1))) # kji_space
assert_array_almost_equal(fftn(x), fftn(x, axes=(0, 1, 2)))
assert_array_almost_equal(fftn(x, axes=(0, 2)), fftn(x, axes=(0, -1)))
y = fftn(x, axes=(2, 1, 0)) # ijk_space
assert_array_almost_equal(swapaxes(y, -1, -3), fftn(ijk_space))
y = fftn(x, axes=(2, 0, 1)) # ikj_space
assert_array_almost_equal(swapaxes(swapaxes(y, -1, -3), -1, -2),
fftn(ikj_space))
y = fftn(x, axes=(1, 2, 0)) # jik_space
assert_array_almost_equal(swapaxes(swapaxes(y, -1, -3), -3, -2),
fftn(jik_space))
y = fftn(x, axes=(1, 0, 2)) # jki_space
assert_array_almost_equal(swapaxes(y, -2, -3), fftn(jki_space))
y = fftn(x, axes=(0, 2, 1)) # kij_space
assert_array_almost_equal(swapaxes(y, -2, -1), fftn(kij_space))
y = fftn(x, axes=(-2, -1)) # ji_plane
assert_array_almost_equal(fftn(plane1), y[0])
assert_array_almost_equal(fftn(plane2), y[1])
assert_array_almost_equal(fftn(plane3), y[2])
y = fftn(x, axes=(1, 2)) # ji_plane
assert_array_almost_equal(fftn(plane1), y[0])
assert_array_almost_equal(fftn(plane2), y[1])
assert_array_almost_equal(fftn(plane3), y[2])
y = fftn(x, axes=(-3, -2)) # kj_plane
assert_array_almost_equal(fftn(x[:, :, 0]), y[:, :, 0])
assert_array_almost_equal(fftn(x[:, :, 1]), y[:, :, 1])
assert_array_almost_equal(fftn(x[:, :, 2]), y[:, :, 2])
y = fftn(x, axes=(-3, -1)) # ki_plane
assert_array_almost_equal(fftn(x[:, 0, :]), y[:, 0, :])
assert_array_almost_equal(fftn(x[:, 1, :]), y[:, 1, :])
assert_array_almost_equal(fftn(x[:, 2, :]), y[:, 2, :])
y = fftn(x, axes=(-1, -2)) # ij_plane
assert_array_almost_equal(fftn(ij_plane1), swapaxes(y[0], -2, -1))
assert_array_almost_equal(fftn(ij_plane2), swapaxes(y[1], -2, -1))
assert_array_almost_equal(fftn(ij_plane3), swapaxes(y[2], -2, -1))
y = fftn(x, axes=(-1, -3)) # ik_plane
assert_array_almost_equal(fftn(ik_plane1),
swapaxes(y[:, 0, :], -1, -2))
assert_array_almost_equal(fftn(ik_plane2),
swapaxes(y[:, 1, :], -1, -2))
assert_array_almost_equal(fftn(ik_plane3),
swapaxes(y[:, 2, :], -1, -2))
y = fftn(x, axes=(-2, -3)) # jk_plane
assert_array_almost_equal(fftn(jk_plane1),
swapaxes(y[:, :, 0], -1, -2))
assert_array_almost_equal(fftn(jk_plane2),
swapaxes(y[:, :, 1], -1, -2))
assert_array_almost_equal(fftn(jk_plane3),
swapaxes(y[:, :, 2], -1, -2))
y = fftn(x, axes=(-1,)) # i_line
for i in range(3):
for j in range(3):
assert_array_almost_equal(fft(x[i, j, :]), y[i, j, :])
y = fftn(x, axes=(-2,)) # j_line
for i in range(3):
for j in range(3):
assert_array_almost_equal(fft(x[i, :, j]), y[i, :, j])
y = fftn(x, axes=(0,)) # k_line
for i in range(3):
for j in range(3):
assert_array_almost_equal(fft(x[:, i, j]), y[:, i, j])
y = fftn(x, axes=()) # point
assert_array_almost_equal(y, x)
def test_shape_argument(self):
small_x = [[1, 2, 3],
[4, 5, 6]]
large_x1 = [[1, 2, 3, 0],
[4, 5, 6, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]]
y = fftn(small_x, s=(4, 4))
assert_array_almost_equal(y, fftn(large_x1))
y = fftn(small_x, s=(3, 4))
assert_array_almost_equal(y, fftn(large_x1[:-1]))
def test_shape_axes_argument(self):
small_x = [[1, 2, 3],
[4, 5, 6],
[7, 8, 9]]
large_x1 = array([[1, 2, 3, 0],
[4, 5, 6, 0],
[7, 8, 9, 0],
[0, 0, 0, 0]])
y = fftn(small_x, s=(4, 4), axes=(-2, -1))
assert_array_almost_equal(y, fftn(large_x1))
y = fftn(small_x, s=(4, 4), axes=(-1, -2))
assert_array_almost_equal(y, swapaxes(
fftn(swapaxes(large_x1, -1, -2)), -1, -2))
def test_shape_axes_argument2(self):
# Change shape of the last axis
x = numpy.random.random((10, 5, 3, 7))
y = fftn(x, axes=(-1,), s=(8,))
assert_array_almost_equal(y, fft(x, axis=-1, n=8))
# Change shape of an arbitrary axis which is not the last one
x = numpy.random.random((10, 5, 3, 7))
y = fftn(x, axes=(-2,), s=(8,))
assert_array_almost_equal(y, fft(x, axis=-2, n=8))
# Change shape of axes: cf #244, where shape and axes were mixed up
x = numpy.random.random((4, 4, 2))
y = fftn(x, axes=(-3, -2), s=(8, 8))
assert_array_almost_equal(y,
numpy.fft.fftn(x, axes=(-3, -2), s=(8, 8)))
def test_shape_argument_more(self):
x = zeros((4, 4, 2))
with assert_raises(ValueError,
match="shape requires more axes than are present"):
fftn(x, s=(8, 8, 2, 1))
def test_invalid_sizes(self):
with assert_raises(ValueError,
match="invalid number of data points"
r" \(\[1, 0\]\) specified"):
fftn([[]])
with assert_raises(ValueError,
match="invalid number of data points"
r" \(\[4, -3\]\) specified"):
fftn([[1, 1], [2, 2]], (4, -3))
def test_no_axes(self):
x = numpy.random.random((2,2,2))
assert_allclose(fftn(x, axes=[]), x, atol=1e-7)
class TestIfftn(object):
dtype = None
cdtype = None
def setup_method(self):
np.random.seed(1234)
@pytest.mark.parametrize('dtype,cdtype,maxnlp',
[(np.float64, np.complex128, 2000),
(np.float32, np.complex64, 3500)])
def test_definition(self, dtype, cdtype, maxnlp):
x = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]], dtype=dtype)
y = ifftn(x)
assert_equal(y.dtype, cdtype)
assert_array_almost_equal_nulp(y, direct_idftn(x), maxnlp)
x = random((20, 26))
assert_array_almost_equal_nulp(ifftn(x), direct_idftn(x), maxnlp)
x = random((5, 4, 3, 20))
assert_array_almost_equal_nulp(ifftn(x), direct_idftn(x), maxnlp)
@pytest.mark.parametrize('maxnlp', [2000, 3500])
@pytest.mark.parametrize('size', [1, 2, 51, 32, 64, 92])
def test_random_complex(self, maxnlp, size):
x = random([size, size]) + 1j*random([size, size])
assert_array_almost_equal_nulp(ifftn(fftn(x)), x, maxnlp)
assert_array_almost_equal_nulp(fftn(ifftn(x)), x, maxnlp)
def test_invalid_sizes(self):
with assert_raises(ValueError,
match="invalid number of data points"
r" \(\[1, 0\]\) specified"):
ifftn([[]])
with assert_raises(ValueError,
match="invalid number of data points"
r" \(\[4, -3\]\) specified"):
ifftn([[1, 1], [2, 2]], (4, -3))
def test_no_axes(self):
x = numpy.random.random((2,2,2))
assert_allclose(ifftn(x, axes=[]), x, atol=1e-7)
class TestRfftn(object):
dtype = None
cdtype = None
def setup_method(self):
np.random.seed(1234)
@pytest.mark.parametrize('dtype,cdtype,maxnlp',
[(np.float64, np.complex128, 2000),
(np.float32, np.complex64, 3500)])
def test_definition(self, dtype, cdtype, maxnlp):
x = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]], dtype=dtype)
y = rfftn(x)
assert_equal(y.dtype, cdtype)
assert_array_almost_equal_nulp(y, direct_rdftn(x), maxnlp)
x = random((20, 26))
assert_array_almost_equal_nulp(rfftn(x), direct_rdftn(x), maxnlp)
x = random((5, 4, 3, 20))
assert_array_almost_equal_nulp(rfftn(x), direct_rdftn(x), maxnlp)
@pytest.mark.parametrize('size', [1, 2, 51, 32, 64, 92])
def test_random(self, size):
x = random([size, size])
assert_allclose(irfftn(rfftn(x), x.shape), x, atol=1e-10)
@pytest.mark.parametrize('func', [rfftn, irfftn])
def test_invalid_sizes(self, func):
with assert_raises(ValueError,
match="invalid number of data points"
r" \(\[1, 0\]\) specified"):
func([[]])
with assert_raises(ValueError,
match="invalid number of data points"
r" \(\[4, -3\]\) specified"):
func([[1, 1], [2, 2]], (4, -3))
@pytest.mark.parametrize('func', [rfftn, irfftn])
def test_no_axes(self, func):
with assert_raises(ValueError,
match="at least 1 axis must be transformed"):
func([], axes=[])
def test_complex_input(self):
with assert_raises(TypeError, match="x must be a real sequence"):
rfftn(np.zeros(10, dtype=np.complex64))
class FakeArray(object):
def __init__(self, data):
self._data = data
self.__array_interface__ = data.__array_interface__
class FakeArray2(object):
def __init__(self, data):
self._data = data
def __array__(self):
return self._data
# TODO: Is this test actually valuable? The behavior it's testing shouldn't be
# relied upon by users except for overwrite_x = False
class TestOverwrite(object):
"""Check input overwrite behavior of the FFT functions."""
real_dtypes = [np.float32, np.float64, np.longfloat]
dtypes = real_dtypes + [np.complex64, np.complex128, np.longcomplex]
fftsizes = [8, 16, 32]
def _check(self, x, routine, fftsize, axis, overwrite_x, should_overwrite):
x2 = x.copy()
for fake in [lambda x: x, FakeArray, FakeArray2]:
routine(fake(x2), fftsize, axis, overwrite_x=overwrite_x)
sig = "%s(%s%r, %r, axis=%r, overwrite_x=%r)" % (
routine.__name__, x.dtype, x.shape, fftsize, axis, overwrite_x)
if not should_overwrite:
assert_equal(x2, x, err_msg="spurious overwrite in %s" % sig)
def _check_1d(self, routine, dtype, shape, axis, overwritable_dtypes,
fftsize, overwrite_x):
np.random.seed(1234)
if np.issubdtype(dtype, np.complexfloating):
data = np.random.randn(*shape) + 1j*np.random.randn(*shape)
else:
data = np.random.randn(*shape)
data = data.astype(dtype)
should_overwrite = (overwrite_x
and dtype in overwritable_dtypes
and fftsize <= shape[axis])
self._check(data, routine, fftsize, axis,
overwrite_x=overwrite_x,
should_overwrite=should_overwrite)
@pytest.mark.parametrize('dtype', dtypes)
@pytest.mark.parametrize('fftsize', fftsizes)
@pytest.mark.parametrize('overwrite_x', [True, False])
@pytest.mark.parametrize('shape,axes', [((16,), -1),
((16, 2), 0),
((2, 16), 1)])
def test_fft_ifft(self, dtype, fftsize, overwrite_x, shape, axes):
overwritable = (np.longcomplex, np.complex128, np.complex64)
self._check_1d(fft, dtype, shape, axes, overwritable,
fftsize, overwrite_x)
self._check_1d(ifft, dtype, shape, axes, overwritable,
fftsize, overwrite_x)
@pytest.mark.parametrize('dtype', real_dtypes)
@pytest.mark.parametrize('fftsize', fftsizes)
@pytest.mark.parametrize('overwrite_x', [True, False])
@pytest.mark.parametrize('shape,axes', [((16,), -1),
((16, 2), 0),
((2, 16), 1)])
def test_rfft_irfft(self, dtype, fftsize, overwrite_x, shape, axes):
overwritable = self.real_dtypes
self._check_1d(irfft, dtype, shape, axes, overwritable,
fftsize, overwrite_x)
self._check_1d(rfft, dtype, shape, axes, overwritable,
fftsize, overwrite_x)
def _check_nd_one(self, routine, dtype, shape, axes, overwritable_dtypes,
overwrite_x):
np.random.seed(1234)
if np.issubdtype(dtype, np.complexfloating):
data = np.random.randn(*shape) + 1j*np.random.randn(*shape)
else:
data = np.random.randn(*shape)
data = data.astype(dtype)
def fftshape_iter(shp):
if len(shp) <= 0:
yield ()
else:
for j in (shp[0]//2, shp[0], shp[0]*2):
for rest in fftshape_iter(shp[1:]):
yield (j,) + rest
def part_shape(shape, axes):
if axes is None:
return shape
else:
return tuple(np.take(shape, axes))
def should_overwrite(data, shape, axes):
s = part_shape(data.shape, axes)
return (overwrite_x and
np.prod(shape) <= np.prod(s)
and dtype in overwritable_dtypes)
for fftshape in fftshape_iter(part_shape(shape, axes)):
self._check(data, routine, fftshape, axes,
overwrite_x=overwrite_x,
should_overwrite=should_overwrite(data, fftshape, axes))
if data.ndim > 1:
# check fortran order
self._check(data.T, routine, fftshape, axes,
overwrite_x=overwrite_x,
should_overwrite=should_overwrite(
data.T, fftshape, axes))
@pytest.mark.parametrize('dtype', dtypes)
@pytest.mark.parametrize('overwrite_x', [True, False])
@pytest.mark.parametrize('shape,axes', [((16,), None),
((16,), (0,)),
((16, 2), (0,)),
((2, 16), (1,)),
((8, 16), None),
((8, 16), (0, 1)),
((8, 16, 2), (0, 1)),
((8, 16, 2), (1, 2)),
((8, 16, 2), (0,)),
((8, 16, 2), (1,)),
((8, 16, 2), (2,)),
((8, 16, 2), None),
((8, 16, 2), (0, 1, 2))])
def test_fftn_ifftn(self, dtype, overwrite_x, shape, axes):
overwritable = (np.longcomplex, np.complex128, np.complex64)
self._check_nd_one(fftn, dtype, shape, axes, overwritable,
overwrite_x)
self._check_nd_one(ifftn, dtype, shape, axes, overwritable,
overwrite_x)
@pytest.mark.parametrize('func', [fft, ifft, fftn, ifftn,
rfft, irfft, rfftn, irfftn])
def test_invalid_norm(func):
x = np.arange(10, dtype=float)
with assert_raises(ValueError,
match='Invalid norm value o, should be None or "ortho"'):
func(x, norm='o')
| bsd-3-clause | 5,562,970,967,128,965,000 | 33.996012 | 80 | 0.507222 | false |
Ogaday/sapi-python-client | tests/test_functional_buckets.py | 1 | 3014 | import csv
import os
import tempfile
import unittest
import warnings
from requests import exceptions
from kbcstorage.buckets import Buckets
from kbcstorage.tables import Tables
class TestFunctionalBuckets(unittest.TestCase):
def setUp(self):
self.buckets = Buckets(os.getenv('KBC_TEST_API_URL'),
os.getenv('KBC_TEST_TOKEN'))
try:
self.buckets.delete('in.c-py-test', force=True)
except exceptions.HTTPError as e:
if e.response.status_code != 404:
raise
# https://github.com/boto/boto3/issues/454
warnings.simplefilter("ignore", ResourceWarning)
def tearDown(self):
try:
self.buckets.delete('in.c-py-test', force=True)
except exceptions.HTTPError as e:
if e.response.status_code != 404:
raise
def test_create_bucket(self):
bucket_id = self.buckets.create(name='py-test',
stage='in',
description='Test bucket')['id']
self.assertEqual(bucket_id, self.buckets.detail(bucket_id)['id'])
def test_list_tables(self):
bucket_id = self.buckets.create(name='py-test',
stage='in',
description='Test bucket')['id']
file, path = tempfile.mkstemp(prefix='sapi-test')
with open(path, 'w') as csv_file:
writer = csv.DictWriter(csv_file, fieldnames=['col1', 'col2'],
lineterminator='\n', delimiter=',',
quotechar='"')
writer.writeheader()
writer.writerow({'col1': 'ping', 'col2': 'pong'})
os.close(file)
tables = Tables(os.getenv('KBC_TEST_API_URL'),
os.getenv('KBC_TEST_TOKEN'))
tables.create(name='some-table', file_path=path,
bucket_id='in.c-py-test')
tables = self.buckets.list_tables(bucket_id)
self.assertEqual(1, len(tables))
self.assertEqual('in.c-py-test.some-table', tables[0]['id'])
def test_bucket_detail(self):
bucket_id = self.buckets.create(name='py-test',
stage='in',
description='Test bucket')['id']
detail = self.buckets.detail(bucket_id)
self.assertEqual(bucket_id, detail['id'])
self.assertEqual('c-py-test', detail['name'])
self.assertIsNotNone(detail['uri'])
self.assertIsNotNone(detail['created'])
self.assertEqual('Test bucket', detail['description'])
self.assertEqual([], detail['tables'])
self.assertEqual([], detail['attributes'])
    def test_invalid_bucket(self):
        try:
            self.buckets.detail('some-totally-non-existent-bucket')
        except exceptions.HTTPError as e:
            if e.response.status_code != 404:
                raise
        else:
            self.fail('Expected a 404 HTTPError for a non-existent bucket')
| mit | 5,802,878,552,715,287,000 | 39.186667 | 74 | 0.544791 | false |
markomanninen/tagtor | tagtor/main.py | 1 | 3012 | #!/usr/local/bin/python
# -*- coding: utf-8 -*-
# file: main.py
from copy import deepcopy
class TAG(object):
""" Simple html tag generator """
def __init__(self, *args, **kw):
self._name = self.__class__.__name__.lower()
self._attributes = dict([k.lower(), str(w)] for k, w in kw.iteritems())
self._in = []
self._left = []
self._right = []
map(self.__lshift__, args)
def getName(self):
return self._name
def setName(self, name):
self._name = name
return self
def getAttribute(self, key):
return self._attributes[key] if self._attributes.has_key(key) else None
def setAttribute(self, key, value):
self._attributes[key] = value
return self
def rcontent(self, item):
return self.__rshift__(item)
def __rshift__(self, item):
self._in = [item] + self._in
return self
def content(self, item):
return self.__lshift__(item)
def __lshift__(self, item):
self._in.append(item)
return self
def prepend(self, item):
return self.__radd__(item)
def __radd__(self, item):
self._left.append(item)
return self
def append(self, item):
return self.__add__(item)
def __add__(self, item):
self._right.append(item)
return self
def renderAttributes(self):
attr = ''
if self._attributes:
attr = ''.join([' %s="%s"' % (k, v) for k, v in self._attributes.iteritems()])
return attr
def _repr_html_(self):
return self.__str__()
def __str__(self):
left = ''
right = ''
element = ''
if self._in:
in_elements = ''.join([str(item() if callable(item) else item) for item in self._in])
element = '<%s%s>%s</%s>' % (self._name, self.renderAttributes(), in_elements, self._name)
else:
element = '<%s%s/>' % (self._name, self.renderAttributes())
if self._left:
left = ''.join(map(lambda item: str(item() if callable(item) else item), self._left))
if self._right:
right = ''.join(map(lambda item: str(item() if callable(item) else item), self._right))
return left + element + right
class htmlHelper(object):
""" Tag generation factory """
def __getattr__(self, tag):
""" Only create tag object, if it hasn't been created before. """
if not self.__dict__.has_key(tag):
self.__dict__[tag] = type(tag, (TAG,), {})
# Don't return reference to the object, but "deeply" new object.
return deepcopy(self.__dict__[tag])
"""
All tag elements are accessible via readily constructed factory variable. This helper
should be imported from the module in this wise: ´from tagtor import helper´
OR ´from tagtor import helper as h´ if shorter variable name is preferred
"""
helper = htmlHelper() | mit | -2,851,480,116,332,622,300 | 30.673684 | 102 | 0.548537 | false |
sixninetynine/pex | pex/resolver_options.py | 1 | 7285 | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import print_function
from pkg_resources import safe_name
from .crawler import Crawler
from .fetcher import Fetcher, PyPIFetcher
from .http import Context
from .installer import EggInstaller, WheelInstaller
from .iterator import Iterator
from .package import EggPackage, SourcePackage, WheelPackage
from .sorter import Sorter
from .translator import ChainedTranslator, EggTranslator, SourceTranslator, WheelTranslator
class ResolverOptionsInterface(object):
  def get_context(self):
    raise NotImplementedError
  def get_crawler(self):
    raise NotImplementedError
  def get_sorter(self):
    raise NotImplementedError
  def get_translator(self, interpreter, platform):
    raise NotImplementedError
  def get_iterator(self):
    raise NotImplementedError
class ResolverOptionsBuilder(object):
"""A helper that processes options into a ResolverOptions object.
Used by command-line and requirements.txt processors to configure a resolver.
"""
def __init__(self,
fetchers=None,
allow_all_external=False,
allow_external=None,
allow_unverified=None,
allow_prereleases=None,
precedence=None,
context=None):
self._fetchers = fetchers if fetchers is not None else [PyPIFetcher()]
self._allow_all_external = allow_all_external
self._allow_external = allow_external if allow_external is not None else set()
self._allow_unverified = allow_unverified if allow_unverified is not None else set()
self._allow_prereleases = allow_prereleases
self._precedence = precedence if precedence is not None else Sorter.DEFAULT_PACKAGE_PRECEDENCE
self._context = context or Context.get()
def clone(self):
return ResolverOptionsBuilder(
fetchers=self._fetchers[:],
allow_all_external=self._allow_all_external,
allow_external=self._allow_external.copy(),
allow_unverified=self._allow_unverified.copy(),
allow_prereleases=self._allow_prereleases,
precedence=self._precedence[:],
context=self._context,
)
def add_index(self, index):
fetcher = PyPIFetcher(index)
if fetcher not in self._fetchers:
self._fetchers.append(fetcher)
return self
def set_index(self, index):
self._fetchers = [PyPIFetcher(index)]
return self
def add_repository(self, repo):
fetcher = Fetcher([repo])
if fetcher not in self._fetchers:
self._fetchers.append(fetcher)
return self
def clear_indices(self):
self._fetchers = [fetcher for fetcher in self._fetchers if not isinstance(fetcher, PyPIFetcher)]
return self
def allow_all_external(self):
self._allow_all_external = True
return self
def allow_external(self, key):
self._allow_external.add(safe_name(key).lower())
return self
def allow_unverified(self, key):
self._allow_unverified.add(safe_name(key).lower())
return self
def use_wheel(self):
if WheelPackage not in self._precedence:
self._precedence = (WheelPackage,) + self._precedence
return self
def no_use_wheel(self):
self._precedence = tuple(
[precedent for precedent in self._precedence if precedent is not WheelPackage])
return self
def allow_builds(self):
if SourcePackage not in self._precedence:
self._precedence = self._precedence + (SourcePackage,)
return self
def no_allow_builds(self):
self._precedence = tuple(
[precedent for precedent in self._precedence if precedent is not SourcePackage])
return self
# TODO: Make this whole interface more Pythonic.
#
# This method would be better defined as a property allow_prereleases.
# Unfortunately, the existing method below already usurps the name allow_prereleases.
# It is an existing API that returns self as if it was written in an attempt to allow
# Java style chaining of method calls.
# Due to that return type, it cannot be used as a Python property setter.
# It's currently used in this manner:
#
# builder.allow_prereleases(True)
#
# and we cannot change it into @allow_prereleases.setter and use in this manner:
#
# builder.allow_prereleases = True
#
# without affecting the existing API calls.
#
# The code review shows that, for this particular method (allow_prereleases),
# the return value (self) is never used in the current API calls.
# It would be worth examining if the API change for this and some other methods here
# would be a good idea.
@property
def prereleases_allowed(self):
return self._allow_prereleases
def allow_prereleases(self, allowed):
self._allow_prereleases = allowed
return self
def build(self, key):
return ResolverOptions(
fetchers=self._fetchers,
allow_external=self._allow_all_external or key in self._allow_external,
allow_unverified=key in self._allow_unverified,
allow_prereleases=self._allow_prereleases,
precedence=self._precedence,
context=self._context,
)
class ResolverOptions(ResolverOptionsInterface):
def __init__(self,
fetchers=None,
allow_external=False,
allow_unverified=False,
allow_prereleases=None,
precedence=None,
context=None):
self._fetchers = fetchers if fetchers is not None else [PyPIFetcher()]
self._allow_external = allow_external
self._allow_unverified = allow_unverified
self._allow_prereleases = allow_prereleases
self._precedence = precedence if precedence is not None else Sorter.DEFAULT_PACKAGE_PRECEDENCE
self._context = context or Context.get()
# TODO(wickman) Revisit with Github #58
def get_context(self):
return self._context
def get_crawler(self):
return Crawler(self.get_context())
# get_sorter and get_translator are arguably options that should be global
# except that --no-use-wheel fucks this shit up. hm.
def get_sorter(self):
return Sorter(self._precedence)
def get_translator(self, interpreter, platform):
translators = []
# TODO(wickman) This is not ideal -- consider an explicit link between a Package
# and its Installer type rather than mapping this here, precluding the ability to
# easily add new package types (or we just forego that forever.)
for package in self._precedence:
if package is WheelPackage:
translators.append(WheelTranslator(interpreter=interpreter, platform=platform))
elif package is EggPackage:
translators.append(EggTranslator(interpreter=interpreter, platform=platform))
elif package is SourcePackage:
installer_impl = WheelInstaller if WheelPackage in self._precedence else EggInstaller
translators.append(SourceTranslator(
installer_impl=installer_impl,
interpreter=interpreter,
platform=platform))
return ChainedTranslator(*translators)
def get_iterator(self):
return Iterator(
fetchers=self._fetchers,
crawler=self.get_crawler(),
follow_links=self._allow_external,
allow_prereleases=self._allow_prereleases
)
| apache-2.0 | 5,057,392,927,551,107,000 | 33.201878 | 100 | 0.699657 | false |
XBMC-Addons/service.xbmc.versioncheck | resources/lib/version_check/viewer.py | 1 | 5158 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Copyright (C) 2011-2013 Martijn Kaijser
Copyright (C) 2013-2014 Team-XBMC
Copyright (C) 2014-2019 Team Kodi
This file is part of service.xbmc.versioncheck
SPDX-License-Identifier: GPL-3.0-or-later
See LICENSES/GPL-3.0-or-later.txt for more information.
"""
from contextlib import closing
import os
import sys
import xbmc # pylint: disable=import-error
import xbmcaddon # pylint: disable=import-error
import xbmcgui # pylint: disable=import-error
import xbmcvfs # pylint: disable=import-error
_ADDON = xbmcaddon.Addon('service.xbmc.versioncheck')
_ADDON_NAME = _ADDON.getAddonInfo('name')
if sys.version_info[0] >= 3:
_ADDON_PATH = _ADDON.getAddonInfo('path')
else:
_ADDON_PATH = _ADDON.getAddonInfo('path').decode('utf-8')
_ICON = _ADDON.getAddonInfo('icon')
class Viewer:
""" Show user a text viewer (WINDOW_DIALOG_TEXT_VIEWER)
Include the text file for the viewers body in the resources/ directory
usage:
script_path = os.path.join(_ADDON_PATH, 'resources', 'lib', 'version_check', 'viewer.py')
xbmc.executebuiltin('RunScript(%s,%s,%s)' % (script_path, 'Heading', 'notice.txt'))
:param heading: text viewer heading
:type heading: str
:param filename: filename to use for text viewers body
:type filename: str
"""
WINDOW = 10147
CONTROL_LABEL = 1
CONTROL_TEXTBOX = 5
def __init__(self, heading, filename):
self.heading = heading
self.filename = filename
# activate the text viewer window
xbmc.executebuiltin('ActivateWindow(%d)' % (self.WINDOW,))
# get window
self.window = xbmcgui.Window(self.WINDOW)
# give window time to initialize
xbmc.sleep(100)
# set controls
self.set_controls()
def set_controls(self):
""" Set the window controls
"""
# get text viewer body text
text = self.get_text()
# set heading
self.window.getControl(self.CONTROL_LABEL).setLabel('%s : %s' % (_ADDON_NAME,
self.heading,))
# set text
self.window.getControl(self.CONTROL_TEXTBOX).setText(text)
xbmc.sleep(2000)
def get_text(self):
""" Get the text viewers body text from self.filename
:return: contents of self.filename
:rtype: str
"""
try:
return self.read_file(self.filename)
except Exception as error: # pylint: disable=broad-except
xbmc.log(_ADDON_NAME + ': ' + str(error), xbmc.LOGERROR)
return ''
@staticmethod
def read_file(filename):
""" Read the contents of the provided file, from
os.path.join(_ADDON_PATH, 'resources', filename)
:param filename: name of file to read
:type filename: str
:return: contents of the provided file
:rtype: str
"""
filename = os.path.join(_ADDON_PATH, 'resources', filename)
with closing(xbmcvfs.File(filename)) as open_file:
contents = open_file.read()
return contents
class WebBrowser:
""" Display url using the default browser
usage:
script_path = os.path.join(_ADDON_PATH, 'resources', 'lib', 'version_check', 'viewer.py')
xbmc.executebuiltin('RunScript(%s,%s,%s)' % (script_path, 'webbrowser', 'https://kodi.tv/'))
:param url: url to open
:type url: str
"""
def __init__(self, url):
self.url = url
try:
# notify user
self.notification(_ADDON_NAME, self.url)
xbmc.sleep(100)
# launch url
self.launch_url()
except Exception as error: # pylint: disable=broad-except
xbmc.log(_ADDON_NAME + ': ' + str(error), xbmc.LOGERROR)
@staticmethod
def notification(heading, message, icon=None, time=15000, sound=True):
""" Create a notification
:param heading: notification heading
:type heading: str
:param message: notification message
:type message: str
:param icon: path and filename for the notification icon
:type icon: str
:param time: time to display notification
:type time: int
:param sound: is notification audible
:type sound: bool
"""
if not icon:
icon = _ICON
xbmcgui.Dialog().notification(heading, message, icon, time, sound)
def launch_url(self):
""" Open self.url in the default web browser
"""
import webbrowser # pylint: disable=import-outside-toplevel
webbrowser.open(self.url)
if __name__ == '__main__':
try:
if sys.argv[1] == 'webbrowser':
WebBrowser(sys.argv[2])
else:
Viewer(sys.argv[1], sys.argv[2])
except Exception as err: # pylint: disable=broad-except
xbmc.log(_ADDON_NAME + ': ' + str(err), xbmc.LOGERROR)
| gpl-2.0 | 6,787,627,537,322,967,000 | 30.440252 | 100 | 0.582784 | false |
nigelsmall/py2neo | py2neo/database/cypher.py | 1 | 6889 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2011-2016, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from io import StringIO
from json import dumps as json_dumps
from sys import stdout
from py2neo.compat import ustr
from py2neo.types import Node, Relationship, Path
from py2neo.util import is_collection
class CypherWriter(object):
""" Writer for Cypher data. This can be used to write to any
file-like object, such as standard output.
"""
safe_first_chars = u"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_"
safe_chars = u"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_"
default_sequence_separator = u","
default_key_value_separator = u":"
def __init__(self, file=None, **kwargs):
self.file = file or stdout
self.sequence_separator = kwargs.get("sequence_separator", self.default_sequence_separator)
self.key_value_separator = \
kwargs.get("key_value_separator", self.default_key_value_separator)
def write(self, obj):
""" Write any entity, value or collection.
:arg obj:
"""
if obj is None:
pass
elif isinstance(obj, Node):
self.write_node(obj)
elif isinstance(obj, Relationship):
self.write_relationship(obj)
elif isinstance(obj, Path):
self.write_walkable(obj)
elif isinstance(obj, dict):
self.write_map(obj)
elif is_collection(obj):
self.write_list(obj)
else:
self.write_value(obj)
def write_value(self, value):
""" Write a value.
:arg value:
"""
self.file.write(ustr(json_dumps(value, ensure_ascii=False)))
def write_identifier(self, identifier):
""" Write an identifier.
:arg identifier:
"""
if not identifier:
raise ValueError("Invalid identifier")
identifier = ustr(identifier)
safe = (identifier[0] in self.safe_first_chars and
all(ch in self.safe_chars for ch in identifier[1:]))
if not safe:
self.file.write(u"`")
self.file.write(identifier.replace(u"`", u"``"))
self.file.write(u"`")
else:
self.file.write(identifier)
def write_list(self, collection):
""" Write a list.
:arg collection:
"""
self.file.write(u"[")
link = u""
for value in collection:
self.file.write(link)
self.write(value)
link = self.sequence_separator
self.file.write(u"]")
def write_literal(self, text):
""" Write literal text.
:arg text:
"""
self.file.write(ustr(text))
def write_map(self, mapping, private=False):
""" Write a map.
:arg mapping:
:arg private:
"""
self.file.write(u"{")
link = u""
for key, value in sorted(dict(mapping).items()):
if key.startswith("_") and not private:
continue
self.file.write(link)
self.write_identifier(key)
self.file.write(self.key_value_separator)
self.write(value)
link = self.sequence_separator
self.file.write(u"}")
def write_node(self, node, name=None, full=True):
""" Write a node.
:arg node:
:arg name:
:arg full:
"""
self.file.write(u"(")
if name is None:
name = node.__name__
self.write_identifier(name)
if full:
for label in sorted(node.labels()):
self.write_literal(u":")
self.write_identifier(label)
if node:
self.file.write(u" ")
self.write_map(dict(node))
self.file.write(u")")
def write_relationship(self, relationship, name=None):
""" Write a relationship (including nodes).
:arg relationship:
:arg name:
"""
self.write_node(relationship.start_node(), full=False)
self.file.write(u"-")
self.write_relationship_detail(relationship, name)
self.file.write(u"->")
self.write_node(relationship.end_node(), full=False)
def write_relationship_detail(self, relationship, name=None):
""" Write a relationship (excluding nodes).
:arg relationship:
:arg name:
"""
self.file.write(u"[")
if name is not None:
self.write_identifier(name)
        if relationship.type():
self.file.write(u":")
self.write_identifier(relationship.type())
if relationship:
self.file.write(u" ")
self.write_map(relationship)
self.file.write(u"]")
def write_subgraph(self, subgraph):
""" Write a subgraph.
:arg subgraph:
"""
self.write_literal("({")
for i, node in enumerate(subgraph.nodes()):
if i > 0:
self.write_literal(", ")
self.write_node(node)
self.write_literal("}, {")
for i, relationship in enumerate(subgraph.relationships()):
if i > 0:
self.write_literal(", ")
self.write_relationship(relationship)
self.write_literal("})")
def write_walkable(self, walkable):
""" Write a walkable.
:arg walkable:
"""
nodes = walkable.nodes()
for i, relationship in enumerate(walkable):
node = nodes[i]
self.write_node(node, full=False)
forward = relationship.start_node() == node
self.file.write(u"-" if forward else u"<-")
self.write_relationship_detail(relationship)
self.file.write(u"->" if forward else u"-")
self.write_node(nodes[-1], full=False)
def cypher_escape(identifier):
""" Escape a Cypher identifier in backticks.
::
>>> cypher_escape("this is a `label`")
'`this is a ``label```'
:arg identifier:
"""
s = StringIO()
writer = CypherWriter(s)
writer.write_identifier(identifier)
return s.getvalue()
def cypher_repr(obj):
""" Generate the Cypher representation of an object.
:arg obj:
"""
s = StringIO()
writer = CypherWriter(s)
writer.write(obj)
return s.getvalue()
| apache-2.0 | 8,277,273,479,301,094,000 | 28.566524 | 99 | 0.575555 | false |
alex/changes | tests/changes/api/test_system_options.py | 1 | 1427 | from changes.config import db
from changes.models import SystemOption
from changes.testutils import APITestCase
class SystemOptionsListTest(APITestCase):
def test_simple(self):
path = '/api/0/systemoptions/'
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert data['system.message'] == ''
db.session.add(SystemOption(
name='system.message',
value='hello',
))
db.session.commit()
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert data['system.message'] == 'hello'
class SystemOptionsUpdateTest(APITestCase):
def test_simple(self):
path = '/api/0/systemoptions/'
resp = self.client.post(path, data={
'system.message': 'hello',
})
assert resp.status_code == 401
self.login_default()
resp = self.client.post(path, data={
'system.message': 'hello',
})
assert resp.status_code == 403
self.login_default_admin()
resp = self.client.post(path, data={
'system.message': 'hello',
})
assert resp.status_code == 200
options = dict(db.session.query(
SystemOption.name, SystemOption.value
))
assert options.get('system.message') == 'hello'
| apache-2.0 | -5,700,007,956,722,778,000 | 25.425926 | 55 | 0.58164 | false |
chiffa/Pharmacosensitivity_growth_assays | src/plot_drawings.py | 1 | 5857 | import numpy as np
from matplotlib import pyplot as plt
from chiffatools.linalg_routines import rm_nans
from chiffatools.dataviz import better2D_desisty_plot
import supporting_functions as SF
from scipy import stats
def quick_hist(data):
plt.hist(np.log10(rm_nans(data)), bins=20)
plt.show()
def show_2d_array(data):
plt.imshow(data, interpolation='nearest', cmap='coolwarm')
plt.colorbar()
plt.show()
def correlation_plot(x, y):
plt.plot(x, y, '.k')
plt.show()
better2D_desisty_plot(x, y)
plt.show()
slope, intercept, r_value, p_value, std_err = stats.linregress(x, y)
print "r-squared:", r_value**2
def raw_plot(values, full_values, concentrations, noise_level, color):
m_i = values.shape[0]
m_j = values.shape[2]
ax = plt.subplot(111)
ax.set_xscale('log')
msk = concentrations == 0.0
concentrations[msk] = np.min(concentrations[np.logical_not(msk)])/4
if type(noise_level) == np.float64 or type(noise_level) == float:
errs = np.empty_like(values)
errs.fill(noise_level)
errs = [errs, errs]
if type(noise_level) == np.ndarray:
errs = [noise_level, noise_level]
if type(noise_level) == tuple:
errs = [noise_level[0], noise_level[1]]
for i in range(0, m_i):
for j in range(0, m_j):
# temp_concs = concentrations
temp_concs = concentrations*np.random.uniform(0.95, 1.05, 1)
nan_mask = np.logical_not(np.isnan(full_values[i, :, j]))
plt.errorbar(temp_concs[nan_mask], full_values[i, nan_mask, j],
yerr=[errs[0][i, nan_mask, j], errs[1][i, nan_mask, j]], fmt='.', color=color, alpha=0.25)
plt.errorbar(temp_concs[nan_mask], values[i, nan_mask, j],
yerr=[errs[0][i, nan_mask, j], errs[1][i, nan_mask, j]], fmt='.', color=color)
def summary_plot(means, mean_err, concentrations, anchor, color='black', legend='', nofill=False):
# TODO: inject nan to mark that the control is different from the main sequence.
ax = plt.subplot(111)
ax.set_xscale('log')
nanmask = np.logical_not(np.isnan(means))
if not np.all(np.logical_not(nanmask)):
concentrations[0] = anchor
plt.errorbar(concentrations[nanmask], means[nanmask], yerr=mean_err[nanmask], color=color, label=legend)
ymax = means[nanmask] + mean_err[nanmask]
ymin = means[nanmask] - mean_err[nanmask]
if not nofill:
plt.fill_between(concentrations[nanmask], ymax, ymin, facecolor=color, alpha=0.25)
def vector_summary_plot(means_array, error_array, concentrations_array, anchor, legend_array=None, color='black'):
if legend_array is None:
legend_array = np.zeros_like(means_array[:, 0])
for i in range(0, means_array.shape[0]):
nanmask = np.logical_not(np.isnan(means_array[i, :]))
if not np.all(np.logical_not(nanmask)):
summary_plot(means_array[i, nanmask], error_array[i, nanmask], concentrations_array[i, nanmask], anchor, color, legend_array[i])
def pretty_gradual_plot(data, concentrations, strain_name_map, drug_name, blank_line=200):
def inner_scatter_plot(mean, std, relative, limiter=4):
series = np.zeros(mean.shape)
cell_type = np.zeros(mean.shape)
for i, name in enumerate(names):
series[i, :] = np.arange(i, c.shape[0]*(len(names)+40)+i, len(names)+40)
cell_type[i, :] = i
plt.scatter(series[i, :], mean[i, :], c=cm(i/float(len(names))), s=35, label=name)
        plt.errorbar(series.flatten(), mean.flatten(), yerr=std.flatten(), fmt='none', capsize=0)
plt.xticks(np.mean(series, axis=0), c)
        plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, ncol=len(names)//limiter, mode="expand", borderaxespad=0.,prop={'size':6})
if not relative:
plt.axhline(y=blank_line)
plt.show()
filter = np.all(np.logical_not(np.isnan(data)), axis=(1, 2))
names = [strain_name_map[i] for i in filter.nonzero()[0].tolist()]
c = concentrations[filter, :][0, :]
mean = np.nanmean(data[filter, :, :], axis=-1)
std = np.nanstd(data[filter, :, :], axis=-1)
cm = plt.cm.get_cmap('spectral')
refmean = mean[:, 0].reshape((mean.shape[0], 1))
refstd = std[:, 0].reshape((mean.shape[0], 1))
rel_mean, rel_std = (mean/refmean, np.sqrt(np.power(refstd, 2)+np.power(std, 2))/mean)
inner_scatter_plot(mean, std, False)
inner_scatter_plot(rel_mean, rel_std, True)
mean_mean = np.nanmean(mean, axis=0)
std_mean = np.nanstd(mean, axis=0)
mean_std = np.nanmean(std, axis=0)
total_std = np.sqrt(np.power(std_mean, 2) + np.power(mean_std, 2))
confusables = np.sum(mean - std < blank_line, axis=0) / float(len(names))
rel_mean_mean = np.nanmean(rel_mean, axis=0)
rel_std_mean = np.nanstd(rel_mean, axis=0)
rel_mean_std = np.nanmean(rel_std, axis=0)
rel_total_std = np.sqrt(np.power(rel_std_mean, 2) + np.power(rel_mean_std, 2))
plt.subplot(212)
plt.plot(mean_mean, c=cm(0.00), label='mean of mean')
plt.plot(mean_std, c=cm(.25), label='mean of std')
plt.plot(std_mean, c=cm(.50), label='std of mean')
plt.plot(total_std, c=cm(0.75), label='total std')
# plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, mode="expand", borderaxespad=0.,prop={'size':8})
plt.axhline(y=blank_line)
plt.subplot(211)
plt.plot(rel_mean_mean, c=cm(0.00), label='mean of mean')
plt.plot(rel_mean_std, c=cm(.25), label='mean of std')
plt.plot(rel_std_mean, c=cm(.50), label='std of mean')
plt.plot(rel_total_std, c=cm(0.75), label='total std')
plt.plot(confusables, c=cm(0.9), label='confusable with null')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, mode="expand", borderaxespad=0.,prop={'size':8})
    plt.show()
| bsd-3-clause | -7,508,849,187,087,418,000 | 38.85034 | 140 | 0.621137 | false
google-research/google-research | protein_lm/domains.py | 1 | 14044 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Specifications for different types of input/output domains."""
import abc
import collections
import collections.abc
import gin
import numpy as np
import six
from six.moves import range
from protein_lm import seq_utils
from protein_lm import utils
BOS_TOKEN = '<' # Beginning of sequence token.
EOS_TOKEN = '>' # End of sequence token.
PAD_TOKEN = '_' # Padding token.
MASK_TOKEN = '*' # Mask token.
SEP_TOKEN = '|' # A special token for separating tokens for serialization.
@gin.configurable
class Vocabulary(object):
"""Basic vocabulary used to represent output tokens for domains."""
def __init__(self,
tokens,
include_bos=False,
include_eos=False,
include_pad=False,
include_mask=False,
bos_token=BOS_TOKEN,
eos_token=EOS_TOKEN,
pad_token=PAD_TOKEN,
mask_token=MASK_TOKEN):
"""A token vocabulary.
Args:
      tokens: A list of tokens to put in the vocab. If an int, will be
interpreted as the number of tokens and '0', ..., 'tokens-1' will be
used as tokens.
include_bos: Whether to append `bos_token` to `tokens` that marks the
beginning of a sequence.
include_eos: Whether to append `eos_token` to `tokens` that marks the
end of a sequence.
      include_pad: Whether to append `pad_token` to `tokens` to mark past the
        end of sequence.
include_mask: Whether to append `mask_token` to `tokens` to mark masked
positions.
      bos_token: A special token that marks the beginning of sequence.
        Ignored if `include_bos == False`.
      eos_token: A special token that marks the end of sequence.
        Ignored if `include_eos == False`.
      pad_token: A special token that marks past the end of sequence.
        Ignored if `include_pad == False`.
      mask_token: A special token that marks MASKED positions for e.g. BERT.
        Ignored if `include_mask == False`.
"""
    if not isinstance(tokens, collections.abc.Iterable):
tokens = range(tokens)
tokens = [str(token) for token in tokens]
if include_bos:
tokens.append(bos_token)
if include_eos:
tokens.append(eos_token)
if include_pad:
tokens.append(pad_token)
if include_mask:
tokens.append(mask_token)
if len(set(tokens)) != len(tokens):
raise ValueError('tokens not unique!')
special_tokens = sorted(set(tokens) & set([SEP_TOKEN]))
if special_tokens:
raise ValueError(
f'tokens contains reserved special tokens: {special_tokens}!')
self._tokens = tokens
self._token_ids = list(range(len(self._tokens)))
self._id_to_token = collections.OrderedDict(
zip(self._token_ids, self._tokens))
self._token_to_id = collections.OrderedDict(
zip(self._tokens, self._token_ids))
self._bos_token = bos_token if include_bos else None
self._eos_token = eos_token if include_eos else None
self._mask_token = mask_token if include_mask else None
self._pad_token = pad_token if include_pad else None
def __len__(self):
return len(self._tokens)
@property
def tokens(self):
"""Return the tokens of the vocabulary."""
return list(self._tokens)
@property
def token_ids(self):
"""Return the tokens ids of the vocabulary."""
return list(self._token_ids)
@property
def bos(self):
"""Returns the index of the BOS token or None if unspecified."""
return (None if self._bos_token is None else
self._token_to_id[self._bos_token])
@property
def eos(self):
"""Returns the index of the EOS token or None if unspecified."""
return (None if self._eos_token is None else
self._token_to_id[self._eos_token])
@property
def mask(self):
"""Returns the index of the MASK token or None if unspecified."""
return (None if self._mask_token is None else
self._token_to_id[self._mask_token])
@property
def pad(self):
"""Returns the index of the PAD token or None if unspecified."""
return (None
if self._pad_token is None else self._token_to_id[self._pad_token])
def is_valid(self, value):
"""Tests if a value is a valid token id and returns a bool."""
return value in self._token_ids
def are_valid(self, values):
"""Tests if values are valid token ids and returns an array of bools."""
return np.array([self.is_valid(value) for value in values])
def encode(self, tokens):
"""Maps an iterable of string tokens to a list of integer token ids."""
if six.PY3 and isinstance(tokens, bytes):
# Always use Unicode in Python 3.
tokens = tokens.decode('utf-8')
return [self._token_to_id[token] for token in tokens]
def decode(self, values, stop_at_eos=False, as_str=True):
"""Maps an iterable of integer token ids to string tokens.
Args:
values: An iterable of token ids.
stop_at_eos: Whether to ignore all values after the first EOS token id.
as_str: Whether to return a list of tokens or a concatenated string.
Returns:
A string of tokens or a list of tokens if `as_str == False`.
"""
if stop_at_eos and self.eos is None:
raise ValueError('EOS unspecified!')
tokens = []
for value in values:
      value = int(value) # Required if value is a scalar tensor.
if stop_at_eos and value == self.eos:
break
tokens.append(self._id_to_token[value])
return ''.join(tokens) if as_str else tokens
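# Illustrative round trip (a sketch, not part of the library; token ids are
# assigned in order, with special tokens appended last):
#
#   vocab = Vocabulary(tokens=['a', 'b'], include_eos=True)  # ids: a=0, b=1, '>'=2
#   vocab.encode('abba')  # -> [0, 1, 1, 0]
#   vocab.decode([0, 1, 2], stop_at_eos=True)  # -> 'ab'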
@gin.configurable
class ProteinVocab(Vocabulary):
"""A `Vocabulary` with amino acid tokens."""
def __init__(self,
include_anomalous_amino_acids=False,
include_align_tokens=False,
**kwargs):
"""Creates an instance of this class.
Args:
include_anomalous_amino_acids: A bool indicating whether to also include
the set of anomalous amino acids (vs. to use the standard ones only).
include_align_tokens: A bool indicating whether to also include the tokens
required to represent (fixed-length) aligned sequences.
**kwargs: Named parameters to be passed to the base class.
"""
tokens = list(seq_utils.AA_TOKENS)
if include_anomalous_amino_acids:
tokens += list(seq_utils.AA_ANOMALOUS_TOKENS)
if include_align_tokens:
tokens += list(seq_utils.AA_ALIGN_TOKENS)
super(ProteinVocab, self).__init__(tokens=tokens, **kwargs)
@six.add_metaclass(abc.ABCMeta)
class Domain(object):
"""Base class of problem domains, which specifies the set of valid objects."""
@property
def mask_fn(self):
"""Returns a masking function or None."""
@abc.abstractmethod
def is_valid(self, sample):
"""Tests if the given sample is valid for this domain."""
def are_valid(self, samples):
"""Tests if the given samples are valid for this domain."""
return np.array([self.is_valid(sample) for sample in samples])
class DiscreteDomain(Domain):
"""Base class for discrete domains: sequences of categorical variables."""
def __init__(self, vocab):
self._vocab = vocab
@property
def vocab_size(self):
return len(self.vocab)
@property
def vocab(self):
return self._vocab
def encode(self, samples, **kwargs):
"""Maps a list of string tokens to a list of lists of integer token ids."""
return [self.vocab.encode(sample, **kwargs) for sample in samples]
def decode(self, samples, **kwargs):
"""Maps list of lists of integer token ids to list of strings."""
return [self.vocab.decode(sample, **kwargs) for sample in samples]
@gin.configurable
class FixedLengthDiscreteDomain(DiscreteDomain):
"""Output is a fixed length discrete sequence."""
def __init__(self, vocab_size=None, length=None, vocab=None):
"""Creates an instance of this class.
Args:
vocab_size: An optional integer for constructing a vocab of this size.
If provided, `vocab` must be `None`.
length: The length of the domain (required).
vocab: The `Vocabulary` of the domain. If provided, `vocab_size` must be
`None`.
Raises:
ValueError: If neither `vocab_size` nor `vocab` is provided.
ValueError: If `length` if not provided.
"""
if length is None:
raise ValueError('length must be provided!')
if not (vocab_size is None) ^ (vocab is None):
raise ValueError('Exactly one of vocab_size of vocab must be specified!')
self._length = length
if vocab is None:
vocab = Vocabulary(vocab_size)
super(FixedLengthDiscreteDomain, self).__init__(vocab)
@property
def length(self):
return self._length
@property
def size(self):
"""The number of structures in the Domain."""
return self.vocab_size**self.length
def is_valid(self, sequence):
return len(sequence) == self.length and self.vocab.are_valid(sequence).all()
def sample_uniformly(self, num_samples, seed=None):
random_state = utils.get_random_state(seed)
return np.int32(
random_state.randint(
size=[num_samples, self.length], low=0, high=self.vocab_size))
def index_to_structure(self, index):
"""Given an integer and target length, encode into structure."""
structure = np.zeros(self.length, dtype=np.int32)
tokens = [int(token, base=len(self.vocab))
for token in np.base_repr(index, base=len(self.vocab))]
structure[-len(tokens):] = tokens
return structure
def structure_to_index(self, structure):
"""Returns the index of a sequence over a vocabulary of size `vocab_size`."""
structure = np.asarray(structure)[::-1]
return np.sum(structure * np.power(len(self.vocab), range(len(structure))))
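# Illustrative round trip (a sketch; with vocab_size=4 and length=3 the index
# of [1, 0, 2] is 1*4**2 + 0*4 + 2 = 18):
#
#   domain = FixedLengthDiscreteDomain(vocab_size=4, length=3)
#   domain.structure_to_index([1, 0, 2])  # -> 18
#   domain.index_to_structure(18)         # -> array([1, 0, 2], dtype=int32)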
@gin.configurable
class VariableLengthDiscreteDomain(FixedLengthDiscreteDomain):
"""A domain for variable-length sequences."""
def __init__(self, vocab, length, min_length=0):
"""Creates an instance of this class.
Args:
vocab: An instance of a `Vocabulary` with an EOS token.
length: The maximum sequence length.
min_length: The minimum sequence length.
Raises:
ValueError: If `vocab` does not have an EOS token.
"""
if vocab.eos is None:
raise ValueError('vocab must have an EOS token!')
super(VariableLengthDiscreteDomain, self).__init__(
length=length, vocab=vocab)
self._min_length = min_length
@property
def length(self):
return self._length
@property
def min_length(self):
return self._min_length
def is_valid(self, sequence):
"""Tests if `sequences` are valid for this domain."""
unpadded_seq = seq_utils.unpad_sequences([sequence], self.vocab.eos)[0]
return (len(unpadded_seq) >= self.min_length and
len(unpadded_seq) <= self.length and
self.vocab.are_valid(sequence).all() and
seq_utils.sequences_end_with_value([sequence], self.vocab.eos)[0])
def encode(self, sequences, pad=True):
"""Integer-encodes sequences and optionally pads them."""
encoded = [self.vocab.encode(seq) for seq in sequences]
if pad:
encoded = seq_utils.pad_sequences(encoded, self.length, self.vocab.eos)
return encoded
def decode(self, sequences, stop_at_eos=True, **kwargs):
"""Integer-encodes sequences and optionally pads them."""
return [self.vocab.decode(seq, stop_at_eos=stop_at_eos, **kwargs)
for seq in sequences]
def sample_uniformly(self,
num_samples,
min_seq_len=None,
max_seq_len=None,
pad=True,
seed=None):
"""Samples valid integer-encoded sequences from the domain.
Args:
num_samples: The number of samples.
min_seq_len: The minimum sequence length of samples (inclusive).
max_seq_len: The maximum sequence length of samples (inclusive).
pad: Whether to pad sequences to the maximum length.
seed: Optional seed of the random number generator.
Returns:
A list with `num_samples` samples.
"""
if min_seq_len is None:
min_seq_len = self.min_length
if max_seq_len is None:
max_seq_len = self.length
random_state = utils.get_random_state(seed)
valid_token_ids = np.delete(self.vocab.token_ids, self.vocab.eos)
lengths = random_state.randint(min_seq_len, max_seq_len + 1, num_samples)
seqs = [random_state.choice(valid_token_ids, length)
for length in lengths]
if pad:
seqs = seq_utils.pad_sequences(seqs, self.length, self.vocab.eos)
return seqs
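# Sketch of typical use (hypothetical vocab; `encode` pads with the EOS id up
# to the maximum length):
#
#   vocab = Vocabulary(tokens=['a', 'b'], include_eos=True)
#   domain = VariableLengthDiscreteDomain(vocab=vocab, length=4)
#   domain.encode(['ab'])  # -> [[0, 1, 2, 2]] with 2 == vocab.eos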
def is_discrete(domain):
"""Returns a bool indicating whether `domain` is discrete."""
return isinstance(domain, DiscreteDomain)
def check_if_discrete(domain):
"""Raises an exception if `domain` is not discrete."""
if not is_discrete(domain):
raise ValueError('Discrete domain expected!')
def is_variable_length(domain):
"""Returns a bool indicating whether `domain` is variable-length."""
return isinstance(domain, VariableLengthDiscreteDomain)
def domain_to_bos(domain):
"""Returns a pre-specified start-sequence symbol or a new symbol (len(vocab)).
If a new symbol is returned, it's not added to the vocabulary (only used as
input at the beginning of sequence).
Args:
domain: The problem's Domain instance.
"""
vocab = domain.vocab
return len(vocab) if vocab.bos is None else vocab.bos
| apache-2.0 | -4,031,687,846,241,752,000 | 33.421569 | 81 | 0.662489 | false |
craft-ai/craft-ai-client-python | tests/test_pandas.py | 1 | 33216 | import unittest
from random import random
from craft_ai.pandas import CRAFTAI_PANDAS_ENABLED
if CRAFTAI_PANDAS_ENABLED:
import copy
import pandas as pd
from numpy.random import randn
import craft_ai.pandas
from .data import pandas_valid_data, valid_data
from .utils import generate_entity_id
from . import settings
AGENT_ID_1_BASE = "test_pandas_1"
AGENT_ID_2_BASE = "test_pandas_2"
GENERATOR_ID_BASE = "test_pandas_generator"
SIMPLE_AGENT_CONFIGURATION = pandas_valid_data.SIMPLE_AGENT_CONFIGURATION
SIMPLE_AGENT_BOOSTING_CONFIGURATION = (
pandas_valid_data.SIMPLE_AGENT_BOOSTING_CONFIGURATION
)
SIMPLE_AGENT_BOOSTING_CONFIGURATION_WITH_GEN_TYPE = (
pandas_valid_data.SIMPLE_AGENT_BOOSTING_CONFIGURATION_WITH_GEN_TYPE
)
SIMPLE_AGENT_DATA = pandas_valid_data.SIMPLE_AGENT_DATA
SIMPLE_AGENT_BOOSTING_DATA = pandas_valid_data.SIMPLE_AGENT_BOOSTING_DATA
SIMPLE_AGENT_BOOSTING_MANY_DATA = pandas_valid_data.SIMPLE_AGENT_BOOSTING_MANY_DATA
SIMPLE_AGENT_MANY_DATA = pandas_valid_data.SIMPLE_AGENT_MANY_DATA
COMPLEX_AGENT_CONFIGURATION = pandas_valid_data.COMPLEX_AGENT_CONFIGURATION
COMPLEX_AGENT_CONFIGURATION_2 = pandas_valid_data.COMPLEX_AGENT_CONFIGURATION_2
COMPLEX_AGENT_DATA = pandas_valid_data.COMPLEX_AGENT_DATA
COMPLEX_AGENT_DATA_2 = pandas_valid_data.COMPLEX_AGENT_DATA_2
DATETIME_AGENT_CONFIGURATION = pandas_valid_data.DATETIME_AGENT_CONFIGURATION
DATETIME_AGENT_DATA = pandas_valid_data.DATETIME_AGENT_DATA
MISSING_AGENT_CONFIGURATION = pandas_valid_data.MISSING_AGENT_CONFIGURATION
MISSING_AGENT_DATA = pandas_valid_data.MISSING_AGENT_DATA
MISSING_AGENT_DATA_DECISION = pandas_valid_data.MISSING_AGENT_DATA_DECISION
INVALID_PYTHON_IDENTIFIER_CONFIGURATION = (
pandas_valid_data.INVALID_PYTHON_IDENTIFIER_CONFIGURATION
)
INVALID_PYTHON_IDENTIFIER_DATA = pandas_valid_data.INVALID_PYTHON_IDENTIFIER_DATA
INVALID_PYTHON_IDENTIFIER_DECISION = (
pandas_valid_data.INVALID_PYTHON_IDENTIFIER_DECISION
)
EMPTY_TREE = pandas_valid_data.EMPTY_TREE
CLIENT = craft_ai.pandas.Client(settings.CRAFT_CFG)
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasSimpleAgent(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "SimpleAgent")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(SIMPLE_AGENT_CONFIGURATION, self.agent_id)
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_add_agent_operations_df_bad_index(self):
df = pd.DataFrame(randn(10, 5), columns=["a", "b", "c", "d", "e"])
self.assertRaises(
craft_ai.pandas.errors.CraftAiBadRequestError,
CLIENT.add_agent_operations,
self.agent_id,
df,
)
def test_add_agent_operations_df(self):
CLIENT.add_agent_operations(self.agent_id, SIMPLE_AGENT_DATA)
agent = CLIENT.get_agent(self.agent_id)
self.assertEqual(
agent["firstTimestamp"],
SIMPLE_AGENT_DATA.first_valid_index().value // 10 ** 9,
)
self.assertEqual(
agent["lastTimestamp"],
SIMPLE_AGENT_DATA.last_valid_index().value // 10 ** 9,
)
def test_add_agent_operations_df_unexpected_property(self):
df = pd.DataFrame(
randn(300, 6),
columns=["a", "b", "c", "d", "e", "f"],
index=pd.date_range("20200101", periods=300, freq="T").tz_localize(
"Europe/Paris"
),
)
self.assertRaises(
craft_ai.pandas.errors.CraftAiBadRequestError,
CLIENT.add_agent_operations,
self.agent_id,
df,
)
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasComplexAgent(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "ComplexAgent")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(COMPLEX_AGENT_CONFIGURATION, self.agent_id)
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_add_agent_operations_df_complex_agent(self):
CLIENT.add_agent_operations(self.agent_id, COMPLEX_AGENT_DATA)
agent = CLIENT.get_agent(self.agent_id)
self.assertEqual(
agent["firstTimestamp"],
COMPLEX_AGENT_DATA.first_valid_index().value // 10 ** 9,
)
self.assertEqual(
agent["lastTimestamp"],
COMPLEX_AGENT_DATA.last_valid_index().value // 10 ** 9,
)
def test_add_agent_operations_df_without_tz(self):
test_df = COMPLEX_AGENT_DATA.drop(columns="tz")
CLIENT.add_agent_operations(self.agent_id, test_df)
agent = CLIENT.get_agent(self.agent_id)
self.assertEqual(
agent["firstTimestamp"],
COMPLEX_AGENT_DATA.first_valid_index().value // 10 ** 9,
)
self.assertEqual(
agent["lastTimestamp"],
COMPLEX_AGENT_DATA.last_valid_index().value // 10 ** 9,
)
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasMissingAgent(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "MissingAgent")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(MISSING_AGENT_CONFIGURATION, self.agent_id)
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_add_agent_operations_df_missing_agent(self):
CLIENT.add_agent_operations(self.agent_id, MISSING_AGENT_DATA)
agent = CLIENT.get_agent(self.agent_id)
self.assertEqual(
agent["firstTimestamp"],
MISSING_AGENT_DATA.first_valid_index().value // 10 ** 9,
)
self.assertEqual(
agent["lastTimestamp"],
MISSING_AGENT_DATA.last_valid_index().value // 10 ** 9,
)
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasSimpleAgentWithData(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "SimpleAgentWData")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(SIMPLE_AGENT_CONFIGURATION, self.agent_id)
CLIENT.add_agent_operations(self.agent_id, SIMPLE_AGENT_DATA)
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_get_agent_operations_df(self):
df = CLIENT.get_agent_operations(self.agent_id)
self.assertEqual(len(df), 300)
self.assertEqual(len(df.dtypes), 5)
self.assertEqual(
df.first_valid_index(),
pd.Timestamp("2020-01-01 00:00:00", tz="Europe/Paris"),
)
self.assertEqual(
df.last_valid_index(),
pd.Timestamp("2020-01-01 04:59:00", tz="Europe/Paris"),
)
def test_get_agent_states_df(self):
df = CLIENT.get_agent_states(self.agent_id)
self.assertEqual(len(df), 180)
self.assertEqual(len(df.dtypes), 5)
self.assertEqual(
df.first_valid_index(),
pd.Timestamp("2020-01-01 00:00:00", tz="Europe/Paris"),
)
self.assertEqual(
df.last_valid_index(),
pd.Timestamp("2020-01-01 04:58:20", tz="Europe/Paris"),
)
def test_tree_visualization(self):
tree1 = CLIENT.get_agent_decision_tree(
self.agent_id, DATETIME_AGENT_DATA.last_valid_index().value // 10 ** 9
)
craft_ai.pandas.utils.create_tree_html(tree1, "", "constant", None, 500)
def test_display_tree_raised_error(self):
tree1 = CLIENT.get_agent_decision_tree(
self.agent_id, DATETIME_AGENT_DATA.last_valid_index().value // 10 ** 9
)
self.assertRaises(
craft_ai.pandas.errors.CraftAiError,
craft_ai.pandas.utils.display_tree,
tree1,
)
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasSimpleAgentWithOperations(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "SimpleAgentWOp")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(valid_data.VALID_CONFIGURATION, self.agent_id)
CLIENT.add_agent_operations(self.agent_id, valid_data.VALID_OPERATIONS_SET)
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_get_decision_tree_with_pdtimestamp(self):
# test if we get the same decision tree
decision_tree = CLIENT.get_agent_decision_tree(
self.agent_id, pd.Timestamp(valid_data.VALID_TIMESTAMP, unit="s", tz="UTC")
)
ground_truth_decision_tree = CLIENT.get_agent_decision_tree(
self.agent_id, valid_data.VALID_TIMESTAMP
)
self.assertIsInstance(decision_tree, dict)
self.assertNotEqual(decision_tree.get("_version"), None)
self.assertNotEqual(decision_tree.get("configuration"), None)
self.assertNotEqual(decision_tree.get("trees"), None)
self.assertEqual(decision_tree, ground_truth_decision_tree)
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasComplexAgentWithData(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "ComplexAgentWData")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(COMPLEX_AGENT_CONFIGURATION, self.agent_id)
CLIENT.add_agent_operations(self.agent_id, COMPLEX_AGENT_DATA)
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_get_agent_operations_df_complex_agent(self):
df = CLIENT.get_agent_operations(self.agent_id)
self.assertEqual(len(df), 10)
self.assertEqual(len(df.dtypes), 3)
self.assertEqual(
df.first_valid_index(),
pd.Timestamp("2020-01-01 00:00:00", tz="Europe/Paris"),
)
self.assertEqual(
df.last_valid_index(),
pd.Timestamp("2020-01-10 00:00:00", tz="Europe/Paris"),
)
def test_decide_from_contexts_df(self):
tree = CLIENT.get_agent_decision_tree(
self.agent_id, COMPLEX_AGENT_DATA.last_valid_index().value // 10 ** 9
)
test_df = COMPLEX_AGENT_DATA
test_df_copy = test_df.copy(deep=True)
df = CLIENT.decide_from_contexts_df(tree, test_df)
self.assertEqual(len(df), 10)
self.assertEqual(len(df.dtypes), 6)
self.assertTrue(test_df.equals(test_df_copy))
self.assertEqual(
df.first_valid_index(),
pd.Timestamp("2020-01-01 00:00:00", tz="Europe/Paris"),
)
self.assertEqual(
df.last_valid_index(),
pd.Timestamp("2020-01-10 00:00:00", tz="Europe/Paris"),
)
# Also works as before, with a plain context
output = CLIENT.decide(tree, {"a": 1, "tz": "+02:00"})
self.assertEqual(output["output"]["b"]["predicted_value"], "Pierre")
def test_decide_from_contexts_df_zero_rows(self):
tree = CLIENT.get_agent_decision_tree(
self.agent_id, COMPLEX_AGENT_DATA.last_valid_index().value // 10 ** 9
)
test_df = COMPLEX_AGENT_DATA.iloc[:0, :]
self.assertRaises(
craft_ai.errors.CraftAiBadRequestError,
CLIENT.decide_from_contexts_df,
tree,
test_df,
)
def test_decide_from_contexts_df_empty_df(self):
tree = CLIENT.get_agent_decision_tree(
self.agent_id, COMPLEX_AGENT_DATA.last_valid_index().value // 10 ** 9
)
self.assertRaises(
craft_ai.errors.CraftAiBadRequestError,
CLIENT.decide_from_contexts_df,
tree,
pd.DataFrame(),
)
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasComplexAgent2WithData(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "ComplexAgent2WData")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(COMPLEX_AGENT_CONFIGURATION_2, self.agent_id)
CLIENT.add_agent_operations(self.agent_id, COMPLEX_AGENT_DATA)
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_decide_from_contexts_df_null_decisions(self):
tree = CLIENT.get_agent_decision_tree(
self.agent_id, COMPLEX_AGENT_DATA.last_valid_index().value // 10 ** 9
)
test_df = pd.DataFrame(
[["Jean-Pierre", "+02:00"], ["Paul"]],
columns=["b", "tz"],
index=pd.date_range("20200201", periods=2, freq="D").tz_localize(
"Europe/Paris"
),
)
test_df_copy = test_df.copy(deep=True)
df = CLIENT.decide_from_contexts_df(tree, test_df)
self.assertEqual(len(df), 2)
self.assertTrue(test_df.equals(test_df_copy))
self.assertTrue(pd.notnull(df["a_predicted_value"][0]))
self.assertTrue(pd.notnull(df["a_predicted_value"][1]))
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasComplexAgent3WithData(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "ComplexAgent3WData")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(COMPLEX_AGENT_CONFIGURATION_2, self.agent_id)
CLIENT.add_agent_operations(self.agent_id, COMPLEX_AGENT_DATA_2)
def test_decide_from_contexts_df_empty_tree(self):
test_df = pd.DataFrame(
[[0, "Jean-Pierre", "+02:00"], [1, "Paul", "+02:00"]],
columns=["a", "b", "tz"],
index=pd.date_range("20200201", periods=2, freq="D").tz_localize(
"Europe/Paris"
),
)
df = CLIENT.decide_from_contexts_df(EMPTY_TREE, test_df)
expected_error_message = (
"Unable to take decision: the decision tree is not "
"based on any context operations."
)
self.assertEqual(len(df), 2)
self.assertEqual(df.columns, ["error"])
self.assertEqual(df["error"][0], expected_error_message)
self.assertEqual(df["error"][1], expected_error_message)
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_decide_from_contexts_df_with_array(self):
tree = CLIENT.get_agent_decision_tree(
self.agent_id, COMPLEX_AGENT_DATA_2.last_valid_index().value // 10 ** 9
)
test_df = pd.DataFrame(
[["Jean-Pierre", "+02:00"], ["Paul"]],
columns=["b", "tz"],
index=pd.date_range("20200201", periods=2, freq="D").tz_localize(
"Europe/Paris"
),
)
test_df_copy = test_df.copy(deep=True)
df = CLIENT.decide_from_contexts_df(tree, test_df)
self.assertEqual(len(df), 2)
self.assertTrue(test_df.equals(test_df_copy))
self.assertTrue(pd.notnull(df["a_predicted_value"][0]))
self.assertTrue(pd.notnull(df["a_predicted_value"][1]))
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasMissingAgentWithData(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "MissingAgentWData")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(MISSING_AGENT_CONFIGURATION, self.agent_id)
CLIENT.add_agent_operations(self.agent_id, MISSING_AGENT_DATA)
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_decide_from_missing_contexts_df(self):
tree = CLIENT.get_agent_decision_tree(
self.agent_id, MISSING_AGENT_DATA.last_valid_index().value // 10 ** 9, "2"
)
df = CLIENT.decide_from_contexts_df(tree, MISSING_AGENT_DATA_DECISION)
self.assertEqual(len(df), 2)
self.assertEqual(
df.first_valid_index(),
pd.Timestamp("2020-01-01 00:00:00", tz="Europe/Paris"),
)
self.assertEqual(
df.last_valid_index(),
pd.Timestamp("2020-01-02 00:00:00", tz="Europe/Paris"),
)
# Also works as before, with a context containing an optional value
output = CLIENT.decide(tree, {"b": {}, "tz": "+02:00"})
self.assertTrue(pd.notnull(output["output"]["a"]["predicted_value"]))
# Also works as before, with a context containing a missing value
output = CLIENT.decide(tree, {"b": None, "tz": "+02:00"})
self.assertTrue(pd.notnull(output["output"]["a"]["predicted_value"]))
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasDatetimeAgentWithData(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "DatetimeAgentWData")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(DATETIME_AGENT_CONFIGURATION, self.agent_id)
CLIENT.add_agent_operations(self.agent_id, DATETIME_AGENT_DATA)
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_datetime_states_df(self):
df = CLIENT.get_agent_states(self.agent_id)
self.assertEqual(len(df), 10)
self.assertEqual(len(df.dtypes), 4)
self.assertEqual(df["myTimeOfDay"].tolist(), [2, 3, 6, 7, 4, 5, 14, 15, 16, 19])
# This test is commented because of the current non-deterministic behavior of craft ai.
# def test_datetime_decide_from_contexts_df(self):
# tree = CLIENT.get_agent_decision_tree(AGENT_ID,
# DATETIME_AGENT_DATA.last_valid_index().value // 10 ** 9)
# test_df = pd.DataFrame(
# [
# [1],
# [3],
# [7]
# ],
# columns=["a"],
# index=pd.date_range("20200101 00:00:00",
# periods=3,
# freq="H").tz_localize("Asia/Shanghai"))
# test_df_copy = test_df.copy(deep=True)
# df = CLIENT.decide_from_contexts_df(tree, test_df)
# self.assertEqual(len(df), 3)
# self.assertEqual(len(df.dtypes), 6)
# self.assertEqual(df["b_predicted_value"].tolist(), ["Pierre", "Paul", "Jacques"])
# self.assertTrue(test_df.equals(test_df_copy))
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasAgentWithInvalidIdentifier(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "InvalidIdentifier")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(INVALID_PYTHON_IDENTIFIER_CONFIGURATION, self.agent_id)
CLIENT.add_agent_operations(self.agent_id, INVALID_PYTHON_IDENTIFIER_DATA)
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_decide_from_python_invalid_identifier(self):
tree = CLIENT.get_agent_decision_tree(
self.agent_id,
INVALID_PYTHON_IDENTIFIER_DATA.last_valid_index().value // 10 ** 9,
"2",
)
test_df = INVALID_PYTHON_IDENTIFIER_DECISION.copy(deep=True)
df = CLIENT.decide_from_contexts_df(tree, test_df)
self.assertEqual(len(df), 3)
self.assertEqual(len(df.dtypes), 8)
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasGeneratorWithOperation(unittest.TestCase):
def setUp(self):
self.agent_1_id = generate_entity_id(AGENT_ID_1_BASE + "GeneratorWithOp")
self.agent_2_id = generate_entity_id(AGENT_ID_2_BASE + "GeneratorWithOp")
self.generator_id = generate_entity_id(GENERATOR_ID_BASE + "GeneratorWithOp")
CLIENT.delete_agent(self.agent_1_id)
CLIENT.delete_agent(self.agent_2_id)
CLIENT.delete_generator(self.generator_id)
CLIENT.create_agent(valid_data.VALID_CONFIGURATION, self.agent_1_id)
CLIENT.create_agent(valid_data.VALID_CONFIGURATION, self.agent_2_id)
CLIENT.add_agent_operations(self.agent_1_id, valid_data.VALID_OPERATIONS_SET)
CLIENT.add_agent_operations(self.agent_2_id, valid_data.VALID_OPERATIONS_SET)
generator_configuration = copy.deepcopy(
valid_data.VALID_GENERATOR_CONFIGURATION
)
generator_configuration["filter"] = [self.agent_1_id, self.agent_2_id]
CLIENT.create_generator(generator_configuration, self.generator_id)
def tearDown(self):
CLIENT.delete_agent(self.agent_1_id)
CLIENT.delete_agent(self.agent_2_id)
CLIENT.delete_generator(self.generator_id)
def test_get_generator_decision_tree_with_pdtimestamp(self):
# test if we get the same decision tree
decision_tree = CLIENT.get_generator_decision_tree(
self.generator_id,
pd.Timestamp(valid_data.VALID_TIMESTAMP, unit="s", tz="UTC"),
)
ground_truth_decision_tree = CLIENT.get_generator_decision_tree(
self.generator_id, valid_data.VALID_TIMESTAMP
)
self.assertIsInstance(decision_tree, dict)
self.assertNotEqual(decision_tree.get("_version"), None)
self.assertNotEqual(decision_tree.get("configuration"), None)
self.assertNotEqual(decision_tree.get("trees"), None)
self.assertEqual(decision_tree, ground_truth_decision_tree)
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasBoostingSimpleAgent(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "BoostingAgentWData")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(SIMPLE_AGENT_BOOSTING_CONFIGURATION, self.agent_id)
CLIENT.add_agent_operations(self.agent_id, SIMPLE_AGENT_BOOSTING_DATA)
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_decide_boosting_from_contexts_df(self):
context_df = pd.DataFrame(
[[random(), random(), random(), "+01:00"] for i in range(4)],
columns=["b", "c", "d", "e"],
index=pd.date_range("20200101", periods=4, freq="T").tz_localize(
"Europe/Paris",
),
)
decisions = CLIENT.decide_boosting_from_contexts_df(
self.agent_id,
SIMPLE_AGENT_BOOSTING_DATA.first_valid_index().value // 10 ** 9,
SIMPLE_AGENT_BOOSTING_DATA.last_valid_index().value // 10 ** 9,
context_df,
)
self.assertEqual(decisions.shape[0], 4)
self.assertTrue(len(decisions.columns) == 1)
self.assertTrue("a_predicted_value" in decisions.columns)
        self.assertTrue(
            isinstance(decisions.iloc[0]["a_predicted_value"], (float, int))
        )
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasBoostingGeneratorWithOperation(unittest.TestCase):
def setUp(self):
self.agent_1_id = generate_entity_id(AGENT_ID_1_BASE + "BoostGeneratorWithOp")
self.agent_2_id = generate_entity_id(AGENT_ID_2_BASE + "BoostGeneratorWithOp")
self.generator_id = generate_entity_id(
GENERATOR_ID_BASE + "BoostGeneratorWithOp"
)
CLIENT.delete_agent(self.agent_1_id)
CLIENT.delete_agent(self.agent_2_id)
CLIENT.delete_generator(self.generator_id)
CLIENT.create_agent(SIMPLE_AGENT_BOOSTING_CONFIGURATION, self.agent_1_id)
CLIENT.create_agent(SIMPLE_AGENT_BOOSTING_CONFIGURATION, self.agent_2_id)
CLIENT.add_agent_operations(self.agent_1_id, SIMPLE_AGENT_BOOSTING_DATA)
CLIENT.add_agent_operations(self.agent_2_id, SIMPLE_AGENT_BOOSTING_MANY_DATA)
generator_configuration = copy.deepcopy(SIMPLE_AGENT_BOOSTING_CONFIGURATION)
generator_configuration["filter"] = [self.agent_1_id, self.agent_2_id]
CLIENT.create_generator(generator_configuration, self.generator_id)
def tearDown(self):
CLIENT.delete_agent(self.agent_1_id)
CLIENT.delete_agent(self.agent_2_id)
CLIENT.delete_generator(self.generator_id)
def test_get_generator_boosting_with_pdtimestamp(self):
context_df = pd.DataFrame(
[[random(), random(), random(), "+01:00"] for i in range(4)],
columns=["b", "c", "d", "e"],
index=pd.date_range("20200101", periods=4, freq="T").tz_localize(
"Europe/Paris",
),
)
decisions = CLIENT.decide_generator_boosting_from_contexts_df(
self.generator_id,
SIMPLE_AGENT_BOOSTING_DATA.first_valid_index().value // 10 ** 9,
SIMPLE_AGENT_BOOSTING_MANY_DATA.last_valid_index().value // 10 ** 9,
context_df,
)
self.assertEqual(decisions.shape[0], 4)
self.assertTrue(len(decisions.columns) == 1)
self.assertTrue("a_predicted_value" in decisions.columns)
        self.assertTrue(
            isinstance(decisions.iloc[0]["a_predicted_value"], (float, int))
        )
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasBoostingGeneratorWithGeneratedType(unittest.TestCase):
def setUp(self):
self.agent_1_id = generate_entity_id(
AGENT_ID_1_BASE + "BoostGeneratorWithGenType"
)
self.agent_2_id = generate_entity_id(
AGENT_ID_2_BASE + "BoostGeneratorWithGenType"
)
self.generator_id = generate_entity_id(
GENERATOR_ID_BASE + "BoostGeneratorWithGenType"
)
CLIENT.delete_agent(self.agent_1_id)
CLIENT.delete_agent(self.agent_2_id)
CLIENT.delete_generator(self.generator_id)
CLIENT.create_agent(
SIMPLE_AGENT_BOOSTING_CONFIGURATION_WITH_GEN_TYPE, self.agent_1_id
)
CLIENT.create_agent(
SIMPLE_AGENT_BOOSTING_CONFIGURATION_WITH_GEN_TYPE, self.agent_2_id
)
CLIENT.add_agent_operations(self.agent_1_id, SIMPLE_AGENT_BOOSTING_DATA)
CLIENT.add_agent_operations(self.agent_2_id, SIMPLE_AGENT_BOOSTING_MANY_DATA)
generator_configuration = copy.deepcopy(
SIMPLE_AGENT_BOOSTING_CONFIGURATION_WITH_GEN_TYPE
)
generator_configuration["filter"] = [self.agent_1_id, self.agent_2_id]
CLIENT.create_generator(generator_configuration, self.generator_id)
def tearDown(self):
CLIENT.delete_agent(self.agent_1_id)
CLIENT.delete_agent(self.agent_2_id)
CLIENT.delete_generator(self.generator_id)
def test_get_generator_boosting_with_pdtimestamp(self):
context_df = pd.DataFrame(
[[random(), random(), random(), "+01:00"] for i in range(4)],
columns=["b", "c", "d", "e"],
index=pd.date_range("20200101", periods=4, freq="T").tz_localize(
"Europe/Paris",
),
)
decisions = CLIENT.decide_generator_boosting_from_contexts_df(
self.generator_id,
SIMPLE_AGENT_BOOSTING_DATA.first_valid_index().value // 10 ** 9,
SIMPLE_AGENT_BOOSTING_MANY_DATA.last_valid_index().value // 10 ** 9,
context_df,
)
self.assertTrue(len(decisions.columns) == 1)
self.assertTrue("a_predicted_value" in decisions.columns)
        self.assertTrue(
            isinstance(decisions.iloc[0]["a_predicted_value"], (float, int))
        )
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasBoostingAgentWithChunks(unittest.TestCase):
def setUp(self):
self.agent_id = generate_entity_id(AGENT_ID_1_BASE + "BoostAgentWithChunks")
CLIENT.delete_agent(self.agent_id)
CLIENT.create_agent(SIMPLE_AGENT_BOOSTING_CONFIGURATION, self.agent_id)
CLIENT.add_agent_operations(self.agent_id, SIMPLE_AGENT_BOOSTING_MANY_DATA)
CLIENT._config["operationsChunksSize"] = 5
def tearDown(self):
CLIENT.delete_agent(self.agent_id)
def test_get_chunked_decision(self):
context_df = SIMPLE_AGENT_BOOSTING_MANY_DATA.copy()
del context_df[SIMPLE_AGENT_BOOSTING_CONFIGURATION["output"][0]]
decisions = CLIENT.decide_boosting_from_contexts_df(
self.agent_id,
SIMPLE_AGENT_BOOSTING_MANY_DATA.first_valid_index().value // 10 ** 9,
SIMPLE_AGENT_BOOSTING_MANY_DATA.last_valid_index().value // 10 ** 9,
context_df,
)
self.assertEqual(decisions.shape[0], pandas_valid_data.NB_MANY_OPERATIONS)
self.assertTrue(len(decisions.columns) == 1)
self.assertTrue("a_predicted_value" in decisions.columns)
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasBoostingGeneratorWithChunks(unittest.TestCase):
def setUp(self):
self.agent_1_id = generate_entity_id(
AGENT_ID_1_BASE + "BoostGeneratorWithChunks"
)
self.generator_id = generate_entity_id(
GENERATOR_ID_BASE + "BoostGeneratorWithChunks"
)
CLIENT.delete_agent(self.agent_1_id)
CLIENT.delete_generator(self.generator_id)
CLIENT.create_agent(SIMPLE_AGENT_BOOSTING_CONFIGURATION, self.agent_1_id)
CLIENT.add_agent_operations(self.agent_1_id, SIMPLE_AGENT_BOOSTING_MANY_DATA)
generator_configuration = copy.deepcopy(SIMPLE_AGENT_BOOSTING_CONFIGURATION)
generator_configuration["filter"] = [self.agent_1_id]
CLIENT.create_generator(generator_configuration, self.generator_id)
CLIENT._config["operationsChunksSize"] = 5
def tearDown(self):
CLIENT.delete_agent(self.agent_1_id)
CLIENT.delete_generator(self.generator_id)
def test_get_chunked_decision(self):
context_df = SIMPLE_AGENT_BOOSTING_MANY_DATA.copy()
del context_df[SIMPLE_AGENT_BOOSTING_CONFIGURATION["output"][0]]
decisions = CLIENT.decide_generator_boosting_from_contexts_df(
self.generator_id,
SIMPLE_AGENT_BOOSTING_MANY_DATA.first_valid_index().value // 10 ** 9,
SIMPLE_AGENT_BOOSTING_MANY_DATA.last_valid_index().value // 10 ** 9,
context_df,
)
self.assertEqual(decisions.shape[0], pandas_valid_data.NB_MANY_OPERATIONS)
self.assertTrue(len(decisions.columns) == 1)
self.assertTrue("a_predicted_value" in decisions.columns)
@unittest.skipIf(CRAFTAI_PANDAS_ENABLED is False, "pandas is not enabled")
class TestPandasDecisionContextGeneration(unittest.TestCase):
def setUp(self):
self.agent_1_id = generate_entity_id(AGENT_ID_1_BASE + "BoostGeneratorWithOp")
self.generator_id = generate_entity_id(
GENERATOR_ID_BASE + "BoostGeneratorWithOp"
)
CLIENT.delete_agent(self.agent_1_id)
CLIENT.delete_generator(self.generator_id)
CLIENT.create_agent(SIMPLE_AGENT_BOOSTING_CONFIGURATION, self.agent_1_id)
CLIENT.add_agent_operations(self.agent_1_id, SIMPLE_AGENT_BOOSTING_DATA)
generator_configuration = copy.deepcopy(SIMPLE_AGENT_BOOSTING_CONFIGURATION)
generator_configuration["filter"] = [self.agent_1_id]
CLIENT.create_generator(generator_configuration, self.generator_id)
def tearDown(self):
CLIENT.delete_agent(self.agent_1_id)
CLIENT.delete_generator(self.generator_id)
def test_time_features_generation(self):
# Ensures that time features are correctly generated.
contexts_df = pd.DataFrame(
[[random(), random(), random(), 1] for i in range(4)],
columns=["b", "c", "d", "e"],
index=pd.date_range("20200101", periods=4, freq="T").tz_localize(
"Europe/Paris",
),
)
configuration = SIMPLE_AGENT_BOOSTING_CONFIGURATION_WITH_GEN_TYPE
df, tz_col = CLIENT._generate_decision_df_and_tz_col(
self.generator_id, contexts_df, configuration
)
params = {
"context_ops": list(df.itertuples(name=None))[0],
"configuration": configuration,
"feature_names": df.columns.values,
"tz_col": tz_col,
}
context = CLIENT._check_context_properties(params)
time = CLIENT._generate_time_features(params, context)
decide_context = CLIENT._generate_decision_context(params, context, time)
time_dict = time.to_dict()
self.assertEqual(time_dict["timestamp"], 1577833200)
self.assertEqual(time_dict["timezone"], "+01:00")
self.assertEqual(time_dict["time_of_day"], 0.0)
self.assertEqual(time_dict["day_of_week"], 2)
self.assertEqual(time_dict["day_of_month"], 1)
self.assertEqual(time_dict["month_of_year"], 1)
self.assertEqual(time_dict["utc_iso"], "2020-01-01T00:00:00+01:00")
self.assertEqual(decide_context["e"], "+01:00")
self.assertEqual(decide_context["f"], 2)
| bsd-3-clause | 4,118,190,505,216,684,500 | 40.007407 | 96 | 0.635326 | false |
calaldees/libs | python3/calaldees/pyramid_helpers/auto_format2.py | 1 | 11313 | #-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import re
import copy
from functools import lru_cache
import pyramid.request
import pyramid.response
import pyramid.events
import pyramid.decorator
import logging
log = logging.getLogger(__name__)
#-------------------------------------------------------------------------------
# Classes
#-------------------------------------------------------------------------------
class FormatError(Exception):
pass
#-------------------------------------------------------------------------------
# Action Returns
#-------------------------------------------------------------------------------
def action_ok(message='', data=None, code=200, status='ok', **kwargs):
    data = {} if data is None else data  # avoid a shared mutable default argument
    assert isinstance(message, str)
assert isinstance(data, dict)
assert isinstance(code, int)
d = {
'status': status,
'messages': [],
'data': data,
'code': code,
}
d.update(kwargs)
if message:
d['messages'].append(message)
return d
class action_error(Exception):
    def __init__(self, message='', data=None, code=500, status='error', **kwargs):
        super().__init__(message)
        self.d = action_ok(message=message, data=data, code=code, status=status, **kwargs)
    def __str__(self):
        return str(self.d)
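# Illustrative view usage (a sketch, not part of this module; `my_view` is
# hypothetical):
#
#   def my_view(request):
#       if 'id' not in request.params:
#           raise action_error(message='id required', code=400)
#       return action_ok(message='found', data={'id': request.params['id']})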
#-------------------------------------------------------------------------------
# Register Format Mechanics
#-------------------------------------------------------------------------------
class FormatRendererManager():
def __init__(self):
self._renderers = {}
self._content_type_to_format = {}
self._format_to_content_type = {}
@property
def registered_formats(self):
return self._renderers.keys()
@pyramid.decorator.reify
def registered_formats_regex(self):
return re.compile(r'\.(?P<format>{})$'.format('|'.join(self.registered_formats)), flags=re.IGNORECASE)
def register_format_decorator(self, format_name, content_type=None):
assert isinstance(format_name, str)
assert format_name not in self._renderers
if content_type:
assert isinstance(content_type, str)
assert content_type not in self._content_type_to_format
self._content_type_to_format[content_type] = format_name
self._format_to_content_type[format_name] = content_type
        def wrapper(format_func):
            assert callable(format_func)
            self._renderers[format_name] = format_func
            return format_func
        return wrapper
def render(self, request, data):
format_name = data['format']
response = self._renderers[format_name](request, data)
# Override context type
if hasattr(response, 'content_type') and self._format_to_content_type.get(format_name):
response.content_type = self._format_to_content_type[format_name]
return response
format_manager = FormatRendererManager()
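# Registering a renderer is done with the decorator above; a minimal sketch for
# a hypothetical plain-text format (the name and content type are illustrative):
#
#   @format_manager.register_format_decorator('txt', content_type='text/plain')
#   def format_txt(request, data):
#       request.response.text = str(data)
#       return request.response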
# -----------------------------------
class PostViewDictAugmentation():
def __init__(self):
self._pre_render_funcs = []
self._post_render_funcs = []
def register_pre_render_decorator(self):
        def wrapper(augmenter_func):
            assert callable(augmenter_func)
            self._pre_render_funcs.append(augmenter_func)
            return augmenter_func
        return wrapper
def register_post_render_decorator(self):
        def wrapper(augmenter_func):
            assert callable(augmenter_func)
            self._post_render_funcs.append(augmenter_func)
            return augmenter_func
        return wrapper
def pre_render_augmentation(self, request, response):
for _func in self._pre_render_funcs:
_func(request, response)
def post_render_augmentation(self, request, response, response_object):
for _func in self._post_render_funcs:
_func(request, response, response_object)
post_view_dict_augmentation = PostViewDictAugmentation()
@post_view_dict_augmentation.register_post_render_decorator()
def overlay_return_code_on_response_object(request, response, response_object):
if isinstance(response_object, pyramid.response.Response):
response_object.status_int = response.get('code')
@post_view_dict_augmentation.register_pre_render_decorator()
def add_template_to_response(request, response):
try:
response.setdefault('template', request.context.__template__)
except AttributeError:
pass
@post_view_dict_augmentation.register_pre_render_decorator()
def add_format_to_response(request, response):
try:
response.setdefault('format', request.requested_response_format)
except AttributeError:
pass
# TODO: move this to the session to reduce coupling
@post_view_dict_augmentation.register_pre_render_decorator()
def add_identity_to_response(request, response):
if hasattr(request, 'session_identity'):
response['identity'] = request.session_identity
# TODO: Move this to reduce coupling
@post_view_dict_augmentation.register_pre_render_decorator()
def add_messages_in_session_to_response(request, response):
if request.session.peek_flash():
# TODO: This needs to be modularised
response.setdefault('messages', []).extend(request.session.pop_flash())
# -----------------------
def before_traversal_extract_format_from_path_info_to_get_param(event):
"""
We could have a path_info of '/track/t3.json'
We don't want '.json' contaminating the traversal algorithm
Use a regex to extract the format from the path_info to a GET param
"""
path_format_match = format_manager.registered_formats_regex.search(event.request.path_info)
if path_format_match:
event.request.GET.update(path_format_match.groupdict())
event.request.path_info = format_manager.registered_formats_regex.sub('', event.request.path_info)
def requested_response_format(request):
formats = set(filter(None, (
request.params.get('format'), # From GET/POST params (augmented by BeforeTraversal)
request.matchdict.get('format') if request.matchdict else None, # matched route 'format' key
))) or set(filter(None, (
format_manager._content_type_to_format.get( # content_type from 'Accept' header
request.accept.best_match(format_manager._content_type_to_format.keys())
),
))) or {
# TODO: BUG: I don't think this html fallback works - a None content_type in `best_match` above defualts to the order they are registed in the `format_manager`
request.registry.settings.get('api.format.default', 'html'),
}
if len(formats) >= 2:
raise Exception(f'Multiple formats requested {formats}')
return formats.pop()
def setup_pyramid_autoformater(config):
config.add_subscriber(before_traversal_extract_format_from_path_info_to_get_param, pyramid.events.BeforeTraversal)
config.add_request_method(requested_response_format, 'requested_response_format', property=True) # TODO: could we use reify here? Do views modify this anywhere?
#config.add_response_adapter(autoformat_response_adaptor, dict)
#def autoformat_format_selector_response_callback(request, response):
# if isinstance(response, dict):
# response['format'] = request.requested_response_format
#def add_response_callbacks_to_newrequest(event):
# event.request.add_response_callback(autoformat_format_selector_response_callback)
#config.add_subscriber(add_response_callbacks_to_newrequest, pyramid.events.NewRequest)
def autoformat_view(view, info):
if not info.options.get('autoformat', True):
return view
def view_wrapper(context, request):
#if 'internal_request' in request.matchdict: # Abort if internal call
# return view(context, request)
try:
response = view(context, request) # Execute View
except action_error as ae:
response = ae.d
if isinstance(response, dict) and response.keys() >= {'code', 'messages', 'data', 'status'}:
response = copy.copy(response) # HACK: BUGFIX: dogpile in_python cache dicts were being modified on return
post_view_dict_augmentation.pre_render_augmentation(request, response)
response_object = format_manager.render(request, response)
post_view_dict_augmentation.post_render_augmentation(request, response, response_object)
return response_object
return response
return view_wrapper
autoformat_view.options = ('autoformat', )
config.add_view_deriver(autoformat_view, name='autoformat', over='mapped_view', under='rendered_view')
#-------------------------------------------------------------------------------
# Renderer Template
#-------------------------------------------------------------------------------
from pyramid.renderers import render_to_response
import os.path
def render_template(request, data, template_language='mako', format_path=''):
assert data.keys() >= {'format', 'template'}
return render_to_response(
os.path.join(format_path or data['format'], '{}.{}'.format(data['template'], template_language)),
data,
request=request,
response=request.response,
)
#-------------------------------------------------------------------------------
# Formatters
#-------------------------------------------------------------------------------
@format_manager.register_format_decorator('python')
def format_python(request, data):
return data
@format_manager.register_format_decorator('html', content_type='text/html')
def format_html(request, data):
return render_template(request, data)
@format_manager.register_format_decorator('html_template')
def format_html_template(request, data):
"""
Return html content with no head/body tags
Base templates must support result['format'] for this to function
"""
return render_template(request, data, format_path='html')
from ..json import json_string
@format_manager.register_format_decorator('json', content_type='application/json')
def format_json(request, data):
request.response.text = json_string(data)
return request.response
#charset='utf-8',
from ..xml import dictToXMLString
@format_manager.register_format_decorator('xml', content_type='text/xml')
def format_xml(request, data):
request.response.text = '<?xml version="1.0" encoding="UTF-8"?>' + dictToXMLString(data) #.encode('utf-8')
return request.response
#charset='utf-8',
from pyramid.httpexceptions import HTTPFound
@format_manager.register_format_decorator('redirect')
def format_redirect(request, data):
"""
A special case for compatable browsers making REST calls
"""
# SetCookie is now supported on redirect. This legacy check can probably be removed?
#if request.response.headers.get('Set-Cookie'):
# raise FormatError('format_redirect cannot function when cookies are being set')
for message in data['messages']:
request.session.flash(message)
data['code'] = 302
return HTTPFound(location=request.referer or '/', headers=request.response.headers)
| gpl-3.0 | -1,741,857,060,563,053,300 | 37.479592 | 167 | 0.618757 | false |
hakonsbm/nest-simulator | pynest/examples/BrodyHopfield.py | 1 | 4554 | # -*- coding: utf-8 -*-
#
# BrodyHopfield.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""Spike synchronization through subthreshold oscillation
------------------------------------------------------------
This script reproduces the spike synchronization behavior
of integrate-and-fire neurons in response to a subthreshold
oscillation. This phenomenon is shown in Fig. 1 of [1]_.
Neurons receive a weak 35 Hz oscillation, a Gaussian noise current
and an increasing DC. The time-locking capability is shown to
depend on the input current given. The result is then plotted using
pylab. All parameters are taken from the above paper.
References
~~~~~~~~~~~~~
.. [1] Brody CD and Hopfield JJ (2003). Simple networks for
spike-timing-based computation, with application to olfactory
processing. Neuron 37, 843-852.
"""
#################################################################################
# First, we import all necessary modules for simulation, analysis, and plotting.
import nest
import nest.raster_plot
###############################################################################
# Second, the simulation parameters are assigned to variables.
N = 1000 # number of neurons
bias_begin = 140. # minimal value for the bias current injection [pA]
bias_end = 200. # maximal value for the bias current injection [pA]
T = 600 # simulation time (ms)
# parameters for the alternating-current generator
driveparams = {'amplitude': 50., 'frequency': 35.}
# parameters for the noise generator
noiseparams = {'mean': 0.0, 'std': 200.}
neuronparams = {'tau_m': 20., # membrane time constant
'V_th': 20., # threshold potential
'E_L': 10., # membrane resting potential
't_ref': 2., # refractory period
'V_reset': 0., # reset potential
'C_m': 200., # membrane capacitance
'V_m': 0.} # initial membrane potential
###############################################################################
# Third, the nodes are created using ``Create``. We store the returned handles
# in variables for later reference.
neurons = nest.Create('iaf_psc_alpha', N)
sd = nest.Create('spike_detector')
noise = nest.Create('noise_generator')
drive = nest.Create('ac_generator')
###############################################################################
# Set the parameters specified above for the generators using ``SetStatus``.
nest.SetStatus(drive, driveparams)
nest.SetStatus(noise, noiseparams)
###############################################################################
# Set the parameters specified above for the neurons. Neurons get an internal
# current. The first neuron additionally receives the current with amplitude
# `bias_begin`, the last neuron with amplitude `bias_end`.
nest.SetStatus(neurons, neuronparams)
nest.SetStatus(neurons, [{'I_e':
(n * (bias_end - bias_begin) / N + bias_begin)}
for n in neurons])
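###############################################################################
# Worked example of the bias ramp above (assuming GIDs run from 1 to N): with
# bias_begin = 140 pA, bias_end = 200 pA and N = 1000, neuron n receives
# I_e = 0.06 * n + 140 pA, i.e. ~140.06 pA for the first neuron and 200 pA
# for the last.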
###############################################################################
# Set the parameters for the ``spike_detector``: recorded data should include
# the information about global IDs of spiking neurons and the time of
# individual spikes.
nest.SetStatus(sd, {"withgid": True, "withtime": True})
###############################################################################
# Connect alternating current and noise generators as well as
# spike detectors to neurons
nest.Connect(drive, neurons)
nest.Connect(noise, neurons)
nest.Connect(neurons, sd)
###############################################################################
# Simulate the network for time `T`.
nest.Simulate(T)
###############################################################################
# Plot the raster plot of the neuronal spiking activity.
nest.raster_plot.from_device(sd, hist=True)
| gpl-2.0 | 3,528,837,289,585,193,000 | 37.923077 | 81 | 0.585859 | false |
bschmoker/stix-validator | validators/xml_schema.py | 1 | 7809 | # Copyright (c) 2014, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import os
from collections import defaultdict
from lxml import etree
class XmlSchemaValidator(object):
NS_XML_SCHEMA_INSTANCE = "http://www.w3.org/2001/XMLSchema-instance"
NS_XML_SCHEMA = "http://www.w3.org/2001/XMLSchema"
def __init__(self, schema_dir=None):
self.__imports = self._build_imports(schema_dir)
def _get_target_ns(self, fp):
'''Returns the target namespace for a schema file
Keyword Arguments
fp - the path to the schema file
'''
parser = etree.ETCompatXMLParser(huge_tree=True)
tree = etree.parse(fp, parser=parser)
root = tree.getroot()
return root.attrib['targetNamespace'] # throw an error if it
# doesn't exist...we can't
# validate
def _get_include_base_schema(self, list_schemas):
'''Returns the root schema which defines a namespace.
        Certain schemas, such as OASIS CIQ, use xs:include statements in their
        schemas, where two schemas define a namespace (e.g., XAL.xsd and
        XAL-types.xsd). This makes validation difficult, since we must refer
        to a single schema for a given namespace.
To fix this, we attempt to find the root schema which includes the
others. We do this by seeing if a schema has an xs:include element,
and if it does we assume that it is the parent. This is totally wrong
and needs to be fixed. Ideally this would build a tree of includes and
return the root node.
Keyword Arguments:
list_schemas - a list of schema file paths that all belong to the same
namespace
'''
parent_schema = None
tag_include = "{%s}include" % (self.NS_XML_SCHEMA)
for fn in list_schemas:
tree = etree.parse(fn)
root = tree.getroot()
includes = root.findall(tag_include)
if len(includes) > 0: # this is a hack that assumes if the schema
# includes others, it is the base schema for
# the namespace
return fn
return parent_schema
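    # Illustration of the heuristic above (file names as in the docstring):
    # given ['XAL-types.xsd', 'XAL.xsd'] where only XAL.xsd contains an
    # <xs:include>, this returns 'XAL.xsd' as the base schema for the
    # namespace.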
def _build_imports(self, schema_dir):
'''Given a directory of schemas, this builds a dictionary of schemas
that need to be imported under a wrapper schema in order to enable
validation. This returns a dictionary of the form
{namespace : path to schema}.
Keyword Arguments
schema_dir - a directory of schema files
'''
if not schema_dir:
return None
imports = defaultdict(list)
for top, dirs, files in os.walk(schema_dir):
for f in files:
if f.endswith('.xsd'):
fp = os.path.join(top, f)
target_ns = self._get_target_ns(fp)
imports[target_ns].append(fp)
for k, v in imports.iteritems():
if len(v) > 1:
base_schema = self._get_include_base_schema(v)
imports[k] = base_schema
else:
imports[k] = v[0]
return imports
def _build_wrapper_schema(self, import_dict):
'''Creates a wrapper schema that imports all namespaces defined by the
input dictionary. This enables validation of instance documents that
refer to multiple namespaces and schemas
Keyword Arguments
import_dict - a dictionary of the form {namespace : path to schema} that
will be used to build the list of xs:import statements
'''
schema_txt = '''<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
targetNamespace="http://stix.mitre.org/tools/validator"
elementFormDefault="qualified"
attributeFormDefault="qualified"/>'''
root = etree.fromstring(schema_txt)
tag_import = "{%s}import" % (self.NS_XML_SCHEMA)
for ns, list_schemaloc in import_dict.iteritems():
schemaloc = list_schemaloc
schemaloc = schemaloc.replace("\\", "/")
attrib = {'namespace': ns, 'schemaLocation': schemaloc}
el_import = etree.Element(tag_import, attrib=attrib)
root.append(el_import)
return root
def _extract_schema_locations(self, root):
schemaloc_dict = {}
tag_schemaloc = "{%s}schemaLocation" % (self.NS_XML_SCHEMA_INSTANCE)
schemaloc = root.attrib[tag_schemaloc].split()
schemaloc_pairs = zip(schemaloc[::2], schemaloc[1::2])
for ns, loc in schemaloc_pairs:
schemaloc_dict[ns] = loc
return schemaloc_dict
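    # Example (hypothetical attribute value): xsi:schemaLocation=
    # "urn:example:ns1 ns1.xsd urn:example:ns2 ns2.xsd" is parsed into
    # {'urn:example:ns1': 'ns1.xsd', 'urn:example:ns2': 'ns2.xsd'}.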
def _build_result_dict(self, result, errors=None):
d = {}
d['result'] = result
if errors:
if not hasattr(errors, "__iter__"):
errors = [errors]
d['errors'] = errors
return d
def validate(self, doc, schemaloc=False):
        '''Validates an instance document.
        Returns a dict whose 'result' entry holds the boolean validation
        outcome and whose 'errors' entry, when present, lists the errors.
Keyword Arguments
        doc - a filename, file-like object, etree._Element, or
etree._ElementTree to be validated
'''
if not(schemaloc or self.__imports):
return self._build_result_dict(False,
"No schemas to validate "
"against! Try instantiating "
"XmlValidator with "
"use_schemaloc=True or setting the "
"schema_dir param in __init__")
if isinstance(doc, etree._Element):
root = doc
elif isinstance(doc, etree._ElementTree):
root = doc.getroot()
else:
try:
parser = etree.ETCompatXMLParser(huge_tree=True)
tree = etree.parse(doc, parser=parser)
root = tree.getroot()
except etree.XMLSyntaxError as e:
return self._build_result_dict(False, str(e))
if schemaloc:
try:
required_imports = self._extract_schema_locations(root)
except KeyError as e:
return self._build_result_dict(False,
"No schemaLocation attribute "
"set on instance document. "
"Unable to validate")
else:
required_imports = {}
# visit all nodes and gather schemas
for elem in root.iter():
for prefix, ns in elem.nsmap.iteritems():
schema_location = self.__imports.get(ns)
if schema_location:
required_imports[ns] = schema_location
if not required_imports:
return self._build_result_dict(False, "Unable to determine schemas "
"to validate against")
wrapper_schema_doc = self._build_wrapper_schema(import_dict=required_imports)
xmlschema = etree.XMLSchema(wrapper_schema_doc)
isvalid = xmlschema.validate(root)
if isvalid:
return self._build_result_dict(True)
else:
return self._build_result_dict(False,
[str(x) for x in xmlschema.error_log])
| bsd-3-clause | -661,948,361,949,860,100 | 38.841837 | 85 | 0.548726 | false |
danposch/BPR-Scripts | deploy_network.py | 1 | 14796 | #@author dposch
import os
import paramiko
import ssh_lib as ssh
import node_parser as np
import logging
import apps as ap
from igraph import *
from allPaths import *
from start_apps import *
from apps import *
import time
import copy
PI_CONFIG_HZ = 100
LATENCY = 100 #queue length of tbf in ms
def getNextFibHops(paths):
cost_dict = {}
for path in paths:
nextHop = path[1]
if nextHop in cost_dict.keys():
if len(path) < len(cost_dict[nextHop]):
cost_dict[nextHop] = path
else:
cost_dict[nextHop] = path
return cost_dict.values()
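# Illustrative example (hypothetical node ids): for
# paths = [[0, 1, 3], [0, 1, 2, 3], [0, 2, 3]] the next hops seen from node 0
# are 1 and 2; only the shortest path per next hop is kept, so the result is
# [[0, 1, 3], [0, 2, 3]].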
def deployNetwork(NETWORK, PATHS, PI_START_SUFFIX, PI_END_SUFFIX, FW_STRATEGIES, MNG_PREFIX, EMU_PREFIX, ITEC_GATEWAY):
print "Deploying Network: " + NETWORK
# available pis: PREFIX.NR = IP
    pi_list = range(PI_START_SUFFIX,PI_END_SUFFIX+1) # returns [start, ..., end]
print "Available PIs(" + str(len(pi_list)) + "): " + str(pi_list)
nodes, link_list, property_list = np.parseNetwork(NETWORK);
if len(pi_list) < nodes:
print "Error to less PIs available to deploy network!"
exit(-1)
    #map ip addresses to nodes
for link in link_list:
link.ip1 = EMU_PREFIX+str(pi_list[int(link.n1)])
link.ip2 = EMU_PREFIX+str(pi_list[int(link.n2)])
for prop in property_list:
prop.ip_client = EMU_PREFIX+str(pi_list[int(prop.client)])
prop.ip_server = EMU_PREFIX+str(pi_list[int(prop.server)])
#init commands per pi { pi:[c1,c2,...,cn]}
commands = {}
for i in pi_list:
commands[MNG_PREFIX+str(i)] = []
#drop everything
commands[MNG_PREFIX+str(i)].append("sudo iptables --flush") #delete all old entries
commands[MNG_PREFIX+str(i)].append("sudo iptables -P INPUT DROP")
commands[MNG_PREFIX+str(i)].append("sudo iptables -P FORWARD DROP")
commands[MNG_PREFIX+str(i)].append("sudo iptables -P OUTPUT DROP")
#but allow all ip traffic from the mangement interface
commands[MNG_PREFIX+str(i)].append("sudo iptables -A INPUT -d " + MNG_PREFIX+str(i) + " -j ACCEPT")
commands[MNG_PREFIX+str(i)].append("sudo iptables -A INPUT -s " + MNG_PREFIX+str(i) + " -j ACCEPT")
commands[MNG_PREFIX+str(i)].append("sudo iptables -A FORWARD -d " + MNG_PREFIX+str(i) + " -j ACCEPT")
commands[MNG_PREFIX+str(i)].append("sudo iptables -A FORWARD -s " + MNG_PREFIX+str(i) + " -j ACCEPT")
commands[MNG_PREFIX+str(i)].append("sudo iptables -A OUTPUT -d " + MNG_PREFIX+str(i) + " -j ACCEPT")
commands[MNG_PREFIX+str(i)].append("sudo iptables -A OUTPUT -s " + MNG_PREFIX+str(i) + " -j ACCEPT")
#setup the itec gateway
commands[MNG_PREFIX+str(i)].append("sudo iptables -A INPUT -d " + ITEC_GATEWAY + " -j ACCEPT")
commands[MNG_PREFIX+str(i)].append("sudo iptables -A INPUT -s " + ITEC_GATEWAY + " -j ACCEPT")
commands[MNG_PREFIX+str(i)].append("sudo iptables -A FORWARD -d " + ITEC_GATEWAY + " -j ACCEPT")
commands[MNG_PREFIX+str(i)].append("sudo iptables -A FORWARD -s " + ITEC_GATEWAY + " -j ACCEPT")
commands[MNG_PREFIX+str(i)].append("sudo iptables -A OUTPUT -d " + ITEC_GATEWAY + " -j ACCEPT")
commands[MNG_PREFIX+str(i)].append("sudo iptables -A OUTPUT -s " + ITEC_GATEWAY + " -j ACCEPT")
#delete all old tc settings (default ceil = rate)
commands[MNG_PREFIX+str(i)].append("sudo tc qdisc del dev eth0 root");
commands[MNG_PREFIX+str(i)].append("sudo tc qdisc add dev eth0 root handle 1: htb default " + str(10))
commands[MNG_PREFIX+str(i)].append("sudo tc class add dev eth0 parent 1: classid 1:"+ str(10) + " htb rate 100mbit")
for link in link_list:
#ip
ip1 = link.ip1
ip2 = link.ip2
#node id
n1 = link.n1
n2 = link.n2
#add connection between nodes ip1 and ip2
commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo iptables -A INPUT -d " + ip1 + " -s " + ip2 +" -j ACCEPT")
commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo iptables -A FORWARD -d " + ip1 + " -s " + ip2 + " -j ACCEPT")
commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo iptables -A FORWARD -d " + ip2 + " -s " + ip1 + " -j ACCEPT")
commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo iptables -A OUTPUT -s " + ip1 + " -d " + ip2 + " -j ACCEPT")
commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo iptables -A INPUT -d " + ip2 + " -s " + ip1 +" -j ACCEPT")
commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo iptables -A FORWARD -d " + ip2 + " -s " + ip1 + " -j ACCEPT")
commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo iptables -A FORWARD -d " + ip1 + " -s " + ip2 + " -j ACCEPT")
commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo iptables -A OUTPUT -s " + ip2 + " -d " + ip1 + " -j ACCEPT")
#example: http://askubuntu.com/questions/776/how-i-can-limit-download-upload-bandwidth
#add tc classes for n1 (default ceil = rate)
handle_offset = 11
flowId1 = "1:" + str(handle_offset+int(n2)) #towards n1
flowId2 = "1:" + str(handle_offset+nodes+1+int(n2)) #from n1
#commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc class add dev eth0 parent 1: classid " + flowId1 + " htb rate " + str(link.bw_n2_to_n1) + "kbit") #towards n1
#commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc class add dev eth0 parent 1: classid " + flowId2 + " htb rate " + str(link.bw_n1_to_n2) + "kbit") #from n1
commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc class add dev eth0 parent 1: classid " + flowId1 + " htb rate 100mbit") #towards n1
commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc class add dev eth0 parent 1: classid " + flowId2 + " htb rate 100mbit") #from n1
#add tc filter for n1
commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc filter add dev eth0 protocol ip parent 1:0 prio 1 u32 match ip dst " + ip1 + " match ip src " + ip2 + " flowid " + flowId1) #towards n1
commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc filter add dev eth0 protocol ip parent 1:0 prio 1 u32 match ip dst " + ip2 + " match ip src " + ip1 + " flowid " + flowId2) #from n1
#add tbf below htp for queue length
# burst >= rate / CONFIG_HZ # rate is in kbits
burst = float(link.bw_n2_to_n1 * 1000) / (PI_CONFIG_HZ * 8)
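        # Worked example (with PI_CONFIG_HZ = 100 as set above): a 1000 kbit/s
        # link gives burst = 1000 * 1000 / (100 * 8) = 1250 bytes per tick.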
commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc qdisc add dev eth0 parent " + flowId1 + " handle " + str(handle_offset+int(n2)) +
": tbf rate " + str(link.bw_n2_to_n1) + "kbit burst " + str(int(burst)) + " latency " + str(LATENCY) + "ms") #towards n1
burst = float(link.bw_n1_to_n2 * 1000) / (PI_CONFIG_HZ * 8)
commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc qdisc add dev eth0 parent " + flowId2 + " handle " + str(handle_offset+nodes+1+int(n2)) +
": tbf rate " + str(link.bw_n1_to_n2) + "kbit burst " + str(int(burst)) + " latency " + str(LATENCY) + "ms") #from n1
#delay and loss for n1 to n2
netman_handle = str(handle_offset+(nodes+1)*2+int(n2))
commands[ip1.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc qdisc add dev eth0 parent " + str(handle_offset+nodes+1+int(n2)) + ":" + str(int(n2)+1) + " handle " + netman_handle + " netem delay " + str(link.delay_n1_to_n2) + "ms" +
str(link.loss_n1_to_n2))
#sudo tc qdisc add dev eth0 parent 33:1 handle 9999 netem delay 10ms
#add tc classes for n2 (default ceil = rate)
flowId1 = "1:" + str(handle_offset+int(n1)) #towards n2
flowId2 = "1:" + str(handle_offset+nodes+1+int(n1)) #from n2
#commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc class add dev eth0 parent 1: classid " + flowId1 + " htb rate " + str(link.bw_n1_to_n2) + "kbit") #towards n2
#commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc class add dev eth0 parent 1: classid " + flowId2 + " htb rate " + str(link.bw_n2_to_n1) + "kbit") #from n2
commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc class add dev eth0 parent 1: classid " + flowId1 + " htb rate 100mbit") #towards n2
commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc class add dev eth0 parent 1: classid " + flowId2 + " htb rate 100mbit") #from n2
#add tc filter for n2
commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc filter add dev eth0 protocol ip parent 1:0 prio 1 u32 match ip dst " + ip2 + " match ip src " + ip1 + " flowid " + flowId1) #towards n2
commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc filter add dev eth0 protocol ip parent 1:0 prio 1 u32 match ip dst " + ip1 + " match ip src " + ip2 + " flowid " + flowId2) #from n2
#add tbf below htp for queue length
# burst >= rate / CONFIG_HZ # rate is in kbits
burst = float(link.bw_n1_to_n2 * 1000) / (PI_CONFIG_HZ * 8)
commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc qdisc add dev eth0 parent " + flowId1 + " handle " + str(handle_offset+int(n1)) +
": tbf rate " + str(link.bw_n1_to_n2) + "kbit burst " + str(int(burst)) + " latency " + str(LATENCY) + "ms") #towards n2
burst = float(link.bw_n2_to_n1 * 1000) / (PI_CONFIG_HZ * 8)
commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc qdisc add dev eth0 parent " + flowId2 + " handle " + str(handle_offset+nodes+1+int(n1)) +
": tbf rate " + str(link.bw_n2_to_n1) + "kbit burst " + str(int(burst)) + " latency " + str(LATENCY) + "ms") #from n2
#delay and loss for n2 to n1
netman_handle = str(handle_offset+(nodes+1)*2+int(n1))
commands[ip2.replace(EMU_PREFIX, MNG_PREFIX)].append("sudo tc qdisc add dev eth0 parent " + str(handle_offset+nodes+1+int(n1)) + ":" + str(int(n1)+1) + " handle " + netman_handle + " netem delay " + str(link.delay_n2_to_n1) + "ms" +
str(link.loss_n2_to_n1))
print "Configuring NFDs:"
#restart NFD on all PIs
for pi in pi_list:
commands[MNG_PREFIX + str(pi)].append("sudo nfd-stop")
commands[MNG_PREFIX + str(pi)].append("sleep 5")
commands[MNG_PREFIX + str(pi)].append("sudo nfd-start")
commands[MNG_PREFIX + str(pi)].append("sleep 5")
#deploy ALL shortest routes
#1. we need a graph to calc the shortest / all paths
g = Graph()
g = g.as_directed()
for pi in pi_list:
g.add_vertex(EMU_PREFIX+str(pi))
#g.add_vertices(len(pi_list))
for link in link_list:
g.add_edges( [(int(link.n1),int(link.n2)), (int(link.n2), int(link.n1)) ])
g.vs["label"] = g.vs["name"]
layout = g.layout("kk")
#plot(g, layout = layout)
for pi_idx, pi in enumerate(pi_list):
for to_idx, to in enumerate(pi_list[pi_idx+1:]):
#print "Start calc for pi:" +str(pi)
if PATHS == "shortest":
paths = g.get_all_shortest_paths(pi-PI_START_SUFFIX, to-PI_START_SUFFIX)
elif PATHS == "all":
paths = find_all_paths(g, pi-PI_START_SUFFIX, to-PI_START_SUFFIX, maxlen = nodes)
else:
print "Invalid Path selection! Please choose \"all\" or \"shortest\"!"
exit(-1)
#print "found " + str(len(paths)) + " for pair (" + str(pi) + "," + str(to) + ")"
            #store reverse paths for to -> pi
reverse_paths = copy.deepcopy(paths)
for path in reverse_paths:
path.reverse()
#first calc and add fib entries from pi -> to
paths = getNextFibHops(paths)
#install next hop and costs
for path in paths:
for fws in FW_STRATEGIES:
commands[MNG_PREFIX+ str(pi)].append("sudo nfdc register /" + fws +"/"+ str(to-PI_START_SUFFIX) +" udp://" +EMU_PREFIX + str(path[1]+PI_START_SUFFIX) + " -c " + str(len(path)-1)) # /FW_STRATEGY/Node_ID/
#now calc and add fib entries from to -> pi
reverse_paths = getNextFibHops(reverse_paths)
#install next hop and costs
for path in reverse_paths:
for fws in FW_STRATEGIES:
                    commands[MNG_PREFIX+ str(to)].append("sudo nfdc register /" + fws +"/"+ str(pi-PI_START_SUFFIX) +" udp://" +EMU_PREFIX + str(path[1]+PI_START_SUFFIX) + " -c " + str(len(path)-1)) # /FW_STRATEGY/Node_ID/
#install strategies per fw-prefix on each pi
for pi in pi_list:
for fws in FW_STRATEGIES:
commands[MNG_PREFIX+ str(pi)].append("sudo nfdc set-strategy " + "/"+fws + "/ /localhost/nfd/strategy/" + fws) #set-strategy <namespace> <strategy-name>
#print commands
#logging.basicConfig(level=logging.DEBUG)
apps = {}
for prop in property_list:
#add client app
if prop.ip_client in apps.keys():
"Configuration Error! Only one Client-App per Node!"
exit(-1)
apps[prop.ip_client.replace(EMU_PREFIX, MNG_PREFIX)] = []
        #apps[prop.ip_client.replace(EMU_PREFIX, MNG_PREFIX)].append("sleep 10") #Clients sleep so servers can start first # No longer necessary
apps[prop.ip_client.replace(EMU_PREFIX, MNG_PREFIX)].append(ap.getConsumerCommand(prop.client, prop.server))
#add server app
if prop.ip_server in apps.keys():
continue #servers may appear in multiple properties as 1 server may serve for many clients
apps[prop.ip_server.replace(EMU_PREFIX, MNG_PREFIX)] = []
apps[prop.ip_server.replace(EMU_PREFIX, MNG_PREFIX)].append(ap.getProducerCommand(prop.server))
#print
#prepare client.sh for logging
orig_f = open("client.sh", "r")
modified_f = open (os.getenv("HOME")+"/client.sh",'w')
modified_f.write('#!/bin/sh\n');
for line in orig_f:
modified_f.write(line.replace("$$$TRACKAPP$$$", getTrackApp()))
modified_f.close()
orig_f.close()
#deploy network
for pi in commands:
print "Setting up Network Settings for PI: " + pi
with open(os.getenv("HOME")+"/network.sh",'w') as f:
f.write('#!/bin/sh\n') # python will convert \n to os.linesep
for c in commands[pi]:
#print c
f.write(c+"\n")
f.close()
#check if pi shall run an app
hasApp = False
if pi in apps.keys():
hasApp = True
print "Setting up App Script for PI: " + pi
with open(os.getenv("HOME")+"/app.sh",'w') as f:
f.write('#!/bin/sh\n') # python will convert \n to os.linesep
for c in apps[pi]:
#print c
f.write(c+"\n")
f.close()
else:
print "No Apps for this Pi."
print "Pushing Settings and Apps to PI via SSH..."
#open ssh
s = ssh.Connection(pi, 'root', password = 'pi')
#remove old scripts and log files
s.execute("rm -f /home/nfd/network.sh") #network settings
s.execute("rm -f /home/nfd/app.sh") #deployed app
s.execute("rm -f /home/nfd/consumer-PI_*.log") #app logs
s.execute("rm -f /tmp/logs/*.json") #pi-usage logs
s.execute("rm -f /home/nfd/*.nfd-status.log") #nfd-status logs
#create pi-usage log folder if it does not exists...
s.execute("mkdir /tmp/logs")
#copy new scripts
s.put(os.getenv("HOME")+"/network.sh", '/home/nfd/network.sh')
s.execute("chmod +x /home/nfd/network.sh")
if hasApp:
s.put(os.getenv("HOME")+"/app.sh", '/home/nfd/app.sh')
s.execute("chmod +x /home/nfd/app.sh")
s.put(os.getenv("HOME")+"/client.sh", "/root/client.sh")
s.execute("chmod +x /root/client.sh")
#launch nfd
s.execute("screen -d -m /home/nfd/network.sh")
s.close()
print "Pi:" + pi + " Done!\n"
print "Network deployed on all PIs! Waiting 180 seconds so Pis can startup NFD and set routes!\n"
time.sleep(120)
return g, pi_list, property_list
| gpl-3.0 | -2,462,278,183,705,561,600 | 43.972644 | 234 | 0.657272 | false |
Eficent/purchase-workflow | purchase_order_reorder_lines/__manifest__.py | 1 | 1323 | # -*- coding: utf-8 -*-
#
#
# Author: Alexandre Fayolle
# Copyright 2013 Camptocamp SA
#
# Author: Damien Crier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
'name': 'Purchase order lines with sequence number',
'version': '8.0.1.0.1',
'category': 'Purchase Management',
'author': "Camptocamp,Odoo Community Association (OCA)",
'website': 'http://www.camptocamp.com',
'depends': [
'purchase',
'stock_picking_reorder_lines',
],
'data': ['views/purchase_view.xml'],
'demo': [],
'installable': False,
'auto_install': False,
'application': False,
'license': "AGPL-3",
}
| agpl-3.0 | -45,439,500,165,891,390 | 32.075 | 77 | 0.663643 | false |
on-three/asobot | asobot/emulator.py | 1 | 1610 | # vim: set ts=2 expandtab:
# -*- coding: utf-8 -*-
"""
Module: Emulator.py
Desc: pass keypresses to a game emulator or something.
Author: on_three
Email: [email protected]
DATE: Thursday, Jan 16th 2014
"""
import string
import re
from twisted.python import log
from controls import Key
class Emulator(object):
'''
pass commands to a game emulator of some sort.
'''
COMMAND_REGEX = ur'^(?P<command>:)(?P<commands>.+)$'
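  # Example (hypothetical IRC message): ":a b start" matches, yielding
  # commands = "a b start"; keypresses_to_emulator then presses the
  # space-separated groups "a", "b" and "start" one after another.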
def __init__(self, parent, emulator_window_name):
'''
constructor
'''
self._parent = parent
self._window_name = emulator_window_name
def is_msg_of_interest(self, user, channel, msg):
'''
PLUGIN API REQUIRED
Is the rx'd irc message of interest to this plugin?
'''
m = re.match(Emulator.COMMAND_REGEX, msg)
if m:
log.msg('Message of interest...')
return True
else:
return False
def handle_msg(self, user, channel, msg):
'''
PLUGIN API REQUIRED
Handle message and return nothing
'''
log.msg('{channel} : {msg}'.format(channel=channel, msg=msg))
m = re.match(Emulator.COMMAND_REGEX, msg)
if not m:
return
#got a command along with the .c or .channel statement
commands = m.groupdict()['commands']
self.keypresses_to_emulator(commands, channel)
def keypresses_to_emulator(self, keys, channel):
'''
Split commands by spaces. Each non spaced group represents
a series of buttons (or joystick directions) pressed TOGETHER
'''
presses = [x.strip() for x in keys.split(u' ')]
for p in presses:
Key.press(p, self._window_name)
| mit | 2,284,341,602,436,959,700 | 23.769231 | 65 | 0.647205 | false |
sserrot/champion_relationships | venv/Lib/site-packages/PIL/SunImagePlugin.py | 1 | 4302 | #
# The Python Imaging Library.
# $Id$
#
# Sun image file handling
#
# History:
# 1995-09-10 fl Created
# 1996-05-28 fl Fixed 32-bit alignment
# 1998-12-29 fl Import ImagePalette module
# 2001-12-18 fl Fixed palette loading (from Jean-Claude Rimbault)
#
# Copyright (c) 1997-2001 by Secret Labs AB
# Copyright (c) 1995-1996 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from . import Image, ImageFile, ImagePalette
from ._binary import i32be as i32
def _accept(prefix):
return len(prefix) >= 4 and i32(prefix) == 0x59A66A95
##
# Image plugin for Sun raster files.
class SunImageFile(ImageFile.ImageFile):
format = "SUN"
format_description = "Sun Raster File"
def _open(self):
# The Sun Raster file header is 32 bytes in length
# and has the following format:
# typedef struct _SunRaster
# {
# DWORD MagicNumber; /* Magic (identification) number */
# DWORD Width; /* Width of image in pixels */
# DWORD Height; /* Height of image in pixels */
# DWORD Depth; /* Number of bits per pixel */
# DWORD Length; /* Size of image data in bytes */
# DWORD Type; /* Type of raster file */
# DWORD ColorMapType; /* Type of color map */
# DWORD ColorMapLength; /* Size of the color map in bytes */
# } SUNRASTER;
# HEAD
s = self.fp.read(32)
if not _accept(s):
raise SyntaxError("not an SUN raster file")
offset = 32
self._size = i32(s[4:8]), i32(s[8:12])
depth = i32(s[12:16])
# data_length = i32(s[16:20]) # unreliable, ignore.
file_type = i32(s[20:24])
palette_type = i32(s[24:28]) # 0: None, 1: RGB, 2: Raw/arbitrary
palette_length = i32(s[28:32])
if depth == 1:
self.mode, rawmode = "1", "1;I"
elif depth == 4:
self.mode, rawmode = "L", "L;4"
elif depth == 8:
self.mode = rawmode = "L"
elif depth == 24:
if file_type == 3:
self.mode, rawmode = "RGB", "RGB"
else:
self.mode, rawmode = "RGB", "BGR"
elif depth == 32:
if file_type == 3:
self.mode, rawmode = "RGB", "RGBX"
else:
self.mode, rawmode = "RGB", "BGRX"
else:
raise SyntaxError("Unsupported Mode/Bit Depth")
if palette_length:
if palette_length > 1024:
raise SyntaxError("Unsupported Color Palette Length")
if palette_type != 1:
raise SyntaxError("Unsupported Palette Type")
offset = offset + palette_length
self.palette = ImagePalette.raw("RGB;L", self.fp.read(palette_length))
if self.mode == "L":
self.mode = "P"
rawmode = rawmode.replace("L", "P")
# 16 bit boundaries on stride
stride = ((self.size[0] * depth + 15) // 16) * 2
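        # Worked example of the rounding above (illustrative values): a
        # 100-pixel, 8-bit row gives ((100 * 8 + 15) // 16) * 2 = 100 bytes;
        # a 99-pixel row rounds up to the same 100-byte stride.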
# file type: Type is the version (or flavor) of the bitmap
# file. The following values are typically found in the Type
# field:
# 0000h Old
# 0001h Standard
# 0002h Byte-encoded
# 0003h RGB format
# 0004h TIFF format
# 0005h IFF format
# FFFFh Experimental
# Old and standard are the same, except for the length tag.
# byte-encoded is run-length-encoded
# RGB looks similar to standard, but RGB byte order
# TIFF and IFF mean that they were converted from T/IFF
# Experimental means that it's something else.
# (https://www.fileformat.info/format/sunraster/egff.htm)
if file_type in (0, 1, 3, 4, 5):
self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride))]
elif file_type == 2:
self.tile = [("sun_rle", (0, 0) + self.size, offset, rawmode)]
else:
raise SyntaxError("Unsupported Sun Raster file type")
#
# registry
Image.register_open(SunImageFile.format, SunImageFile, _accept)
Image.register_extension(SunImageFile.format, ".ras")
| mit | -6,823,107,286,268,212,000 | 30.632353 | 82 | 0.547652 | false |
JingheZ/shogun | examples/undocumented/python_modular/structure_discrete_hmsvm_mosek.py | 2 | 1217 | #!/usr/bin/env python
import numpy
import scipy
from scipy import io
data_dict = scipy.io.loadmat('../data/hmsvm_data_large_integer.mat', struct_as_record=False)
parameter_list=[[data_dict]]
def structure_discrete_hmsvm_mosek (m_data_dict=data_dict):
from modshogun import RealMatrixFeatures
from modshogun import SequenceLabels, HMSVMModel, Sequence, TwoStateModel, SMT_TWO_STATE
from modshogun import StructuredAccuracy
try:
from modshogun import PrimalMosekSOSVM
except ImportError:
print("Mosek not available")
return
labels_array = m_data_dict['label'][0]
idxs = numpy.nonzero(labels_array == -1)
labels_array[idxs] = 0
labels = SequenceLabels(labels_array, 250, 500, 2)
features = RealMatrixFeatures(m_data_dict['signal'].astype(float), 250, 500)
num_obs = 4 # given by the data file used
model = HMSVMModel(features, labels, SMT_TWO_STATE, num_obs)
sosvm = PrimalMosekSOSVM(model, labels)
sosvm.train()
#print(sosvm.get_w())
predicted = sosvm.apply()
evaluator = StructuredAccuracy()
acc = evaluator.evaluate(predicted, labels)
#print('Accuracy = %.4f' % acc)
if __name__ == '__main__':
print("Discrete HMSVM Mosek")
structure_discrete_hmsvm_mosek(*parameter_list[0])
| gpl-3.0 | 2,882,753,445,676,237,000 | 26.659091 | 92 | 0.733772 | false |
madduck/reclass | reclass/errors.py | 2 | 6324 | #
# -*- coding: utf-8 -*-
#
# This file is part of reclass (http://github.com/madduck/reclass)
#
# Copyright © 2007–14 martin f. krafft <[email protected]>
# Released under the terms of the Artistic Licence 2.0
#
import posix, sys
import traceback
from reclass.defaults import PARAMETER_INTERPOLATION_SENTINELS
class ReclassException(Exception):
def __init__(self, rc=posix.EX_SOFTWARE, msg=None):
super(ReclassException, self).__init__()
self._rc = rc
self._msg = msg
self._traceback = traceback.format_exc()
message = property(lambda self: self._get_message())
rc = property(lambda self: self._rc)
def _get_message(self):
if self._msg:
return self._msg
else:
return 'No error message provided.'
def exit_with_message(self, out=sys.stderr):
print >>out, self.message
if self._traceback:
print >>out, self._traceback
sys.exit(self.rc)
class PermissionError(ReclassException):
def __init__(self, msg, rc=posix.EX_NOPERM):
super(PermissionError, self).__init__(rc=rc, msg=msg)
class InvocationError(ReclassException):
def __init__(self, msg, rc=posix.EX_USAGE):
super(InvocationError, self).__init__(rc=rc, msg=msg)
class ConfigError(ReclassException):
def __init__(self, msg, rc=posix.EX_CONFIG):
super(ConfigError, self).__init__(rc=rc, msg=msg)
class DuplicateUriError(ConfigError):
def __init__(self, nodes_uri, classes_uri):
super(DuplicateUriError, self).__init__(msg=None)
self._nodes_uri = nodes_uri
self._classes_uri = classes_uri
def _get_message(self):
return "The inventory URIs must not be the same " \
"for nodes and classes: {0}".format(self._nodes_uri)
class UriOverlapError(ConfigError):
def __init__(self, nodes_uri, classes_uri):
super(UriOverlapError, self).__init__(msg=None)
self._nodes_uri = nodes_uri
self._classes_uri = classes_uri
def _get_message(self):
msg = "The URIs for the nodes and classes inventories must not " \
"overlap, but {0} and {1} do."
return msg.format(self._nodes_uri, self._classes_uri)
class NotFoundError(ReclassException):
def __init__(self, msg, rc=posix.EX_IOERR):
super(NotFoundError, self).__init__(rc=rc, msg=msg)
class NodeNotFound(NotFoundError):
def __init__(self, storage, nodename, uri):
super(NodeNotFound, self).__init__(msg=None)
self._storage = storage
self._name = nodename
self._uri = uri
def _get_message(self):
msg = "Node '{0}' not found under {1}://{2}"
return msg.format(self._name, self._storage, self._uri)
class ClassNotFound(NotFoundError):
def __init__(self, storage, classname, uri, nodename=None):
super(ClassNotFound, self).__init__(msg=None)
self._storage = storage
self._name = classname
self._uri = uri
self._nodename = nodename
def _get_message(self):
if self._nodename:
msg = "Class '{0}' (in ancestry of node '{1}') not found " \
"under {2}://{3}"
else:
msg = "Class '{0}' not found under {2}://{3}"
return msg.format(self._name, self._nodename, self._storage, self._uri)
def set_nodename(self, nodename):
self._nodename = nodename
class InterpolationError(ReclassException):
def __init__(self, msg, rc=posix.EX_DATAERR):
super(InterpolationError, self).__init__(rc=rc, msg=msg)
class UndefinedVariableError(InterpolationError):
def __init__(self, var, context=None):
super(UndefinedVariableError, self).__init__(msg=None)
self._var = var
self._context = context
var = property(lambda self: self._var)
context = property(lambda self: self._context)
def _get_message(self):
msg = "Cannot resolve " + self._var.join(PARAMETER_INTERPOLATION_SENTINELS)
if self._context:
msg += ' in the context of %s' % self._context
return msg
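    # Illustration (assuming the default sentinels '${' and '}'): for
    # _var == 'foo' this yields "Cannot resolve ${foo}", plus the context
    # suffix when a context is set.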
def set_context(self, context):
self._context = context
class IncompleteInterpolationError(InterpolationError):
def __init__(self, string, end_sentinel):
super(IncompleteInterpolationError, self).__init__(msg=None)
self._ref = string.join(PARAMETER_INTERPOLATION_SENTINELS)
self._end_sentinel = end_sentinel
def _get_message(self):
msg = "Missing '{0}' to end reference: {1}"
return msg.format(self._end_sentinel, self._ref)
class InfiniteRecursionError(InterpolationError):
def __init__(self, path, ref):
super(InfiniteRecursionError, self).__init__(msg=None)
self._path = path
self._ref = ref.join(PARAMETER_INTERPOLATION_SENTINELS)
def _get_message(self):
msg = "Infinite recursion while resolving {0} at {1}"
return msg.format(self._ref, self._path)
class MappingError(ReclassException):
def __init__(self, msg, rc=posix.EX_DATAERR):
super(MappingError, self).__init__(rc=rc, msg=msg)
class MappingFormatError(MappingError):
def __init__(self, msg):
super(MappingFormatError, self).__init__(msg)
class NameError(ReclassException):
def __init__(self, msg, rc=posix.EX_DATAERR):
super(NameError, self).__init__(rc=rc, msg=msg)
class InvalidClassnameError(NameError):
def __init__(self, invalid_character, classname):
super(InvalidClassnameError, self).__init__(msg=None)
self._char = invalid_character
self._classname = classname
def _get_message(self):
msg = "Invalid character '{0}' in class name '{1}'."
return msg.format(self._char, self._classname)
class DuplicateNodeNameError(NameError):
def __init__(self, storage, name, uri1, uri2):
super(DuplicateNodeNameError, self).__init__(msg=None)
self._storage = storage
self._name = name
self._uris = (uri1, uri2)
def _get_message(self):
msg = "{0}: Definition of node '{1}' in '{2}' collides with " \
"definition in '{3}'. Nodes can only be defined once " \
"per inventory."
return msg.format(self._storage, self._name, self._uris[1], self._uris[0])
| artistic-2.0 | 5,022,595,609,476,263,000 | 28.676056 | 83 | 0.621895 | false |
cedriclaunay/gaffer | python/GafferImageTest/ImageTimeWarpTest.py | 1 | 5003 | ##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
import Gaffer
import GafferTest
import GafferImage
import GafferSceneTest
class ImageTimeWarpTest( GafferTest.TestCase ) :
def testDefaultName( self ) :
t = GafferImage.ImageTimeWarp()
self.assertEqual( t.getName(), "ImageTimeWarp" )
def testEnabledPlug( self ) :
t = GafferImage.ImageTimeWarp()
self.assertTrue( isinstance( t["enabled"], Gaffer.BoolPlug ) )
self.assertTrue( t["enabled"].isSame( t.enabledPlug() ) )
self.assertFalse( "enabled1" in t )
def testAffects( self ) :
timeWarp = GafferImage.ImageTimeWarp()
for n in [ "format", "dataWindow", "channelNames", "channelData" ] :
a = timeWarp.affects( timeWarp["in"][n] )
self.assertEqual( len( a ), 1 )
self.assertTrue( a[0].isSame( timeWarp["out"][n] ) )
for n in [ "enabled", "offset", "speed" ] :
a = set( [ plug.relativeName( plug.node() ) for plug in timeWarp.affects( timeWarp[n] ) ] )
self.assertEqual(
a,
set( [
"out.format", "out.dataWindow", "out.channelNames", "out.channelData",
] ),
)
def testTimeWarping( self ) :
script = Gaffer.ScriptNode()
script["constant"] = GafferImage.Constant()
script["expression"] = Gaffer.Expression()
script["expression"]["engine"].setValue( "python" )
script["expression"]["expression"].setValue( 'parent["constant"]["color"]["r"] = context["frame"]' )
script["timeWarp"] = GafferImage.ImageTimeWarp()
script["timeWarp"]["offset"].setValue( 1 )
script["timeWarp"]["in"].setInput( script["constant"]["out"] )
for f in range( 0, 10 ) :
with script.context() :
script.context().setFrame( f )
c0 = script["constant"]["out"].image()
c0Hash = script["constant"]["out"].imageHash()
t = script["timeWarp"]["out"].image()
tHash = script["timeWarp"]["out"].imageHash()
script.context().setFrame( f + 1 )
c1 = script["constant"]["out"].image()
c1Hash = script["constant"]["out"].imageHash()
self.assertEqual( c1, t )
self.assertEqual( c1Hash, tHash )
self.assertNotEqual( c0, c1 )
self.assertNotEqual( c0Hash, c1Hash )
def testDisabling( self ) :
script = Gaffer.ScriptNode()
script["constant"] = GafferImage.Constant()
script["expression"] = Gaffer.Expression()
script["expression"]["engine"].setValue( "python" )
script["expression"]["expression"].setValue( 'parent["constant"]["color"]["r"] = context["frame"]' )
script["timeWarp"] = GafferImage.ImageTimeWarp()
script["timeWarp"]["offset"].setValue( 1 )
script["timeWarp"]["in"].setInput( script["constant"]["out"] )
with script.context() :
c = script["constant"]["out"].image()
cHash = script["constant"]["out"].imageHash()
t = script["timeWarp"]["out"].image()
tHash = script["timeWarp"]["out"].imageHash()
self.assertNotEqual( c, t )
self.assertNotEqual( cHash, tHash )
script["timeWarp"]["enabled"].setValue( False )
with script.context() :
c = script["constant"]["out"].image()
cHash = script["constant"]["out"].imageHash()
t = script["timeWarp"]["out"].image()
tHash = script["timeWarp"]["out"].imageHash()
self.assertEqual( c, t )
self.assertEqual( cHash, tHash )
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | -5,437,082,892,515,985,000 | 33.034014 | 102 | 0.660204 | false |
stencila/hub | manager/users/migrations/0001_initial.py | 1 | 5290 | # Generated by Django 3.0.8 on 2020-07-08 22:03
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import users.models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0011_update_proxy_permissions'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Invite',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('key', models.CharField(default=users.models.generate_invite_key, help_text='The key for the invite.', max_length=64, unique=True)),
('email', models.EmailField(help_text='The email address of the person you are inviting.', max_length=2048)),
('message', models.TextField(blank=True, help_text='An optional message to send to the invitee.', null=True)),
('created', models.DateTimeField(auto_now_add=True, help_text='When the invite was created.')),
('sent', models.DateTimeField(blank=True, help_text='When the invite was sent.', null=True)),
('accepted', models.BooleanField(default=False, help_text='Whether the invite has been accepted. Will only be true if the user has clicked on the invitation AND authenticated.')),
('completed', models.DateTimeField(blank=True, help_text='When the invite action was completed', null=True)),
('action', models.CharField(blank=True, choices=[('join_account', 'Join account'), ('join_team', 'Join team'), ('join_project', 'Join project'), ('take_tour', 'Take tour')], help_text='The action to perform when the invitee signs up.', max_length=64, null=True)),
('subject_id', models.IntegerField(blank=True, help_text='The id of the target of the action.', null=True)),
('arguments', models.JSONField(blank=True, help_text='Any additional arguments to pass to the action.', null=True)),
('inviter', models.ForeignKey(blank=True, help_text='The user who created the invite.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='invites', to=settings.AUTH_USER_MODEL)),
('subject_type', models.ForeignKey(blank=True, help_text='The type of the target of the action. e.g Team, Account', null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
),
migrations.CreateModel(
name='Flag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(help_text='The human/computer readable name.', max_length=100, unique=True, verbose_name='Name')),
('everyone', models.NullBooleanField(help_text='Flip this flag on (Yes) or off (No) for everyone, overriding all other settings. Leave as Unknown to use normally.', verbose_name='Everyone')),
('percent', models.DecimalField(blank=True, decimal_places=1, help_text='A number between 0.0 and 99.9 to indicate a percentage of users for whom this flag will be active.', max_digits=3, null=True, verbose_name='Percent')),
('testing', models.BooleanField(default=False, help_text='Allow this flag to be set for a session for user testing', verbose_name='Testing')),
('superusers', models.BooleanField(default=True, help_text='Flag always active for superusers?', verbose_name='Superusers')),
('staff', models.BooleanField(default=False, help_text='Flag always active for staff?', verbose_name='Staff')),
('authenticated', models.BooleanField(default=False, help_text='Flag always active for authenticated users?', verbose_name='Authenticated')),
('languages', models.TextField(blank=True, default='', help_text='Activate this flag for users with one of these languages (comma-separated list)', verbose_name='Languages')),
('rollout', models.BooleanField(default=False, help_text='Activate roll-out mode?', verbose_name='Rollout')),
('note', models.TextField(blank=True, help_text='Note where this Flag is used.', verbose_name='Note')),
('created', models.DateTimeField(db_index=True, default=django.utils.timezone.now, help_text='Date when this Flag was created.', verbose_name='Created')),
('modified', models.DateTimeField(default=django.utils.timezone.now, help_text='Date when this Flag was last modified.', verbose_name='Modified')),
('groups', models.ManyToManyField(blank=True, help_text='Activate this flag for these user groups.', to='auth.Group', verbose_name='Groups')),
('users', models.ManyToManyField(blank=True, help_text='Activate this flag for these users.', to=settings.AUTH_USER_MODEL, verbose_name='Users')),
],
options={
'verbose_name': 'Flag',
'verbose_name_plural': 'Flags',
'abstract': False,
},
),
]
| apache-2.0 | 5,189,788,996,011,161,000 | 81.65625 | 279 | 0.652174 | false |
MediaMath/t1-python | terminalone/t1mappings_noclassdef.py | 1 | 1823 | # Temporary until we can either kill the circular dependency introduced
# by importing model defs in xmlparser or kill xmlparser entirely
SINGULAR = {
'acl': 'acl',
'ad_server': 'ad_servers',
'advertiser': 'advertisers',
'agency': 'agencies',
'atomic_creative': 'atomic_creatives',
'audience_segment': 'audience_segments',
'campaign': 'campaigns',
'concept': 'concepts',
'contact': 'contacts',
'contract': 'contracts',
'creative': 'creatives',
'creative_approval': 'creative_approvals',
'deal': 'deals',
'organization': 'organizations',
'permission': 'permissions',
'pixel': 'pixels',
'pixel_bundle': 'pixel_bundles',
'pixel_provider': 'pixel_providers',
'placement_slot': 'placement_slots',
'publisher': 'publishers',
'publisher_site': 'publisher_sites',
'rmx_strategy': 'rmx_strategies',
'rmx_strategy_roi_target_pixel': 'rmx_strategy_roi_target_pixels',
'seat': 'seats',
'site_list': 'site_lists',
'site_placement': 'site_placements',
'strategy': 'strategies',
'strategy_audience_segment': 'strategy_audience_segments',
'strategy_concept': 'strategy_concepts',
'strategy_deal': 'strategy_deals',
'strategy_day_part': 'strategy_day_parts',
'strategy_domain_restriction': 'strategy_domain_restrictions',
'strategy_supply_source': 'strategy_supply_sources',
'strategy_targeting_segment': 'strategy_targeting_segments',
'supply_source': 'supply_sources',
'target_dimension': 'target_dimensions',
'target_value': 'target_values',
'user': 'users',
'vendor': 'vendors',
'vendor_contract': 'vendor_contracts',
'vendor_domain': 'vendor_domains',
'vendor_pixel': 'vendor_pixels',
'vendor_pixel_domain': 'vendor_pixel_domains',
'vertical': 'verticals'
}
| apache-2.0 | -1,990,546,721,601,453,300 | 36.204082 | 71 | 0.654964 | false |
Nic30/hwtLib | hwtLib/peripheral/i2c/masterBitCntrl_test.py | 1 | 1862 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from _collections import deque
import unittest
from hwt.simulator.simTestCase import SimTestCase
from hwtLib.peripheral.i2c.intf import I2cAgent
from hwtLib.peripheral.i2c.masterBitCntrl import I2cMasterBitCtrl, \
NOP, START, READ, WRITE
from hwtSimApi.constants import CLK_PERIOD
from pyMathBitPrecise.bit_utils import get_bit
class I2CMasterBitCntrlTC(SimTestCase):
@classmethod
def setUpClass(cls):
cls.u = I2cMasterBitCtrl()
cls.compileSim(cls.u)
def test_nop(self):
u = self.u
u.cntrl._ag.data.append((NOP, 0))
u.clk_cnt_initVal._ag.data.append(4)
self.runSim(20 * CLK_PERIOD)
self.assertFalse(u.i2c._ag.hasTransactionPending())
def test_startbit(self):
u = self.u
u.cntrl._ag.data.extend([(START, 0), (NOP, 0)])
u.clk_cnt_initVal._ag.data.append(4)
self.runSim(60 * CLK_PERIOD)
self.assertEqual(u.i2c._ag.bit_cntrl_rx, deque([I2cAgent.START]))
def test_7bitAddr(self):
u = self.u
addr = 13
mode = I2cAgent.READ
u.cntrl._ag.data.extend(
[(START, 0), ] +
[(WRITE, get_bit(addr, 7 - i - 1)) for i in range(7)] +
[(WRITE, mode),
(READ, 0),
(NOP, 0)
])
u.clk_cnt_initVal._ag.data.append(4)
self.runSim(70 * CLK_PERIOD)
self.assertValSequenceEqual(
u.i2c._ag.bit_cntrl_rx,
[I2cAgent.START] +
[get_bit(addr, 7 - i - 1)
for i in range(7)] +
[mode])
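        # Illustration of the serialisation above: addr = 13 is written
        # MSB-first over 7 bits as 0 0 0 1 1 0 1 (get_bit(13, 6) ... get_bit(13, 0)),
        # followed by the R/W mode bit.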
if __name__ == "__main__":
suite = unittest.TestSuite()
# suite.addTest(I2CMasterBitCntrlTC('test_nop'))
suite.addTest(unittest.makeSuite(I2CMasterBitCntrlTC))
runner = unittest.TextTestRunner(verbosity=3)
runner.run(suite)
| mit | 4,142,820,898,430,028,300 | 27.646154 | 73 | 0.593985 | false |
mvaled/sentry | src/sentry/south_migrations/0397_auto__add_latestrelease__add_unique_latestrelease_repository_id_enviro.py | 1 | 102302 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
# Flag to indicate if this migration is too risky
# to run online and needs to be coordinated for offline
is_dangerous = False
def forwards(self, orm):
# Adding model 'LatestRelease'
db.create_table('sentry_latestrelease', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('repository_id', self.gf('sentry.db.models.fields.bounded.BoundedBigIntegerField')()),
('environment_id', self.gf('sentry.db.models.fields.bounded.BoundedBigIntegerField')()),
('release_id', self.gf('sentry.db.models.fields.bounded.BoundedBigIntegerField')()),
('deploy_id', self.gf('sentry.db.models.fields.bounded.BoundedBigIntegerField')(null=True)),
('commit_id', self.gf('sentry.db.models.fields.bounded.BoundedBigIntegerField')(null=True)),
))
db.send_create_signal('sentry', ['LatestRelease'])
# Adding unique constraint on 'LatestRelease', fields ['repository_id', 'environment_id']
db.create_unique('sentry_latestrelease', ['repository_id', 'environment_id'])
def backwards(self, orm):
# Removing unique constraint on 'LatestRelease', fields ['repository_id', 'environment_id']
db.delete_unique('sentry_latestrelease', ['repository_id', 'environment_id'])
# Deleting model 'LatestRelease'
db.delete_table('sentry_latestrelease')
models = {
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.apiapplication': {
'Meta': {'object_name': 'ApiApplication'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'client_id': ('django.db.models.fields.CharField', [], {'default': "'77cf05ffe3c94e5d90e8d2debfdf44a3338317c84edf4a1584bccc7e741e5010'", 'unique': 'True', 'max_length': '64'}),
'client_secret': ('sentry.db.models.fields.encrypted.EncryptedTextField', [], {'default': "'5c21e34062d04fcdb2c6c95ae002e02493507d401a1d491c8c8272c07311e256'"}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'homepage_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Humble Sawfly'", 'max_length': '64', 'blank': 'True'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'privacy_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'redirect_uris': ('django.db.models.fields.TextField', [], {}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'terms_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'sentry.apiauthorization': {
'Meta': {'unique_together': "(('user', 'application'),)", 'object_name': 'ApiAuthorization'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.apigrant': {
'Meta': {'object_name': 'ApiGrant'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']"}),
'code': ('django.db.models.fields.CharField', [], {'default': "'a510e2f87b39450998283c8fcb9a2925'", 'max_length': '64', 'db_index': 'True'}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 3, 21, 0, 0)', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'redirect_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.apikey': {
'Meta': {'object_name': 'ApiKey'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.apitoken': {
'Meta': {'object_name': 'ApiToken'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 4, 20, 0, 0)', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'refresh_token': ('django.db.models.fields.CharField', [], {'default': "'a3ad19998aaa4545964c0162fcd950e8b8f94f0d14dc4bfba4c173a03d2700d6'", 'max_length': '64', 'unique': 'True', 'null': 'True'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'token': ('django.db.models.fields.CharField', [], {'default': "'035aa96ceba648c99324ae41a4a56d8b9c2ccc8c72314e4fb273262a30dee078'", 'unique': 'True', 'max_length': '64'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
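        # Note the dual scope storage repeated across the Api* models: `scopes`
        # is a big-integer bitfield (default None) alongside `scope_list`, a
        # text array -- presumably a transitional representation, with both
        # fields live on the models at the time this migration was frozen.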
'sentry.assistantactivity': {
'Meta': {'unique_together': "(('user', 'guide_id'),)", 'object_name': 'AssistantActivity', 'db_table': "'sentry_assistant_activity'"},
'dismissed_ts': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'guide_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'useful': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'viewed_ts': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'sentry.auditlogentry': {
'Meta': {'object_name': 'AuditLogEntry'},
'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_actors'", 'null': 'True', 'to': "orm['sentry.User']"}),
'actor_key': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiKey']", 'null': 'True', 'blank': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.authenticator': {
'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'Authenticator', 'db_table': "'auth_authenticator'"},
'config': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'last_used_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authidentity': {
'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity'},
'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}),
'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authprovider': {
'Meta': {'object_name': 'AuthProvider'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.broadcast': {
'Meta': {'object_name': 'Broadcast'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_expires': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 3, 28, 0, 0)', 'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'upstream_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
},
'sentry.broadcastseen': {
'Meta': {'unique_together': "(('broadcast', 'user'),)", 'object_name': 'BroadcastSeen'},
'broadcast': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Broadcast']"}),
'date_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.commit': {
'Meta': {'unique_together': "(('repository_id', 'key'),)", 'object_name': 'Commit', 'index_together': "(('repository_id', 'date_added'),)"},
'author': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.CommitAuthor']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'message': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.commitauthor': {
'Meta': {'unique_together': "(('organization_id', 'email'), ('organization_id', 'external_id'))", 'object_name': 'CommitAuthor'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '164', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.commitfilechange': {
'Meta': {'unique_together': "(('commit', 'filename'),)", 'object_name': 'CommitFileChange'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '1'})
},
'sentry.counter': {
'Meta': {'object_name': 'Counter', 'db_table': "'sentry_projectcounter'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'unique': 'True'}),
'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.deletedorganization': {
'Meta': {'object_name': 'DeletedOrganization'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'actor_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date_deleted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
'sentry.deletedproject': {
'Meta': {'object_name': 'DeletedProject'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'actor_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date_deleted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'organization_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'organization_slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
'sentry.deletedteam': {
'Meta': {'object_name': 'DeletedTeam'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'actor_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date_deleted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'organization_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'organization_slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
'sentry.deploy': {
'Meta': {'object_name': 'Deploy'},
'date_finished': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'notified': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'sentry.distribution': {
'Meta': {'unique_together': "(('release', 'name'),)", 'object_name': 'Distribution'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.dsymapp': {
'Meta': {'unique_together': "(('project', 'platform', 'app_id'),)", 'object_name': 'DSymApp'},
'app_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'platform': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'sync_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'})
},
'sentry.email': {
'Meta': {'object_name': 'Email'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('sentry.db.models.fields.citext.CIEmailField', [], {'unique': 'True', 'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.environment': {
'Meta': {'unique_together': "(('project_id', 'name'), ('organization_id', 'name'))", 'object_name': 'Environment'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'through': "orm['sentry.EnvironmentProject']", 'symmetrical': 'False'})
},
'sentry.environmentproject': {
'Meta': {'unique_together': "(('project', 'environment'),)", 'object_name': 'EnvironmentProject'},
'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_hidden': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.event': {
'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group_id', 'datetime'),)"},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'})
},
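        # `sentry.event` still points at the legacy schema: the table is
        # 'sentry_message' and `event_id` is stored in the 'message_id' column,
        # a holdover from when events were modeled as messages.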
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventprocessingissue': {
'Meta': {'unique_together': "(('raw_event', 'processing_issue'),)", 'object_name': 'EventProcessingIssue'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'processing_issue': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProcessingIssue']"}),
'raw_event': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.RawEvent']"})
},
'sentry.eventtag': {
'Meta': {'unique_together': "(('event_id', 'key_id', 'value_id'),)", 'object_name': 'EventTag', 'index_together': "(('group_id', 'key_id', 'value_id'),)"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {'unique_together': "(('project_id', 'ident'), ('project_id', 'hash'))", 'object_name': 'EventUser', 'index_together': "(('project_id', 'email'), ('project_id', 'username'), ('project_id', 'ip_address'))"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'})
},
'sentry.featureadoption': {
'Meta': {'unique_together': "(('organization', 'feature_id'),)", 'object_name': 'FeatureAdoption'},
'applicable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'feature_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"})
},
'sentry.file': {
'Meta': {'object_name': 'File'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'legacy_blob'", 'null': 'True', 'to': "orm['sentry.FileBlob']"}),
'blobs': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.FileBlob']", 'through': "orm['sentry.FileBlobIndex']", 'symmetrical': 'False'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'db_index': 'True'}),
'headers': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.fileblob': {
'Meta': {'object_name': 'FileBlob'},
'checksum': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'})
},
'sentry.fileblobindex': {
'Meta': {'unique_together': "(('file', 'blob', 'offset'),)", 'object_name': 'FileBlobIndex'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.FileBlob']"}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.fileblobowner': {
'Meta': {'unique_together': "(('blob', 'organization'),)", 'object_name': 'FileBlobOwner'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.FileBlob']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'short_id'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'", 'index_together': "(('project', 'first_release'),)"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'short_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupassignee': {
'Meta': {'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'to': "orm['sentry.Project']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'null': 'True', 'to': "orm['sentry.Team']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
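        # The db_table 'sentry_groupasignee' above is genuinely misspelled in
        # the live schema; the frozen name has to match the existing table, so
        # it is preserved here rather than corrected.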
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupcommitresolution': {
'Meta': {'unique_together': "(('group_id', 'commit_id'),)", 'object_name': 'GroupCommitResolution'},
'commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.groupemailthread': {
'Meta': {'unique_together': "(('email', 'group'), ('email', 'msgid'))", 'object_name': 'GroupEmailThread'},
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'msgid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Project']"})
},
'sentry.groupenvironment': {
'Meta': {'unique_together': "[('group_id', 'environment_id')]", 'object_name': 'GroupEnvironment', 'index_together': "[('environment_id', 'first_release_id')]"},
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'first_release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.grouphash': {
'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'group_tombstone_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'state': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.grouplink': {
'Meta': {'unique_together': "(('group_id', 'linked_type', 'linked_id'),)", 'object_name': 'GroupLink'},
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'linked_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'linked_type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}),
'relationship': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '2'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupredirect': {
'Meta': {'object_name': 'GroupRedirect'},
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'previous_group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'unique': 'True'})
},
'sentry.grouprelease': {
'Meta': {'unique_together': "(('group_id', 'release_id', 'environment'),)", 'object_name': 'GroupRelease'},
'environment': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.groupresolution': {
'Meta': {'object_name': 'GroupResolution'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.grouprulestatus': {
'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.groupshare': {
'Meta': {'object_name': 'GroupShare'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'13d0e9633db64057bfa89592c15f547f'", 'unique': 'True', 'max_length': '32'})
},
'sentry.groupsnooze': {
'Meta': {'object_name': 'GroupSnooze'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'state': ('sentry.db.models.fields.jsonfield.JSONField', [], {'null': 'True'}),
'until': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'user_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'user_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.groupsubscription': {
'Meta': {'unique_together': "(('group', 'user'),)", 'object_name': 'GroupSubscription'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'subscription_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'subscription_set'", 'to': "orm['sentry.Project']"}),
'reason': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project_id', 'group_id', 'key'),)", 'object_name': 'GroupTagKey'},
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('group_id', 'key', 'value'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'", 'index_together': "(('project_id', 'key', 'value', 'last_seen'),)"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.grouptombstone': {
'Meta': {'object_name': 'GroupTombstone'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'previous_group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'unique': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.identity': {
'Meta': {'unique_together': "(('idp', 'external_id'),)", 'object_name': 'Identity'},
'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'idp': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.IdentityProvider']"}),
'scopes': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.identityprovider': {
'Meta': {'unique_together': "(('type', 'organization'),)", 'object_name': 'IdentityProvider'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.integration': {
'Meta': {'unique_together': "(('provider', 'external_id'),)", 'object_name': 'Integration'},
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'metadata': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organizations': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'integrations'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationIntegration']", 'to': "orm['sentry.Organization']"}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'integrations'", 'symmetrical': 'False', 'through': "orm['sentry.ProjectIntegration']", 'to': "orm['sentry.Project']"}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.latestrelease': {
'Meta': {'unique_together': "(('repository_id', 'environment_id'),)", 'object_name': 'LatestRelease'},
'commit_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'deploy_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'environment_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'release_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {'object_name': 'Organization'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.organizationaccessrequest': {
'Meta': {'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'member': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationavatar': {
'Meta': {'object_name': 'OrganizationAvatar'},
'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.Organization']"})
},
'sentry.organizationintegration': {
'Meta': {'unique_together': "(('organization', 'integration'),)", 'object_name': 'OrganizationIntegration'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'default_auth_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'integration': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Integration']"}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"})
},
'sentry.organizationmember': {
'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Organization']"}),
'role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.organizationmemberteam': {
'Meta': {'unique_together': "(('team', 'organizationmember'),)", 'object_name': 'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'"},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'organizationmember': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationonboardingtask': {
'Meta': {'unique_together': "(('organization', 'task'),)", 'object_name': 'OrganizationOnboardingTask'},
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.organizationoption': {
'Meta': {'unique_together': "(('organization', 'key'),)", 'object_name': 'OrganizationOption', 'db_table': "'sentry_organizationoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.processingissue': {
'Meta': {'unique_together': "(('project', 'checksum', 'type'),)", 'object_name': 'ProcessingIssue'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'sentry.project': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'first_event': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0', 'null': 'True'}),
'forced_color': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'through': "orm['sentry.ProjectTeam']", 'to': "orm['sentry.Team']"})
},
'sentry.projectbookmark': {
'Meta': {'unique_together': "(('project_id', 'user'),)", 'object_name': 'ProjectBookmark'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.projectdsymfile': {
'Meta': {'unique_together': "(('project', 'uuid'),)", 'object_name': 'ProjectDSymFile'},
'cpu_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36'})
},
'sentry.projectintegration': {
'Meta': {'unique_together': "(('project', 'integration'),)", 'object_name': 'ProjectIntegration'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'integration': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Integration']"}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'rate_limit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'rate_limit_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.projectownership': {
'Meta': {'object_name': 'ProjectOwnership'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'fallthrough': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'unique': 'True'}),
'raw': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'schema': ('sentry.db.models.fields.jsonfield.JSONField', [], {'null': 'True'})
},
'sentry.projectplatform': {
'Meta': {'unique_together': "(('project_id', 'platform'),)", 'object_name': 'ProjectPlatform'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.projectsymcachefile': {
'Meta': {'unique_together': "(('project', 'dsym_file'),)", 'object_name': 'ProjectSymCacheFile'},
'cache_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'dsym_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProjectDSymFile']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'version': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.projectteam': {
'Meta': {'unique_together': "(('project', 'team'),)", 'object_name': 'ProjectTeam'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.pullrequest': {
'Meta': {'unique_together': "(('repository_id', 'key'),)", 'object_name': 'PullRequest', 'db_table': "'sentry_pull_request'", 'index_together': "(('repository_id', 'date_added'),)"},
'author': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.CommitAuthor']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'merge_commit_sha': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'title': ('django.db.models.fields.TextField', [], {'null': 'True'})
},
'sentry.rawevent': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'RawEvent'},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.release': {
'Meta': {'unique_together': "(('organization', 'version'),)", 'object_name': 'Release'},
'authors': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'commit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_released': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'last_deploy_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True', 'blank': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'releases'", 'symmetrical': 'False', 'through': "orm['sentry.ReleaseProject']", 'to': "orm['sentry.Project']"}),
'ref': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'total_deploys': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
'sentry.releasecommit': {
'Meta': {'unique_together': "(('release', 'commit'), ('release', 'order'))", 'object_name': 'ReleaseCommit'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.releaseenvironment': {
'Meta': {'unique_together': "(('organization_id', 'release_id', 'environment_id'),)", 'object_name': 'ReleaseEnvironment', 'db_table': "'sentry_environmentrelease'"},
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.releasefile': {
'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile'},
'dist': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Distribution']", 'null': 'True'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'name': ('django.db.models.fields.TextField', [], {}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.releaseheadcommit': {
'Meta': {'unique_together': "(('repository_id', 'release'),)", 'object_name': 'ReleaseHeadCommit'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.releaseproject': {
'Meta': {'unique_together': "(('project', 'release'),)", 'object_name': 'ReleaseProject', 'db_table': "'sentry_release_project'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.releaseprojectenvironment': {
'Meta': {'unique_together': "(('project', 'release', 'environment'),)", 'object_name': 'ReleaseProjectEnvironment'},
'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']"}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'new_issues_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.repository': {
'Meta': {'unique_together': "(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))", 'object_name': 'Repository'},
'config': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'integration_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'sentry.reprocessingreport': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'ReprocessingReport'},
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.savedsearch': {
'Meta': {'unique_together': "(('project', 'name'),)", 'object_name': 'SavedSearch'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'SavedSearchUserDefault', 'db_table': "'sentry_savedsearch_userdefault'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'savedsearch': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.SavedSearch']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.scheduleddeletion': {
'Meta': {'unique_together': "(('app_label', 'model_name', 'object_id'),)", 'object_name': 'ScheduledDeletion'},
'aborted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_scheduled': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 4, 20, 0, 0)'}),
'guid': ('django.db.models.fields.CharField', [], {'default': "'4ab522335c9c457fa7b0e2dd43273c35'", 'unique': 'True', 'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'in_progress': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'model_name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'object_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.scheduledjob': {
'Meta': {'object_name': 'ScheduledJob'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_scheduled': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'payload': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'})
},
'sentry.servicehook': {
'Meta': {'object_name': 'ServiceHook'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'events': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'guid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'secret': ('sentry.db.models.fields.encrypted.EncryptedTextField', [], {'default': "'8b4e9c643dd24b00b068654ee0ff4634471c8b810e234f96bce621e48c61df97'"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '512'}),
'version': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project_id', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project_id', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'", 'index_together': "(('project_id', 'key', 'last_seen'),)"},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_password_expired': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_password_change': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_column': "'first_name'", 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'session_nonce': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'sentry.useravatar': {
'Meta': {'object_name': 'UserAvatar'},
'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.User']"})
},
'sentry.useremail': {
'Meta': {'unique_together': "(('user', 'email'),)", 'object_name': 'UserEmail'},
'date_hash_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'emails'", 'to': "orm['sentry.User']"}),
'validation_hash': ('django.db.models.fields.CharField', [], {'default': "u'v3MKTLoatafPf0YNZcPZiqcrK6rfsWTM'", 'max_length': '32'})
},
'sentry.userip': {
'Meta': {'unique_together': "(('user', 'ip_address'),)", 'object_name': 'UserIP'},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'), ('user', 'organization', 'key'))", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.userpermission': {
'Meta': {'unique_together': "(('user', 'permission'),)", 'object_name': 'UserPermission'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'permission': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.userreport': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'UserReport', 'index_together': "(('project', 'event_id'), ('project', 'date_added'))"},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']", 'null': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'event_user_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.versiondsymfile': {
'Meta': {'unique_together': "(('dsym_file', 'version', 'build'),)", 'object_name': 'VersionDSymFile'},
'build': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dsym_app': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.DSymApp']"}),
'dsym_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProjectDSymFile']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '32'})
}
}
complete_apps = ['sentry']
| bsd-3-clause | 9,205,398,465,352,764,000 | 90.668459 | 233 | 0.580878 | false |
micahjonas/python-2048-ai | chromectrl.py | 1 | 3685 | import urllib, urllib2, json, threading, itertools
try:
import websocket
except ImportError:
websocket = None
class ChromeDebuggerControl(object):
''' Control Chrome using the debugging socket.
Chrome must be launched using the --remote-debugging-port=<port> option for this to work! '''
def __init__(self, port):
if websocket is None:
raise NotImplementedError("websocket-client library not available; cannot control Chrome.\n"
"Please install it (pip install websocket-client) then try again.")
# Obtain the list of pages
pages = json.loads(urllib2.urlopen('http://localhost:%d/json/list' % port).read())
if len(pages) == 0:
raise Exception("No pages to attach to!")
elif len(pages) == 1:
page = pages[0]
else:
print "Select a page to attach to:"
for i, page in enumerate(pages):
print "%d) %s" % (i+1, page['title'].encode('unicode_escape'))
while 1:
try:
pageidx = int(raw_input("Selection? "))
page = pages[pageidx-1]
break
except Exception, e:
print "Invalid selection:", e
# Configure debugging websocket
wsurl = page['webSocketDebuggerUrl']
self.ws = websocket.create_connection(wsurl)
self.requests = {} # dictionary containing in-flight requests
self.results = {}
self.req_counter = itertools.count(1)
self.thread = threading.Thread(target=self._receive_thread)
self.thread.daemon = True
self.thread.start()
self._send_cmd_noresult('Runtime.enable')
def _receive_thread(self):
''' Continually read events and command results '''
while 1:
try:
message = json.loads(self.ws.recv())
if 'id' in message:
id = message['id']
event = self.requests.pop(id, None)
if event is not None:
self.results[id] = message
event.set()
except Exception as e:
break
def _send_cmd_noresult(self, method, **params):
''' Send a command and ignore the result. '''
id = next(self.req_counter)
out = {'id': id, 'method': method}
if params:
out['params'] = params
self.ws.send(json.dumps(out))
def _send_cmd(self, method, **params):
''' Send a command and wait for the result to be available. '''
id = next(self.req_counter)
out = {'id': id, 'method': method}
if params:
out['params'] = params
# Receive thread will signal us when the response is available
event = threading.Event()
self.requests[id] = event
self.ws.send(json.dumps(out))
event.wait()
resp = self.results.pop(id)
if 'error' in resp:
raise Exception("Command %s(%s) failed: %s (%d)" % (
method, ', '.join('%s=%r' % (k,v) for k,v in params.iteritems()), resp['error']['message'], resp['error']['code']))
return resp['result']
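    # Wire-format sketch (illustrative): a request such as
    #   {"id": 1, "method": "Runtime.evaluate", "params": {"expression": "1+1"}}
    # is answered with {"id": 1, "result": {...}} on success, or with
    # {"id": 1, "error": {"message": ..., "code": ...}} on failure.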
def execute(self, cmd):
resp = self._send_cmd('Runtime.evaluate', expression=cmd)
#if resp['wasThrown']:
# raise Exception("JS evaluation threw an error: %s" % resp['result']['description'])
result = resp['result']
if 'value' in result:
return result['value']
if 'description' in result:
return result['description']
return None
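# Minimal usage sketch (hypothetical values; assumes Chrome was started with
# --remote-debugging-port=9222 and that at least one page is open):
if __name__ == '__main__':
    ctrl = ChromeDebuggerControl(9222)
    print ctrl.execute('document.title')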
| mit | 258,010,317,988,155,460 | 36.222222 | 131 | 0.544369 | false |
hpc-cecal-uy/pf_metrics | jmetal/pf_metrics.py | 1 | 4208 | # Copyright 2015 Renzo Massobrio
# Facultad de Ingenieria, UdelaR
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
############################################################################################################################
# INSTRUCTIONS
#
# Script to plot the global Pareto front and calculate generational distance, spread, spacing and relative hypervolume based
# on the Pareto fronts output by jMetal (http://jmetal.sourceforge.net/).
#
# USAGE:
# python pf_metrics.py <path_to_results> <number_of_runs> <normalize> <objective_1_name> <objective_2_name>
# or, to compare against a reference Pareto front:
# python pf_metrics.py <reference_pf> <path_to_results> <number_of_runs> <normalize> <objective_1_name> <objective_2_name>
#
# To run the example:
# python pf_metrics.py example/ 5 <normalize> obj1 obj2
#
# Notes:
# -<path_to_results> is the folder where the files "FUN.*" are located
# -<number_of_runs> is the number of runs executed, e.g. if number_of_runs is 4 you should have FUN.0, ..., FUN.3
# -<normalize> is forwarded to generic_pf_metrics.compute and selects whether the fronts are normalized before computing the metrics
# -<objective_J_name> is the label for the axis corresponding to objective J in the plot
#
# IMPORTANT: THIS SCRIPT ASSUMES MINIMIZATION OF BOTH OBJECTIVES. YOU SHOULD MODIFY THIS BEHAVIOUR TO FIT YOUR NEEDS.
#
# The metrics are calculated using the formulas in "Multiobjective optimization using Evolutionary Algorithms" by Kalyanmoy Deb.
# For the spread calculation, the Euclidean distance is used.
#
# Hypervolumes are calculated using the code of Simon Wessing from TU Dortmund University found at:
# https://ls11-www.cs.uni-dortmund.de/rudolph/hypervolume/start
#
# Please feel free to contact me at: [email protected]
#
############################################################################################################################
import sys
from os import path
sys.path.append('../libs')
import generic_pf_metrics
def load_jmetal_results(path_to_results, objectives, number_of_runs):
    # Initialize a nested list: one entry per run, each holding one list per objective
results = []
for run in range (0,number_of_runs):
results.append([])
for no in range(len(objectives)):
results[run].append([])
for run in range(0,number_of_runs):
path_to_file = path.join(path_to_results, "FUN.{0}".format(run))
with open(path_to_file) as f:
for line in f:
tokens = line.split()
for no in range(len(objectives)):
results[run][no].append(float(tokens[no]))
return results
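# Expected FUN.<run> layout (values are illustrative only): one line per
# non-dominated solution, objective values separated by whitespace, e.g.
#   0.125  3.40
#   0.250  2.75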
####################################
########## MAIN ####################
def main():
ref_pf_file = None
normalize = None
if len(sys.argv) != 6 and len(sys.argv) != 7:
print("Not enough parameters. Usage:")
print(" - python {0} <path_to_results> <number_of_runs> <normalize> <obj1_name> <obj2_name>".format(sys.argv[0]))
print(" - python {0} <reference pf> <path_to_results> <number_of_runs> <normalize> <obj1_name> <obj2_name>".format(sys.argv[0]))
exit(-1)
else:
if len(sys.argv) == 6:
path_to_results = sys.argv[1]
number_of_runs = int(sys.argv[2])
normalize = sys.argv[3].strip().lower()
objectives = [sys.argv[4], sys.argv[5]]
else:
ref_pf_file = sys.argv[1]
path_to_results = sys.argv[2]
number_of_runs = int(sys.argv[3])
normalize = sys.argv[4].strip().lower()
objectives = [sys.argv[5], sys.argv[6]]
#Load the pareto fronts from the files
results = load_jmetal_results(path_to_results, objectives, number_of_runs)
generic_pf_metrics.compute(ref_pf_file, path_to_results, number_of_runs, objectives, results, normalize)
if __name__ == "__main__":
main()
| gpl-3.0 | 1,158,768,144,247,686,000 | 38.327103 | 136 | 0.612643 | false |
arnaudjuracek/py_suzanne | bak.run.py | 1 | 1766 | # ---------------------------
# py_suzanne 1.0
# Arnaud Juracek
# github.com/arnaudjuracek
import RPi.GPIO as GPIO
import glob, pygame, time, os, random
# --------------------------
# startup notification
print 'py_suzanne started'
os.system('omxplayer data/hello_world.aiff')
# --------------------------
# GPIO settings
GPIO.setmode(GPIO.BCM)
GPIO.setup(18, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# --------------------------
# USB handling/mounting
usb = '/home/pi/Suzanne/usb/'
files = []
def getfile():
if not os.listdir(usb):
print 'getfile(): usb not mounted, mounting...'
for drive in glob.glob('/dev/sd*'):
os.system('sudo mount '+ drive + ' ' + usb +' -o uid=pi,gid=pi')
files = soundfiles(usb)
if len(files)>0:
file = random.choice(files)
print 'getfile(): '+ file +' selected'
return file
else:
print "getfile():error: couldn't get file : usb directory empty or not mounted correctly"
return 'data/error.mp3'
# -------------------------
# sound files filter
# see http://stackoverflow.com/a/4568638
def soundfiles(path):
ext = (path + '*.mp3', path + '*.wav')
sounds = []
    for pattern in ext:
        sounds.extend(glob.glob(pattern))
return sounds
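# e.g. soundfiles('/home/pi/Suzanne/usb/') might return
# ['/home/pi/Suzanne/usb/a.mp3', '/home/pi/Suzanne/usb/b.wav']
# (illustrative paths only)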
# -------------------------
# instantiate pygame.mixer, player, etc
# see http://www.pygame.org/docs/ref/music.html#module-pygame.mixer.music
mixer = pygame.mixer
player = mixer.music
mixer.init()
# -------------------------
# lid open/close listenning
# see http://razzpisampler.oreilly.com/ch07.html
while True:
time.sleep(.5)
# GPIO.input(18) == False when 18 linked to GND
# GPIO.input(18) == True when 18 not linked to GND
if GPIO.input(18) == True:
if player.get_busy() == False:
player.load(getfile())
player.play()
else:
#player.fadeout(1000)
player.stop()
| gpl-3.0 | 2,631,060,658,673,370,000 | 24.970588 | 91 | 0.610985 | false |
markfinal/BuildAMation | codingtools/dotnetcore_make_release.py | 1 | 10008 | #!/usr/bin/python
from generate_docs import build_documentation
from generate_docs import NoDoxygenError
from optparse import OptionParser
import os
import platform
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
import traceback
import zipfile
g_script_dir = os.path.dirname(os.path.realpath(__file__))
g_bam_dir = os.path.dirname(g_script_dir)
def log(msg):
print >>sys.stdout, msg
sys.stdout.flush()
def run_process(args):
try:
log('Running: %s' % ' '.join(args))
subprocess.check_call(args)
except OSError, e:
raise RuntimeError('Unable to run process "%s" because "%s"' % (' '.join(args), str(e)))
def _run_git(arguments):
args = []
args.append('git')
args.extend(arguments)
log('Running: %s' % ' '.join(args))
result = subprocess.check_output(args)
return result.rstrip()
def get_branch_name():
return _run_git(['rev-parse', '--abbrev-ref', 'HEAD'])
def get_hash():
return _run_git(['rev-parse', '--short', 'HEAD'])
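# e.g. get_branch_name() -> 'master', get_hash() -> 'a1b2c3d'
# (illustrative values; both simply wrap `git rev-parse` on the current checkout)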
def run_dotnet(target, project_path, source_dir, output_dir, configuration='Release', framework='netcoreapp2.1', force=True, standalone_platform=None, verbosity='normal', extra_properties=None):
output_dir = os.path.join(output_dir, 'bin', configuration, framework)
cur_dir = os.getcwd()
os.chdir(source_dir)
try:
args = []
args.append('dotnet')
args.append(target)
args.append(project_path)
args.append('-c')
args.append(configuration)
args.append('-f')
args.append(framework)
if force:
args.append('--force')
args.append('-o')
args.append(output_dir)
args.append('-v')
args.append(verbosity)
if standalone_platform:
args.append('--self-contained')
args.append('-r')
args.append(standalone_platform)
if extra_properties:
args.append(extra_properties)
run_process(args)
finally:
os.chdir(cur_dir)
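# For reference, a call like run_dotnet('publish', proj, src, out) assembles a
# command line of the form (paths illustrative):
#   dotnet publish <proj> -c Release -f netcoreapp2.1 --force \
#       -o <out>/bin/Release/netcoreapp2.1 -v normal
# plus '--self-contained -r <rid>' and any extra_properties when supplied.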
def delete_directory(dir):
if os.path.isdir(dir):
log('Deleting folder, %s' % dir)
shutil.rmtree(dir)
def run_dotnet_publish(source_dir, build_dir, configuration='Release', framework='netcoreapp2.1', force=True, standalone_platform=None, verbosity='normal'):
delete_directory(build_dir)
os.makedirs(build_dir)
project = os.path.join(source_dir, 'Bam', 'Bam.csproj') # specifically build the Bam executable, so that the unit test dependencies don't get dragged in
run_dotnet('clean', project, source_dir, build_dir, configuration=configuration, framework=framework, force=False, standalone_platform=None, verbosity=verbosity)
run_dotnet('publish', project, source_dir, build_dir, configuration=configuration, framework=framework, force=force, standalone_platform=standalone_platform, verbosity=verbosity, extra_properties='/p:DebugType=None')
def copy_directory_to_directory(srcdir,destdir):
log('\tCopying directory ' + srcdir)
shutil.copytree(srcdir, destdir)
def copy_file_to_directory(srcfile,destdir):
log('\tCopying file ' + srcfile)
shutil.copy(srcfile, destdir)
def copy_support_files(source_dir, build_dir):
cur_dir = os.getcwd()
os.chdir(source_dir)
log('Copying support files from %s to %s ...' % (source_dir, build_dir))
try:
copy_directory_to_directory('packages', os.path.join(build_dir, 'packages'))
copy_directory_to_directory('tests', os.path.join(build_dir, 'tests'))
copy_file_to_directory('env.sh', build_dir)
copy_file_to_directory('env.bat', build_dir)
copy_file_to_directory('Changelog.txt', build_dir)
copy_file_to_directory('License.md', build_dir)
copy_file_to_directory('MS-PL.md', build_dir)
copy_file_to_directory('3rdPartyLicenses.md', build_dir)
finally:
os.chdir(cur_dir)
def list_files(base_dir):
log('Listing files in ' + base_dir)
starting_depth = base_dir.count(os.sep)
for root, dirs, files in os.walk(base_dir):
depth = root.count(os.sep) - starting_depth
log(' ' * depth + os.path.basename(root))
for f in files:
log(' ' * (depth + 1) + f)
def zip_dir(zip_path, dir):
log('Zipping directory %s to %s' % (dir, zip_path))
base_dir, leaf = os.path.split(dir)
cwd = os.getcwd()
try:
os.chdir(base_dir)
with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zip_object:
for root, dirs, files in os.walk(leaf):
for file_path in files:
zip_object.write(os.path.join(root, file_path))
finally:
os.chdir(cwd)
def tar_dir(tar_path, dir):
def windows_executable_filter(tarinfo):
if platform.system() != "Windows":
return tarinfo
# attempt to fix up the permissions that are lost during tarring on Windows
if tarinfo.name.endswith(".exe") or\
tarinfo.name.endswith(".dll") or\
tarinfo.name.endswith(".py") or\
tarinfo.name.endswith(".sh") or\
tarinfo.name.endswith("bam"):
tarinfo.mode = stat.S_IRUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH
return tarinfo
log('Tarring directory %s to %s' % (dir, tar_path))
base_dir, leaf = os.path.split(dir)
cwd = os.getcwd()
try:
os.chdir(base_dir)
with tarfile.open(tar_path, "w:gz") as tar:
tar.add(leaf, filter=windows_executable_filter)
finally:
os.chdir(cwd)
def main(options, build_dir, source_dir):
_,bam_version_dir = os.path.split(build_dir)
if options.doxygen:
generated_docs_dir = os.path.join(source_dir, 'docs')
delete_directory(generated_docs_dir)
try:
build_documentation(source_dir, options.doxygen, False)
if options.make_distribution:
zip_dir(os.path.realpath(os.path.join(build_dir, '..', '%s-docs' % bam_version_dir) + '.zip'), generated_docs_dir)
tar_dir(os.path.realpath(os.path.join(build_dir, '..', '%s-docs' % bam_version_dir) + '.tgz'), generated_docs_dir)
except NoDoxygenError, e:
log(str(e)) # not fatal, but do complain
run_dotnet_publish(
source_dir,
build_dir,
configuration='Release',
framework='netcoreapp2.1',
force=True,
verbosity='normal'
)
copy_support_files(source_dir, build_dir)
#list_files(build_dir)
if options.make_distribution:
zip_dir(os.path.realpath(os.path.join(build_dir, '..', '%s-AnyCPU' % bam_version_dir) + '.zip'), build_dir)
tar_dir(os.path.realpath(os.path.join(build_dir, '..', '%s-AnyCPU' % bam_version_dir) + '.tgz'), build_dir)
if options.standalone:
platforms = []
platforms.append('win-x64')
platforms.append('osx-x64')
platforms.append('linux-x64')
for platform in platforms:
platform_build_dir = build_dir + '-' + platform
run_dotnet_publish(
source_dir,
platform_build_dir,
configuration='Release',
framework='netcoreapp2.1',
force=True,
standalone_platform=platform
)
            copy_support_files(source_dir, platform_build_dir)
            #list_files(platform_build_dir)
def clone_repo(checkout_dir, gittag):
args = [
"git",
"clone",
"--depth",
"1",
"--branch",
gittag,
"https://github.com/markfinal/BuildAMation.git",
checkout_dir
]
log('Running: %s' % ' '.join(args))
subprocess.check_call(args)
log('Cloning complete')
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-s', '--standalone', action='store_true', dest='standalone', help='Make builds specific to the current platform and standalone.')
parser.add_option('-d', '--doxygen', dest='doxygen', default=None, help='Location of the doxygen executable in order to generate documentation.')
parser.add_option('-t', '--tag', dest='gittag', default=None, help='Create a release from a named git tag. Clones at depth 1 from the named tag into a temporary directory.')
parser.add_option('-x', '--distribution', action='store_true', dest='make_distribution', help='Generate zip and tar archives for the build to distribute.')
parser.add_option('-l', '--local', action='store_true', dest='local', help='Builds the local checkout into a bam_publish subdirectory')
parser.add_option('-c', '--clean', action='store_true', dest='cleanup', help='Clean up any intermediate temporary folders created at the end of a successful build.')
(options, args) = parser.parse_args()
temp_dir = tempfile.mkdtemp()
if options.gittag:
        # for some reason, cloning into a temporary folder (at least on macOS) causes the build
        # not to fail outright, but to generate an invalid set of assemblies:
        # the Bam.dll won't run with dotnet, and the schema ends up in the wrong place, for starters
source_dir = os.path.realpath(os.path.join(g_bam_dir, '..', "BuildAMation-%s-src" % options.gittag))
build_dir = os.path.join(temp_dir, "BuildAMation-%s" % options.gittag)
clone_repo(source_dir, options.gittag)
elif options.local:
source_dir = g_bam_dir
build_dir = os.path.join(source_dir, 'bam_publish')
else:
source_dir = g_bam_dir
branch = get_branch_name()
hash = get_hash()
build_dir = os.path.join(temp_dir, "BuildAMation-%s-%s" % (hash,branch))
try:
main(options, build_dir, source_dir)
# remove cloned checkout directory
if options.gittag and options.cleanup:
delete_directory(source_dir)
except Exception, e:
log('*** Failure reason: %s' % str(e))
log(traceback.format_exc())
finally:
pass
log('Done')
| bsd-3-clause | 2,779,776,771,498,325,000 | 36.066667 | 220 | 0.624201 | false |