code | repo_name | path | language | license | size
---|---|---|---|---|---
"""
WSGI config for generic project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "generic.settings")
application = get_wsgi_application()
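# A minimal local-serving sketch (an illustrative addition, not part of the
# original file): assuming the "generic.settings" module is importable, the
# application object above can be served for quick manual testing with the
# stdlib wsgiref server.
if __name__ == "__main__":
    from wsgiref.simple_server import make_server
    make_server("127.0.0.1", 8000, application).serve_forever()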
| wasit7/tutorials | django/django_generic_view/generic/generic/wsgi.py | Python | mit | 392 |
# cloudscope.utils.timez
# Time string utilities for ensuring that the timezone is properly handled.
#
# Author: Benjamin Bengfort <[email protected]>
# Created: Tue Nov 24 17:35:49 2015 -0500
#
# Copyright (C) 2015 Bengfort.com
# For license information, see LICENSE.txt
#
# ID: timez.py [d0f0ca1] [email protected] $
"""
Time string utilities for ensuring that the timezone is properly handled.
"""
##########################################################################
## Imports
##########################################################################
import re
from calendar import timegm
from dateutil.tz import tzutc
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from cloudscope.config import settings
##########################################################################
## Format constants
##########################################################################
HUMAN_DATETIME = "%a %b %d %H:%M:%S %Y %z"
HUMAN_DATE = "%b %d, %Y"
HUMAN_TIME = "%I:%M:%S %p"
JSON_DATETIME = "%Y-%m-%dT%H:%M:%S.%fZ" # Must be UTC
ISO8601_DATETIME = "%Y-%m-%dT%H:%M:%S%z"
ISO8601_DATE = "%Y-%m-%d"
ISO8601_TIME = "%H:%M:%S"
COMMON_DATETIME = "%d/%b/%Y:%H:%M:%S %z"
##########################################################################
## Module helper functions
##########################################################################
zre = re.compile(r'([\-\+]\d{4})')
def strptimez(dtstr, dtfmt):
"""
    Helper function that performs the timezone calculation needed to support
    the '%z' directive, which datetime.strptime does not handle in Python 2.7.
"""
if '%z' not in dtfmt:
return datetime.strptime(dtstr, dtfmt)
dtfmt = dtfmt.replace('%z', '')
    offset = int(zre.search(dtstr).group(1))
    dtstr = zre.sub('', dtstr)
    # offsets look like -0500 or +0530; split out hours and minutes, sign-aware
    sign = 1 if offset >= 0 else -1
    delta = sign * timedelta(hours=abs(offset) // 100, minutes=abs(offset) % 100)
utctsp = datetime.strptime(dtstr, dtfmt) - delta
return utctsp.replace(tzinfo=tzutc())
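# Illustrative usage (not part of the original module): parse an ISO 8601
# timestamp carrying a numeric offset; the result is normalized to UTC and
# made timezone-aware.
#
#   >>> strptimez("2015-11-24T17:35:49-0500", ISO8601_DATETIME)
#   datetime.datetime(2015, 11, 24, 22, 35, 49, tzinfo=tzutc())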
def epochftime(dt):
"""
Returns the Unix epoch time from a datetime. The epoch time is the number
of seconds since January 1, 1970 at midnight UTC.
"""
# Handle timezone aware datetime objects
if dt.tzinfo is not None and dt.utcoffset() is not None:
dt = dt.replace(tzinfo=None) - dt.utcoffset()
return timegm(dt.timetuple())
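# Round-trip sketch (illustrative): epochftime and epochptime are inverses,
# modulo the fact that epochptime always returns a tz-aware UTC datetime.
#
#   >>> epochftime(datetime(2009, 8, 11, 0, 0, tzinfo=tzutc()))
#   1249948800
#   >>> epochptime(1249948800)
#   datetime.datetime(2009, 8, 11, 0, 0, tzinfo=tzutc())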
def epochptime(epoch):
"""
Returns a date time from a Unix epoch time.
"""
if isinstance(epoch, basestring):
epoch = float(epoch)
if isinstance(epoch, float):
epoch = int(epoch)
return datetime.utcfromtimestamp(epoch).replace(tzinfo=tzutc())
def humanizedelta(*args, **kwargs):
"""
    Wrapper around dateutil.relativedelta (same constructor args); returns
    a humanized string representing the delta in a meaningful way.
"""
if 'milliseconds' in kwargs:
sec = kwargs.get('seconds', 0)
msec = kwargs.pop('milliseconds')
kwargs['seconds'] = sec + (float(msec) / 1000.0)
delta = relativedelta(*args, **kwargs)
attrs = ('years', 'months', 'days', 'hours', 'minutes', 'seconds')
parts = [
        '%d %s' % (getattr(delta, attr), attr if getattr(delta, attr) > 1 else attr[:-1])
for attr in attrs if getattr(delta, attr)
]
return " ".join(parts)
| bbengfort/cloudscope | cloudscope/utils/timez.py | Python | mit | 3,309 |
from __future__ import print_function
import importlib
import numpy as np
import matplotlib.pyplot as plt
import swe.derives as derives
import swe.unsplit_fluxes as flx
import mesh.boundary as bnd
from simulation_null import NullSimulation, grid_setup, bc_setup
import util.plot_tools as plot_tools
import particles.particles as particles
class Variables(object):
"""
a container class for easy access to the different swe
variables by an integer key
"""
def __init__(self, myd):
self.nvar = len(myd.names)
        # conserved variables -- we set these when we initialize so
        # they match the CellCenterData2d object
self.ih = myd.names.index("height")
self.ixmom = myd.names.index("x-momentum")
self.iymom = myd.names.index("y-momentum")
# if there are any additional variables, we treat them as
# passively advected scalars
self.naux = self.nvar - 3
if self.naux > 0:
self.ihx = 3
else:
self.ihx = -1
# primitive variables
self.nq = 3 + self.naux
self.ih = 0
self.iu = 1
self.iv = 2
if self.naux > 0:
self.ix = 3 # advected scalar
else:
self.ix = -1
def cons_to_prim(U, g, ivars, myg):
"""
Convert an input vector of conserved variables
:math:`U = (h, hu, hv, {hX})`
to primitive variables :math:`q = (h, u, v, {X})`.
"""
q = myg.scratch_array(nvar=ivars.nq)
q[:, :, ivars.ih] = U[:, :, ivars.ih]
q[:, :, ivars.iu] = U[:, :, ivars.ixmom]/U[:, :, ivars.ih]
q[:, :, ivars.iv] = U[:, :, ivars.iymom]/U[:, :, ivars.ih]
if ivars.naux > 0:
for nq, nu in zip(range(ivars.ix, ivars.ix+ivars.naux),
range(ivars.ihx, ivars.ihx+ivars.naux)):
q[:, :, nq] = U[:, :, nu]/q[:, :, ivars.ih]
return q
def prim_to_cons(q, g, ivars, myg):
"""
Convert an input vector of primitive variables :math:`q = (h, u, v, {X})`
to conserved variables :math:`U = (h, hu, hv, {hX})`
"""
U = myg.scratch_array(nvar=ivars.nvar)
U[:, :, ivars.ih] = q[:, :, ivars.ih]
U[:, :, ivars.ixmom] = q[:, :, ivars.iu]*U[:, :, ivars.ih]
U[:, :, ivars.iymom] = q[:, :, ivars.iv]*U[:, :, ivars.ih]
if ivars.naux > 0:
for nq, nu in zip(range(ivars.ix, ivars.ix+ivars.naux),
range(ivars.ihx, ivars.ihx+ivars.naux)):
U[:, :, nu] = q[:, :, nq]*q[:, :, ivars.ih]
return U
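# A self-contained sketch of the same algebra (illustrative only; plain
# numbers instead of pyro2's grid arrays): away from h == 0 the two
# transforms above are exact inverses, since u = (hu)/h and v = (hv)/h.
#
#   h, hu, hv = 2.0, 4.0, -1.0     # conserved state in one zone
#   u, v = hu / h, hv / h          # primitives: (2.0, -0.5)
#   assert (u * h, v * h) == (hu, hv)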
class Simulation(NullSimulation):
"""The main simulation class for the corner transport upwind
swe hydrodynamics solver
"""
def initialize(self, extra_vars=None, ng=4):
"""
Initialize the grid and variables for swe flow and set
the initial conditions for the chosen problem.
"""
my_grid = grid_setup(self.rp, ng=ng)
my_data = self.data_class(my_grid)
bc, bc_xodd, bc_yodd = bc_setup(self.rp)
# are we dealing with solid boundaries? we'll use these for
# the Riemann solver
self.solid = bnd.bc_is_solid(bc)
        # height and momenta (plus a passively advected "fuel" scalar)
my_data.register_var("height", bc)
my_data.register_var("x-momentum", bc_xodd)
my_data.register_var("y-momentum", bc_yodd)
my_data.register_var("fuel", bc)
# any extras?
if extra_vars is not None:
for v in extra_vars:
my_data.register_var(v, bc)
        # store the gravitational acceleration g as an auxiliary quantity
        # so we have a self-contained object stored in output files for
        # making plots; we also store grav because we'll need it in some BCs
my_data.set_aux("g", self.rp.get_param("swe.grav"))
my_data.create()
self.cc_data = my_data
if self.rp.get_param("particles.do_particles") == 1:
n_particles = self.rp.get_param("particles.n_particles")
particle_generator = self.rp.get_param("particles.particle_generator")
self.particles = particles.Particles(self.cc_data, bc, n_particles, particle_generator)
        # some auxiliary data that we'll need to fill ghost cells in, but
        # that isn't really part of the main solution
aux_data = self.data_class(my_grid)
aux_data.register_var("ymom_src", bc_yodd)
aux_data.create()
self.aux_data = aux_data
self.ivars = Variables(my_data)
# derived variables
self.cc_data.add_derived(derives.derive_primitives)
# initial conditions for the problem
problem = importlib.import_module("{}.problems.{}".format(
self.solver_name, self.problem_name))
problem.init_data(self.cc_data, self.rp)
if self.verbose > 0:
print(my_data)
def method_compute_timestep(self):
"""
The timestep function computes the advective timestep (CFL)
constraint. The CFL constraint says that information cannot
propagate further than one zone per timestep.
We use the driver.cfl parameter to control what fraction of the
CFL step we actually take.
"""
cfl = self.rp.get_param("driver.cfl")
# get the variables we need
u, v, cs = self.cc_data.get_var(["velocity", "soundspeed"])
# the timestep is min(dx/(|u| + cs), dy/(|v| + cs))
xtmp = self.cc_data.grid.dx/(abs(u) + cs)
ytmp = self.cc_data.grid.dy/(abs(v) + cs)
self.dt = cfl*float(min(xtmp.min(), ytmp.min()))
def evolve(self):
"""
Evolve the equations of swe hydrodynamics through a
timestep dt.
"""
tm_evolve = self.tc.timer("evolve")
tm_evolve.begin()
myg = self.cc_data.grid
Flux_x, Flux_y = flx.unsplit_fluxes(self.cc_data, self.aux_data, self.rp,
self.ivars, self.solid, self.tc, self.dt)
# conservative update
dtdx = self.dt/myg.dx
dtdy = self.dt/myg.dy
for n in range(self.ivars.nvar):
var = self.cc_data.get_var_by_index(n)
var.v()[:, :] += \
dtdx*(Flux_x.v(n=n) - Flux_x.ip(1, n=n)) + \
dtdy*(Flux_y.v(n=n) - Flux_y.jp(1, n=n))
if self.particles is not None:
self.particles.update_particles(self.dt)
# increment the time
self.cc_data.t += self.dt
self.n += 1
tm_evolve.end()
def dovis(self):
"""
Do runtime visualization.
"""
plt.clf()
plt.rc("font", size=10)
# we do this even though ivars is in self, so this works when
# we are plotting from a file
ivars = Variables(self.cc_data)
# access g from the cc_data object so we can use dovis
# outside of a running simulation.
g = self.cc_data.get_aux("g")
q = cons_to_prim(self.cc_data.data, g, ivars, self.cc_data.grid)
h = q[:, :, ivars.ih]
u = q[:, :, ivars.iu]
v = q[:, :, ivars.iv]
fuel = q[:, :, ivars.ix]
magvel = np.sqrt(u**2 + v**2)
myg = self.cc_data.grid
vort = myg.scratch_array()
dv = 0.5*(v.ip(1) - v.ip(-1))/myg.dx
du = 0.5*(u.jp(1) - u.jp(-1))/myg.dy
vort.v()[:, :] = dv - du
fields = [h, magvel, fuel, vort]
field_names = [r"$h$", r"$|U|$", r"$X$", r"$\nabla\times U$"]
_, axes, cbar_title = plot_tools.setup_axes(myg, len(fields))
for n, ax in enumerate(axes):
v = fields[n]
img = ax.imshow(np.transpose(v.v()),
interpolation="nearest", origin="lower",
extent=[myg.xmin, myg.xmax, myg.ymin, myg.ymax],
cmap=self.cm)
ax.set_xlabel("x")
ax.set_ylabel("y")
# needed for PDF rendering
cb = axes.cbar_axes[n].colorbar(img)
cb.solids.set_rasterized(True)
cb.solids.set_edgecolor("face")
if cbar_title:
cb.ax.set_title(field_names[n])
else:
ax.set_title(field_names[n])
if self.particles is not None:
ax = axes[0]
particle_positions = self.particles.get_positions()
# dye particles
colors = self.particles.get_init_positions()[:, 0]
# plot particles
ax.scatter(particle_positions[:, 0],
particle_positions[:, 1], s=5, c=colors, alpha=0.8, cmap="Greys")
ax.set_xlim([myg.xmin, myg.xmax])
ax.set_ylim([myg.ymin, myg.ymax])
plt.figtext(0.05, 0.0125, "t = {:10.5g}".format(self.cc_data.t))
plt.pause(0.001)
plt.draw()
| harpolea/pyro2 | swe/simulation.py | Python | bsd-3-clause | 8,797 |
from __future__ import print_function
import os, sys
import subprocess
import atexit
import time
import argparse
class File(object):
def __init__(self, path):
self.m_path = path
def exists(self):
return os.path.exists(self.m_path)
def read(self):
with open(self.m_path, 'r') as fd:
return fd.read().strip()
def write(self, what):
with open(self.m_path, 'w') as fd:
fd.write(what)
def __str__(self):
return self.m_path
class DirBase(object):
def __init__(self, path):
self.m_path = path
def getFile(self, which):
return File(self.getPath(which))
def getPath(self, which):
return os.path.join(self.m_path, which)
class Window(DirBase):
def __init__(self, _id):
self.m_id = _id
DirBase.__init__(self, self.getDir())
    @classmethod
    def setBase(cls, base):
        cls.m_base = base
def getDir(self):
windir = os.path.join(self.m_base.m_windows, self.m_id)
if not os.path.isdir(windir):
print("Test window directory", windir, "is not existing?")
sys.exit(1)
return windir
def __str__(self):
return os.path.basename(self.m_id)
class ManagerDir(DirBase):
def __init__(self, path):
DirBase.__init__(self, path)
class TestBase(object):
def setupParser(self):
self.m_parser = argparse.ArgumentParser("xwmfs unit test")
self.m_parser.add_argument(
"-l", "--logfile",
help = "Path to write xwmfs logs to"
)
self.m_parser.add_argument(
"-d", "--debug",
help = "Run xwmfs with debugging extras",
action = 'store_true'
)
self.m_parser.add_argument(
"-b", "--binary",
help = "Location of the xwmfs executable to test",
default = None
)
def parseArgs(self):
self.m_args = self.m_parser.parse_args()
def __init__(self):
self.setupParser()
self.m_res = 0
atexit.register(self._cleanup)
self.m_proc = None
self.m_test_window = None
self.m_mount_dir = "/tmp/xwmfs"
Window.setBase(self)
def _cleanup(self):
if self.m_proc:
self.m_proc.terminate()
self.m_proc.wait()
os.rmdir(self.m_mount_dir)
if self.m_test_window:
self.closeTestWindow()
def getBinary(self):
xwmfs = os.environ.get("XWMFS", None)
if self.m_args.binary:
ret = self.m_args.binary
elif xwmfs:
ret = xwmfs
else:
ret = None
if not ret:
print("Expecting path to xwmfs binary as parameter or in the XWMFS environment variable")
sys.exit(1)
if not os.path.isfile(ret):
print("Not a regular file:", ret)
sys.exit(1)
return ret
def logSetting(self):
return "111{}".format(
"1" if self.m_args.debug else "0"
)
def extraSettings(self):
debug_opts = [ "--xsync" ]
# [ "-o", "debug" ]
return debug_opts if self.m_args.debug else []
def mount(self):
if os.path.exists(self.m_mount_dir):
print("Refusing to operate on existing mount dir", self.m_mount_dir)
sys.exit(1)
os.makedirs(self.m_mount_dir)
self.m_proc = subprocess.Popen(
[
self.m_xwmfs, "-f",
"--logger={}".format(self.logSetting()),
] + self.extraSettings() + [ self.m_mount_dir ]
)
while len(os.listdir(self.m_mount_dir)) == 0:
# poll until the directory is actually mounted
try:
res = self.m_proc.wait(timeout = 0.25)
print("Failed to mount xwmfs, exited with", res)
sys.exit(1)
except subprocess.TimeoutExpired:
pass
def unmount(self):
self.m_proc.terminate()
res = self.m_proc.wait()
self.m_proc = None
os.rmdir(self.m_mount_dir)
if res != 0:
print("xwmfs exited with non-zero code of", res)
sys.exit(res)
def run(self):
if not "DISPLAY" in os.environ:
# don't fail tests because of a missing DISPLAY. this
# is typically the case on automated build servers and
# alike. our tests are mor for interactive testing.
# according to autotools documentation this is the
# exit code to signal a skipped test:
# https://www.gnu.org/software/automake/manual/html_node/Scripts_002dbased-Testsuites.html
return 77
self.parseArgs()
self.m_xwmfs = self.getBinary()
self.mount()
self.m_windows = os.path.join(self.m_mount_dir, "windows")
self.m_mgr = ManagerDir(os.path.join(self.m_mount_dir, "wm"))
self.test()
self.unmount()
return self.m_res
def getWindowList(self):
return [ Window(w) for w in os.listdir(self.m_windows) ]
def getTestWindow(self):
our_id = os.environ.get("WINDOWID", None)
if our_id:
return Window(our_id)
# otherwise just the first one we approach
        return self.getWindowList()[0]
def createTestWindow(self, required_files = []):
# creates a new window and returns its window ID
# this currently assumes an xterm executable is around
if self.m_test_window:
raise Exception("Double create of test window, without closeTestWindow()")
print("Creating test window")
try:
self.m_test_window = subprocess.Popen("xterm")
except Exception as e:
print("Failed to run xterm to create a test window")
raise
diff = set()
our_win = None
while not our_win:
windows = self.getWindowList()
for window in windows:
pid = window.getFile("pid")
try:
pid = int(pid.read())
except Exception as e:
# race condition, no PID yet
continue
if pid == self.m_test_window.pid:
our_win = window
break
else:
time.sleep(0.25)
print("Created window", our_win, "waiting for", required_files)
for req in required_files:
wf = our_win.getFile(req)
count = 0
print("Waiting for", req, "file")
while not wf.exists():
count += 1
time.sleep(0.25)
if count >= 50:
raise Exception("Required window file '{}' did not appear".format(req))
print("All files present")
# wait for the window to become mapped
mapped = our_win.getFile("mapped")
while mapped.read() != "1":
time.sleep(0.25)
return our_win
def closeTestWindow(self):
# waits for a previously created test window to exit
self.m_test_window.kill()
self.m_test_window.wait()
self.m_test_window = None
def getManagerFile(self, which):
return self.m_mgr.getFile(which)
def setGoodResult(self, text):
print("Good:", text)
def setBadResult(self, text):
print("Bad:", text)
self.m_res = 1
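# Illustrative subclass (a sketch, not part of the original module; the
# "mapped" window file is taken from createTestWindow() above, other file
# names would be test specific): concrete tests derive from TestBase and
# implement test(), which run() invokes between mount() and unmount().
#
#   class MappedTest(TestBase):
#       def test(self):
#           win = self.getTestWindow()
#           if win.getFile("mapped").read() == "1":
#               self.setGoodResult("test window is mapped")
#           else:
#               self.setBadResult("test window is not mapped")
#
#   if __name__ == '__main__':
#       sys.exit(MappedTest().run())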
| gerstner-hub/xwmfs | tests/base/base.py | Python | gpl-2.0 | 6,170 |
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
import copy
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.utils.path import unfrackpath
from ansible.plugins import connection_loader
from ansible.compat.six import iteritems
from ansible.module_utils.vyos import vyos_argument_spec
from ansible.module_utils.basic import AnsibleFallbackNotFound
from ansible.module_utils._text import to_bytes
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
if self._play_context.connection != 'local':
return dict(
failed=True,
msg='invalid connection specified, expected connection=local, '
'got %s' % self._play_context.connection
)
provider = self.load_provider()
pc = copy.deepcopy(self._play_context)
pc.connection = 'network_cli'
pc.network_os = 'vyos'
pc.port = provider['port'] or self._play_context.port or 22
pc.remote_user = provider['username'] or self._play_context.connection_user
pc.password = provider['password'] or self._play_context.password
pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
pc.timeout = provider['timeout'] or self._play_context.timeout
connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
socket_path = self._get_socket_path(pc)
display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
if not os.path.exists(socket_path):
# start the connection if it isn't started
rc, out, err = connection.exec_command('open_shell()')
if rc != 0:
return {'failed': True, 'msg': 'unable to connect to control socket'}
else:
            # make sure we are in the right cli context, which should be
            # enable mode and not config mode
rc, out, err = connection.exec_command('prompt()')
while str(out).strip().endswith('#'):
display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
connection.exec_command('exit')
rc, out, err = connection.exec_command('prompt()')
task_vars['ansible_socket'] = socket_path
return super(ActionModule, self).run(tmp, task_vars)
def _get_socket_path(self, play_context):
ssh = connection_loader.get('ssh', class_only=True)
cp = ssh._create_control_path(play_context.remote_addr, play_context.port, play_context.remote_user)
path = unfrackpath("$HOME/.ansible/pc")
return cp % dict(directory=path)
def load_provider(self):
provider = self._task.args.get('provider', {})
for key, value in iteritems(vyos_argument_spec):
if key != 'provider' and key not in provider:
if key in self._task.args:
provider[key] = self._task.args[key]
elif 'fallback' in value:
provider[key] = self._fallback(value['fallback'])
elif key not in provider:
provider[key] = None
return provider
def _fallback(self, fallback):
strategy = fallback[0]
args = []
kwargs = {}
for item in fallback[1:]:
if isinstance(item, dict):
kwargs = item
else:
args = item
try:
return strategy(*args, **kwargs)
except AnsibleFallbackNotFound:
pass
| adityacs/ansible | lib/ansible/plugins/action/vyos.py | Python | gpl-3.0 | 4,460 |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""An extremely simple WSGI web application framework.
This module is an alias for the webapp2 module i.e. the following are
equivalent:
1. from google.appengine.ext import webapp
2. import webapp2 as webapp
It exports three primary classes: Request, Response, and RequestHandler. You
implement a web application by subclassing RequestHandler. As WSGI requests come
in, they are passed to instances of your RequestHandlers. The RequestHandler
class provides access to the easy-to-use Request and Response objects so you can
interpret the request and write the response with no knowledge of the esoteric
WSGI semantics. Here is a simple example:
from google.appengine.ext import webapp
import wsgiref.simple_server
class MainPage(webapp.RequestHandler):
def get(self):
self.response.out.write(
'<html><body><form action="/hello" method="post">'
'Name: <input name="name" type="text" size="20"> '
'<input type="submit" value="Say Hello"></form></body></html>')
class HelloPage(webapp.RequestHandler):
def post(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.out.write('Hello, %s' % self.request.get('name'))
application = webapp.WSGIApplication([
('/', MainPage),
('/hello', HelloPage)
], debug=True)
The WSGIApplication class maps URI regular expressions to your RequestHandler
classes. It is a WSGI-compatible application object, so you can use it in
conjunction with wsgiref to make your web application into, e.g., a CGI
script or a simple HTTP server, as in the example above.
The framework does not support streaming output. All output from a response
is stored in memory before it is written.
"""
import logging
import os
from google.appengine.api import lib_config
def __django_version_setup():
"""Selects a particular Django version to load."""
django_version = _config_handle.django_version
if django_version is not None:
from google.appengine.dist import use_library
use_library('django', str(django_version))
else:
from google.appengine.dist import _library
version, explicit = _library.installed.get('django', ('0.96', False))
if not explicit:
logging.warn('You are using the default Django version (%s). '
'The default Django version will change in an '
'App Engine release in the near future. '
'Please call use_library() to explicitly select a '
'Django version. '
'For more information see %s',
version,
'https://developers.google.com/appengine/docs/python/tools/'
'libraries#Django')
try:
import django
if not hasattr(django, 'VERSION'):
from django import v0_96
except ImportError:
pass
def _django_setup():
"""Imports and configures Django.
This can be overridden by defining a function named
webapp_django_setup() in the app's appengine_config.py file (see
lib_config docs). Such a function should import and configure
Django.
In the Python 2.5 runtime, you can also just configure the Django version to
be used by setting webapp_django_version in that file.
Finally, calling use_library('django', <version>) in that file
should also work:
    # Example taken from
# https://developers.google.com/appengine/docs/python/tools/libraries#Django
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from google.appengine.dist import use_library
use_library('django', '1.2')
  In the Python 2.7 runtime, the Django version is specified in your app.yaml
  file and use_library is not supported.
If your application also imports Django directly it should ensure
that the same code is executed before your app imports Django
(directly or indirectly). Perhaps the simplest way to ensure that
is to include the following in your main.py (and in each alternate
main script):
from google.appengine.ext.webapp import template
import django
This will ensure that whatever Django setup code you have included
in appengine_config.py is executed, as a side effect of importing
the webapp.template module.
"""
if os.environ.get('APPENGINE_RUNTIME') != 'python27':
__django_version_setup()
import django
import django.conf
try:
getattr(django.conf.settings, 'FAKE_ATTR', None)
except (ImportError, EnvironmentError), e:
if os.getenv(django.conf.ENVIRONMENT_VARIABLE):
logging.warning(e)
try:
django.conf.settings.configure(
DEBUG=False,
TEMPLATE_DEBUG=False,
TEMPLATE_LOADERS=(
'django.template.loaders.filesystem.load_template_source',
),
)
except (EnvironmentError, RuntimeError):
pass
if os.environ.get('APPENGINE_RUNTIME') == 'python27':
_config_handle = lib_config.register(
'webapp',
{'add_wsgi_middleware': lambda app: app,})
from webapp2 import *
else:
_config_handle = lib_config.register(
'webapp',
{'django_setup': _django_setup,
'django_version': None,
'add_wsgi_middleware': lambda app: app,
})
from google.appengine.ext.webapp._webapp25 import *
from google.appengine.ext.webapp._webapp25 import __doc__
| ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/google/appengine/ext/webapp/__init__.py | Python | bsd-3-clause | 5,929 |
from CCH import CCH
from config import *
from GSM import *
import numpy as np
class BCCH(CCH):
def __init__(self,slot):
CCH.__init__(self)
self.config = (range(2,6),slot)
self.name = "BCCH"
def callback(self,b,fn,state):
b.training = state.bcc
return CCH.callback(self,b,fn,state)
| ruishihan/R7-with-notes | src/host/python/gsmlib/BCCH.py | Python | apache-2.0 | 303 |
"""
LWR job manager that uses a CLI interface to a job queue (e.g. Torque's qsub,
qstat, etc...).
"""
from .base.external import ExternalBaseManager
from .util.external import parse_external_id
from .util.cli import CliInterface, split_params
from .util.job_script import job_script
from logging import getLogger
log = getLogger(__name__)
class CliQueueManager(ExternalBaseManager):
manager_type = "queued_cli"
def __init__(self, name, app, **kwds):
super(CliQueueManager, self).__init__(name, app, **kwds)
self.cli_interface = CliInterface(code_dir='.')
self.shell_params, self.job_params = split_params(kwds)
def launch(self, job_id, command_line, submit_params={}, dependencies_description=None, env=[]):
self._check_execution_with_tool_file(job_id, command_line)
shell, job_interface = self.__get_cli_plugins()
stdout_path = self._stdout_path(job_id)
stderr_path = self._stderr_path(job_id)
job_name = self._job_name(job_id)
command_line = self._expand_command_line(command_line, dependencies_description)
job_script_kwargs = self._job_template_env(job_id, command_line=command_line, env=env)
extra_kwargs = job_interface.job_script_kwargs(stdout_path, stderr_path, job_name)
job_script_kwargs.update(extra_kwargs)
script = job_script(**job_script_kwargs)
script_path = self._write_job_script(job_id, script)
submission_command = job_interface.submit(script_path)
cmd_out = shell.execute(submission_command)
if cmd_out.returncode != 0:
log.warn("Failed to submit job - command was %s" % submission_command)
raise Exception("Failed to submit job")
external_id = parse_external_id(cmd_out.stdout.strip())
if not external_id:
message_template = "Failed to obtain externl id for job_id %s and submission_command %s"
message = message_template % (job_id, submission_command)
log.warn(message)
raise Exception("Failed to obtain external id")
self._register_external_id(job_id, external_id)
def __get_cli_plugins(self):
return self.cli_interface.get_plugins(self.shell_params, self.job_params)
def _kill_external(self, external_id):
shell, job_interface = self.__get_cli_plugins()
kill_command = job_interface.delete(external_id)
shell.execute(kill_command)
def _get_status_external(self, external_id):
shell, job_interface = self.__get_cli_plugins()
status_command = job_interface.get_single_status(external_id)
cmd_out = shell.execute(status_command)
state = job_interface.parse_single_status(cmd_out.stdout, external_id)
return state
| jmchilton/lwr | lwr/managers/queued_cli.py | Python | apache-2.0 | 2,771 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2017 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2017 Jesús Espino <[email protected]>
# Copyright (C) 2014-2017 David Barragán <[email protected]>
# Copyright (C) 2014-2017 Alejandro Alonso <[email protected]>
# Copyright (C) 2014-2017 Anler Hernández <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from taiga.projects.attachments.utils import attach_basic_attachments
from taiga.projects.notifications.utils import attach_watchers_to_queryset
from taiga.projects.notifications.utils import attach_total_watchers_to_queryset
from taiga.projects.notifications.utils import attach_is_watcher_to_queryset
from taiga.projects.history.utils import attach_total_comments_to_queryset
from taiga.projects.votes.utils import attach_total_voters_to_queryset
from taiga.projects.votes.utils import attach_is_voter_to_queryset
def attach_total_points(queryset, as_field="total_points_attr"):
"""Attach total of point values to each object of the queryset.
:param queryset: A Django user stories queryset object.
:param as_field: Attach the points as an attribute with this name.
:return: Queryset object with the additional `as_field` field.
"""
model = queryset.model
sql = """SELECT SUM(projects_points.value)
FROM userstories_rolepoints
INNER JOIN projects_points ON userstories_rolepoints.points_id = projects_points.id
WHERE userstories_rolepoints.user_story_id = {tbl}.id"""
sql = sql.format(tbl=model._meta.db_table)
queryset = queryset.extra(select={as_field: sql})
return queryset
def attach_role_points(queryset, as_field="role_points_attr"):
"""Attach role point as json column to each object of the queryset.
:param queryset: A Django user stories queryset object.
:param as_field: Attach the role points as an attribute with this name.
:return: Queryset object with the additional `as_field` field.
"""
model = queryset.model
sql = """SELECT FORMAT('{{%%s}}',
STRING_AGG(format(
'"%%s":%%s',
TO_JSON(userstories_rolepoints.role_id),
TO_JSON(userstories_rolepoints.points_id)
), ',')
)::json
FROM userstories_rolepoints
WHERE userstories_rolepoints.user_story_id = {tbl}.id"""
sql = sql.format(tbl=model._meta.db_table)
queryset = queryset.extra(select={as_field: sql})
return queryset
def attach_tasks(queryset, as_field="tasks_attr"):
"""Attach tasks as json column to each object of the queryset.
:param queryset: A Django user stories queryset object.
:param as_field: Attach tasks as an attribute with this name.
:return: Queryset object with the additional `as_field` field.
"""
model = queryset.model
sql = """SELECT json_agg(row_to_json(t))
FROM(
SELECT
tasks_task.id,
tasks_task.ref,
tasks_task.subject,
tasks_task.status_id,
tasks_task.is_blocked,
tasks_task.is_iocaine,
projects_taskstatus.is_closed
FROM tasks_task
INNER JOIN projects_taskstatus on projects_taskstatus.id = tasks_task.status_id
WHERE user_story_id = {tbl}.id
ORDER BY tasks_task.us_order, tasks_task.ref
) t
"""
sql = sql.format(tbl=model._meta.db_table)
queryset = queryset.extra(select={as_field: sql})
return queryset
def attach_epics(queryset, as_field="epics_attr"):
"""Attach epics as json column to each object of the queryset.
:param queryset: A Django user stories queryset object.
:param as_field: Attach the epics as an attribute with this name.
:return: Queryset object with the additional `as_field` field.
"""
model = queryset.model
sql = """SELECT json_agg(row_to_json(t))
FROM (SELECT "epics_epic"."id" AS "id",
"epics_epic"."ref" AS "ref",
"epics_epic"."subject" AS "subject",
"epics_epic"."color" AS "color",
(SELECT row_to_json(p)
FROM (SELECT "projects_project"."id" AS "id",
"projects_project"."name" AS "name",
"projects_project"."slug" AS "slug"
) p
) AS "project"
FROM "epics_relateduserstory"
INNER JOIN "epics_epic" ON "epics_epic"."id" = "epics_relateduserstory"."epic_id"
INNER JOIN "projects_project" ON "projects_project"."id" = "epics_epic"."project_id"
WHERE "epics_relateduserstory"."user_story_id" = {tbl}.id
ORDER BY "projects_project"."name", "epics_epic"."ref") t"""
sql = sql.format(tbl=model._meta.db_table)
queryset = queryset.extra(select={as_field: sql})
return queryset
def attach_epic_order(queryset, epic_id, as_field="epic_order"):
"""Attach epic_order column to each object of the queryset.
:param queryset: A Django user stories queryset object.
:param epic_id: Order related to this epic.
:param as_field: Attach order as an attribute with this name.
:return: Queryset object with the additional `as_field` field.
"""
model = queryset.model
sql = """SELECT "epics_relateduserstory"."order" AS "epic_order"
FROM "epics_relateduserstory"
WHERE "epics_relateduserstory"."user_story_id" = {tbl}.id and
"epics_relateduserstory"."epic_id" = {epic_id}"""
sql = sql.format(tbl=model._meta.db_table, epic_id=epic_id)
queryset = queryset.extra(select={as_field: sql})
return queryset
def attach_extra_info(queryset, user=None, include_attachments=False, include_tasks=False, epic_id=None):
queryset = attach_total_points(queryset)
queryset = attach_role_points(queryset)
queryset = attach_epics(queryset)
if include_attachments:
queryset = attach_basic_attachments(queryset)
queryset = queryset.extra(select={"include_attachments": "True"})
if include_tasks:
queryset = attach_tasks(queryset)
queryset = queryset.extra(select={"include_tasks": "True"})
if epic_id is not None:
queryset = attach_epic_order(queryset, epic_id)
queryset = queryset.extra(select={"include_epic_order": "True"})
queryset = attach_total_voters_to_queryset(queryset)
queryset = attach_watchers_to_queryset(queryset)
queryset = attach_total_watchers_to_queryset(queryset)
queryset = attach_is_voter_to_queryset(queryset, user)
queryset = attach_is_watcher_to_queryset(queryset, user)
queryset = attach_total_comments_to_queryset(queryset)
return queryset
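# Illustrative call site (a sketch; the queryset and user are assumptions,
# not part of this module):
#
#   qs = UserStory.objects.filter(project=project)
#   qs = attach_extra_info(qs, user=request.user, include_tasks=True)
#   for us in qs:
#       print(us.total_points_attr, us.tasks_attr)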
| dayatz/taiga-back | taiga/projects/userstories/utils.py | Python | agpl-3.0 | 7,817 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pyramid_sendgrid_webhooks
----------------------------------
Tests for `pyramid_sendgrid_webhooks` module.
"""
from __future__ import unicode_literals
import unittest
import pyramid_sendgrid_webhooks as psw
from pyramid_sendgrid_webhooks import events, errors
class EventGrabber(object):
""" Grabs events as they're dispatched """
def __init__(self):
self.events = []
self.last = None
def __call__(self, event):
self.events.append(event)
self.last = event
def simple_app(global_config, **settings):
from pyramid.config import Configurator
config = Configurator(settings=settings)
config.include('pyramid_sendgrid_webhooks', WebhookTestBase._PREFIX)
config.registry.grabber = EventGrabber()
config.add_subscriber(config.registry.grabber, events.BaseWebhookEvent)
return config.make_wsgi_app()
class WebhookTestBase(unittest.TestCase):
_PREFIX = '/webhook'
_PATH = _PREFIX + '/receive'
def setUp(self):
from pyramid import testing
self.request = testing.DummyRequest()
self.config = testing.setUp(request=self.request)
def tearDown(self):
from pyramid import testing
testing.tearDown()
def _createGrabber(self, event_cls=events.BaseWebhookEvent):
grabber = EventGrabber()
self.config.add_subscriber(grabber, event_cls)
return grabber
def _createRequest(self, event_body):
if not isinstance(event_body, list):
event_body = [event_body]
self.request.json_body = event_body
return self.request
def _createApp(self, event_cls=events.BaseWebhookEvent):
from webtest.app import TestApp
app = TestApp(simple_app({}))
app.grabber = app.app.registry.grabber
return app
class TestBaseEvents(WebhookTestBase):
def _makeOne(self, event_type='bounce', category='category'):
return {
'asm_group_id': 1,
'category': category,
'cert_error': '0',
'email': '[email protected]',
'event': event_type,
'ip': '127.0.0.1',
'reason': '500 No Such User',
'smtp-id': '<[email protected]>',
'status': '5.0.0',
'timestamp': 1249948800,
'tls': '1',
'type': 'bounce',
'unique_arg_key': 'unique_arg_value',
}
def _create_dt(self):
import datetime
return datetime.datetime(2009, 8, 11, 0, 0)
def test_event_parsed(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertEqual(len(grabber.events), 1)
def test_event_parsed_from_request(self):
app = self._createApp()
grabber = app.grabber
app.post_json(self._PATH, [self._makeOne()])
self.assertEqual(len(grabber.events), 1)
def test_multiple_events_parsed_from_request(self, n=3):
app = self._createApp()
grabber = app.grabber
app.post_json(self._PATH, [self._makeOne()] * n)
self.assertEqual(len(grabber.events), n)
def test_specific_event_caught(self):
grabber = self._createGrabber(events.BounceEvent)
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertEqual(len(grabber.events), 1)
def test_unspecified_event_ignored(self):
grabber = self._createGrabber(events.DeferredEvent)
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertEqual(len(grabber.events), 0)
def test_timestamp_parsed(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertEqual(grabber.last.dt, self._create_dt())
def test_unique_arguments_extracted(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertDictEqual(grabber.last.unique_arguments, {
'unique_arg_key': 'unique_arg_value',
})
def test_correct_subclass(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertIsInstance(grabber.last, events.BounceEvent)
def test_unknown_event_raises_exception(self):
request = self._createRequest(self._makeOne(event_type='UNKNOWN'))
self.assertRaises(
errors.UnknownEventError, psw.receive_events, request)
def test_single_category_is_list_wrapped(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertEqual([grabber.last.category], grabber.last.categories)
def test_multiple_categories_are_unchanged(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne(category=['c1', 'c2']))
psw.receive_events(request)
self.assertEqual(grabber.last.category, grabber.last.categories)
def test_empty_categories_is_empty_list(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne(category=None))
psw.receive_events(request)
self.assertEqual(grabber.last.categories, [])
class TestDeliveryEvents(WebhookTestBase):
def _makeOne(self):
return {
'asm_group_id': 1,
'category': ['category1', 'category2'],
'cert_error': '0',
'email': '[email protected]',
'event': 'bounce',
'ip': '127.0.0.1',
'reason': '500 No Such User',
'smtp-id': '<[email protected]>',
'status': '5.0.0',
'timestamp': 1249948800,
'tls': '1',
'type': 'bounce',
'unique_arg_key': 'unique_arg_value',
}
class TestEngagementEvents(WebhookTestBase):
def _makeOne(self):
return {
'asm_group_id': 1,
'category': ['category1', 'category2'],
'email': '[email protected]',
'event': 'click',
'ip': '255.255.255.255',
'timestamp': 1249948800,
'unique_arg_key': 'unique_arg_value',
'url': 'http://yourdomain.com/blog/news.html',
'useragent': 'Example Useragent',
}
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
| GoodRx/pyramid-sendgrid-webhooks | tests/test_pyramid_sendgrid_webhooks.py | Python | mit | 6,637 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com [email protected]
import sys
import optparse
from thumbor import __version__
from libthumbor import CryptoURL
from thumbor.config import Config
def main(arguments=None): # NOQA
    '''Generates a signed thumbor URL for the given image with the specified arguments.'''
if arguments is None:
arguments = sys.argv[1:]
parser = optparse.OptionParser(
usage='thumbor-url [options] imageurl or type thumbor-url -h (--help) for help',
description=__doc__,
version=__version__
)
parser.add_option(
'-l', '--key_file', dest='key_file', default=None, help='The file to read the security key from [default: %default].'
)
parser.add_option(
'-k', '--key', dest='key', default=None, help='The security key to encrypt the url with [default: %default].'
)
parser.add_option(
'-w', '--width', dest='width', type='int', default=0, help='The target width for the image [default: %default].'
)
parser.add_option(
'-e', '--height', dest='height', type='int', default=0, help='The target height for the image [default: %default].'
)
parser.add_option(
'-n', '--fitin', dest='fitin', action='store_true', default=False,
help='Indicates that fit-in resizing should be performed.'
)
parser.add_option(
'-m', '--meta', dest='meta', action='store_true', default=False,
help='Indicates that meta information should be retrieved.'
)
parser.add_option(
'', '--adaptive', action='store_true', dest='adaptive', default=False,
help='Indicates that adaptive fit-in cropping should be used.'
)
parser.add_option(
'', '--full', action='store_true', dest='full', default=False, help='Indicates that fit-full cropping should be used.'
)
parser.add_option(
'-s', '--smart', action='store_true', dest='smart', default=False, help='Indicates that smart cropping should be used.'
)
parser.add_option(
'-t', '--trim', action='store_true', default=False, help='Indicate that surrounding whitespace should be trimmed.'
)
parser.add_option(
'-f', '--horizontal-flip', action='store_true', dest='horizontal_flip', default=False,
help='Indicates that the image should be horizontally flipped.'
)
parser.add_option(
'-v', '--vertical-flip', action='store_true', dest='vertical_flip', default=False,
help='Indicates that the image should be vertically flipped.'
)
parser.add_option(
'-a', '--halign', dest='halign', default='center',
help='The horizontal alignment to use for cropping [default: %default].'
)
parser.add_option(
'-i', '--valign', dest='valign', default='middle',
help='The vertical alignment to use for cropping [default: %default].'
)
parser.add_option(
'', '--filters', dest='filters', action='append',
help='Filters to be applied to the image, e.g. brightness(10).'
)
parser.add_option(
'-o', '--old-format', dest='old', action='store_true', default=False,
help='Indicates that thumbor should generate old-format urls [default: %default].'
)
parser.add_option(
'-c', '--crop', dest='crop', default=None,
        help='The coordinates of the points for manual cropping, in the format leftxtop:rightxbottom '
        '(100x200:400x500) [default: %default].'
)
(parsed_options, arguments) = parser.parse_args(arguments)
if not arguments:
print 'Error: The image argument is mandatory. For more information type thumbor-url -h'
return
image_url = arguments[0]
if image_url.startswith('/'):
image_url = image_url[1:]
try:
config = Config.load(None)
except:
config = None
if not parsed_options.key and not config:
print 'Error: The -k or --key argument is mandatory. For more information type thumbor-url -h'
return
if parsed_options.key_file:
f = open(parsed_options.key_file)
security_key = f.read().strip()
f.close()
else:
security_key = config.SECURITY_KEY if not parsed_options.key else parsed_options.key
crop_left = crop_top = crop_right = crop_bottom = 0
if parsed_options.crop:
crops = parsed_options.crop.split(':')
crop_left, crop_top = crops[0].split('x')
crop_right, crop_bottom = crops[1].split('x')
options = {
'old': parsed_options.old,
'width': parsed_options.width,
'height': parsed_options.height,
'smart': parsed_options.smart,
'meta': parsed_options.meta,
'horizontal_flip': parsed_options.horizontal_flip,
'vertical_flip': parsed_options.vertical_flip,
'halign': parsed_options.halign,
'valign': parsed_options.valign,
'trim': parsed_options.trim,
'crop_left': crop_left,
'crop_top': crop_top,
'crop_right': crop_right,
'crop_bottom': crop_bottom,
'filters': parsed_options.filters,
'image_url': image_url,
}
if parsed_options.fitin and parsed_options.full and parsed_options.adaptive:
options['adaptive_full_fit_in'] = True
elif parsed_options.fitin and parsed_options.full:
options['full_fit_in'] = True
elif parsed_options.fitin and parsed_options.adaptive:
options['adaptive_fit_in'] = True
elif parsed_options.fitin:
options['fit_in'] = True
crypto = CryptoURL(key=security_key)
url = crypto.generate(**options)
print 'URL:'
print url
return url
if __name__ == '__main__':
main(sys.argv[1:])
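# Illustrative invocation (a sketch; the key and URL are placeholders):
#
#   thumbor-url --key=MY_SECURE_KEY --width=300 --height=200 --smart \
#       http://example.com/image.jpg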
| wking/thumbor | thumbor/url_composer.py | Python | mit | 5,855 |
import time, sys, getopt, configparser
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def main(argv):
    usage = ("PyPhantom\n"
             "--config=<Config file, e.g. config.ini>\n"
             "--url=<URL to capture, e.g. http://google.com>\n"
             "--screenshot=<File to save screenshot as, e.g. screenshot (optional)>")
    try:
        # the short options need trailing colons so they can accept arguments
        opts, args = getopt.getopt(argv, "hc:u:s:", ["config=", "url=", "screenshot="])
    except getopt.GetoptError:
        print(usage)
        sys.exit(2)
    config_file = ""
    url = ""
    screenshot = ""
    for opt, arg in opts:
        if opt == "-h":
            print(usage)
            sys.exit()
        elif opt in ("-c", "--config"):
            if arg == "":
                print(usage)
                sys.exit()
            config_file = arg
        elif opt in ("-u", "--url"):
            if arg == "":
                print(usage)
                sys.exit()
            url = arg
        elif opt in ("-s", "--screenshot"):
            screenshot = arg
    if config_file == "" or url == "":
        print(usage)
        sys.exit()
Config = init(config_file)
print(phantomGet(Config, url, screenshot))
def init(config_file):
Config = get_config(config_file)
if ("phantomjs" not in Config or Config["phantomjs"] == ""):
print("Error reading config file, PhantomJS startup path missing")
sys.exit()
if ("waittime" not in Config or Config["waittime"] == ""):
print("Error reading config file, WaitTime missing")
sys.exit()
return Config
def get_config(config_file):
Config = configparser.ConfigParser()
Config.read(config_file)
ConfigDict = {}
Options = Config.options("PyPhantom")
for Option in Options:
try:
ConfigDict[Option] = Config.get("PyPhantom", Option)
except:
print("Exception reading config file option %s!" % Option)
ConfigDict[Option] = None
return ConfigDict
def phantomGet(Config, url, screenshot):
driver = webdriver.PhantomJS(Config["phantomjs"])
driver.set_window_size(1024, 768)
driver.set_page_load_timeout(180)
driver.get(url)
time.sleep(int(Config["waittime"]))
if screenshot != "":
driver.save_screenshot(screenshot + ".png")
res = {"result":driver.page_source, "screenshot":screenshot + ".png"}
else:
res = {"result":driver.page_source}
driver.close()
driver.quit()
return res
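# Illustrative invocation (a sketch; file names are placeholders, and
# config.ini is expected to contain a [PyPhantom] section providing the
# "phantomjs" and "waittime" options read in init()):
#
#   python pyphantom.py --config=config.ini --url=http://example.com --screenshot=shot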
if __name__ == "__main__":
    main(sys.argv[1:])
| ryanskidmore/PyPhantom | pyphantom.py | Python | mit | 2,928 |
import logging, traceback, time
''' Phidget abstraction layer '''
from Phidgets.Manager import Manager
from Phidgets import Devices
from Phidgets.PhidgetException import PhidgetException
_ENCODER_TICKS_PER_REVOLUTION = 80
class __PhidgetWrapper:
def __init__(self, phidget):
self._phidget = phidget
def __getattr__(self, attr):
phidget = getattr(self, "_phidget")
method = getattr(phidget, attr)
def safe(*args):
try:
return method(*args)
except PhidgetException as pex:
_log.debug("PhidgetException in %s%s: %s" % (attr,args,pex.details) )
return None
return safe
def _init():
global _log, _serial2phidgets, _manager, _encoderHistory, _encoderAsAxis, _lastPollDevices, _sync
_log = logging.getLogger("phidgets")
_serial2phidgets = dict()
_encoderHistory = dict()
_encoderAsAxis = dict()
_lastPollDevices = 0
_sync = 0
try:
_manager = Manager()
_manager.openManager()
except Exception as e:
_manager = None
_log.warning("Cannot initialize support for Phidgets (%s)", e)
_log.debug(traceback.format_exc())
all()
def num():
return len(_manager.getAttachedDevices()) if _manager else 0
def _phidget(serial):
try:
return _serial2phidgets[serial]
except KeyError:
pass
if not _manager:
raise EnvironmentError("phidgets not initialized")
for device in _manager.getAttachedDevices():
if device.getSerialNum() == serial:
try:
ptype = "Phidgets.Devices."+Devices.__all__[device.getDeviceClass()]+"."+Devices.__all__[device.getDeviceClass()]
_log.debug("Trying class %s for #%s" % (ptype, device.getSerialNum()))
phidget = _classbyname(ptype)()
except:
_log.debug(traceback.format_exc())
raise EnvironmentError("No specific wrapper for wrapper %s can be found" % phidget.getDeviceType)
_log.info("phidgets.get(%s) # returns %s" % (device.getSerialNum(), ptype) )
phidget = __PhidgetWrapper(phidget)
phidget.openPhidget(serial)
_serial2phidgets[serial] = phidget
return phidget
raise EnvironmentError("phidgets.get(%s) is not connected" % serial)
def _classbyname(name):
parts = name.split('.')
m = __import__(".".join(parts[:-1]))
for p in parts[1:]:
m = getattr(m, p)
return m
def all(): #@ReservedAssignment
global _lastPollDevices
if time.clock() - _lastPollDevices > 3:
if _manager:
for p in _manager.getAttachedDevices():
_phidget(p.getSerialNum())
_lastPollDevices = time.clock()
return _serial2phidgets.values()
def get(serial):
phidget = _phidget(serial)
if not phidget.isAttached():
phidget.waitForAttach(1000)
if not phidget.isAttached():
raise EnvironmentError("phidgets.get(%s) is not ready for use" % serial)
return phidget
def flatten(phidget):
return "%s #%s" % (phidget.getDeviceName(), phidget.getSerialNum())
def sync():
global _sync
    # look for unwatched encoder axes that need to be re-aligned because
    # the encoder was rotated
for ((encoder,key), (sync, datum, old)) in _encoderAsAxis.items():
if sync == _sync:
continue
new = encoder.getPosition(0)
if new == old:
continue
_encoderAsAxis[(encoder,key)] = (_sync+1, datum+new-old, new)
# next sync
_sync += 1
'''
returns an axis value -1<=v<=1 for a given encoder
'''
def getAxis(encoder, key=None, revolutions=1, default=0):
pos = encoder.getPosition(0)
key = (encoder, key)
range = revolutions*_ENCODER_TICKS_PER_REVOLUTION
if not key in _encoderAsAxis:
value = default
datum = pos + int( float(range)/2 * default)
else:
_,datum,_ = _encoderAsAxis.get(key)
datum, pos, _ = _rerange(datum, pos, datum + int(range))
value = (pos-datum) / float(range) * 2 - 1
_encoderAsAxis[key] = (_sync, datum, pos)
return value
'''
l<v<u returns l<v<u
l<u<v returns l<v=u
v<l<u returns l=v<u
'''
def _rerange(lower, value, upper):
if value>upper:
return value-(upper-lower), value, value
elif value<lower:
return value, value, value+(upper-lower)
return lower, value, upper
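# The three _rerange() cases, illustrated (comments only):
#
#   _rerange(0, 5, 10)   # -> (0, 5, 10)   value inside the window: unchanged
#   _rerange(0, 12, 10)  # -> (2, 12, 12)  value above: window slides up
#   _rerange(0, -3, 10)  # -> (-3, -3, 7)  value below: window slides down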
''' translates boundless encoder positions into increments per revolution '''
def getDelta(encoder, ticks=8):
# pos has to move to one of the 'click' areas
pos = int(encoder.getPosition(0) / (_ENCODER_TICKS_PER_REVOLUTION/ticks/2) )
if pos&1:
return 0
pos >>= 1
# calculate delta to current position
delta = _encoderHistory.get(encoder, pos) - pos
_encoderHistory[encoder] = pos
return delta
_init()
| Denney/SimScript | modules/phidgets.py | Python | bsd-3-clause | 5,242 |
"""Formal Power Series"""
from __future__ import print_function, division
from collections import defaultdict
from sympy import oo, zoo, nan
from sympy.core.expr import Expr
from sympy.core.add import Add
from sympy.core.mul import Mul
from sympy.core.function import Derivative, Function
from sympy.core.singleton import S
from sympy.core.sympify import sympify
from sympy.core.symbol import Wild, Dummy, symbols, Symbol
from sympy.core.relational import Eq
from sympy.core.numbers import Rational
from sympy.core.compatibility import iterable
from sympy.sets.sets import Interval
from sympy.functions.combinatorial.factorials import binomial, factorial, rf
from sympy.functions.elementary.piecewise import Piecewise
from sympy.functions.elementary.integers import floor, frac, ceiling
from sympy.functions.elementary.miscellaneous import Min, Max
from sympy.series.sequences import sequence
from sympy.series.series_class import SeriesBase
from sympy.series.order import Order
from sympy.series.limits import Limit
def rational_algorithm(f, x, k, order=4, full=False):
"""Rational algorithm for computing
formula of coefficients of Formal Power Series
of a function.
Applicable when f(x) or some derivative of f(x)
is a rational function in x.
:func:`rational_algorithm` uses :func:`apart` function for partial fraction
decomposition. :func:`apart` by default uses 'undetermined coefficients
method'. By setting ``full=True``, 'Bronstein's algorithm' can be used
instead.
    Looks for derivatives of the function up to 4th order (by default).
    This can be overridden using the order option.
Returns
=======
formula : Expr
ind : Expr
Independent terms.
order : int
Examples
========
>>> from sympy import log, atan, I
>>> from sympy.series.formal import rational_algorithm as ra
>>> from sympy.abc import x, k
>>> ra(1 / (1 - x), x, k)
(1, 0, 0)
>>> ra(log(1 + x), x, k)
(-(-1)**(-k)/k, 0, 1)
>>> ra(atan(x), x, k, full=True)
((-I*(-I)**(-k)/2 + I*I**(-k)/2)/k, 0, 1)
Notes
=====
By setting ``full=True``, range of admissible functions to be solved using
``rational_algorithm`` can be increased. This option should be used
carefully as it can significantly slow down the computation as ``doit`` is
performed on the :class:`RootSum` object returned by the ``apart`` function.
Use ``full=False`` whenever possible.
See Also
========
sympy.polys.partfrac.apart
References
==========
.. [1] Formal Power Series - Dominik Gruntz, Wolfram Koepf
.. [2] Power Series in Computer Algebra - Wolfram Koepf
"""
from sympy.polys import RootSum, apart
from sympy.integrals import integrate
diff = f
ds = [] # list of diff
for i in range(order + 1):
if i:
diff = diff.diff(x)
if diff.is_rational_function(x):
coeff, sep = S.Zero, S.Zero
terms = apart(diff, x, full=full)
if terms.has(RootSum):
terms = terms.doit()
for t in Add.make_args(terms):
num, den = t.as_numer_denom()
if not den.has(x):
sep += t
else:
if isinstance(den, Mul):
# m*(n*x - a)**j -> (n*x - a)**j
ind = den.as_independent(x)
den = ind[1]
num /= ind[0]
# (n*x - a)**j -> (x - b)
den, j = den.as_base_exp()
a, xterm = den.as_coeff_add(x)
# term -> m/x**n
if not a:
sep += t
continue
xc = xterm[0].coeff(x)
a /= -xc
num /= xc**j
ak = ((-1)**j * num *
binomial(j + k - 1, k).rewrite(factorial) /
a**(j + k))
coeff += ak
# Hacky, better way?
if coeff is S.Zero:
return None
if (coeff.has(x) or coeff.has(zoo) or coeff.has(oo) or
coeff.has(nan)):
return None
for j in range(i):
coeff = (coeff / (k + j + 1))
sep = integrate(sep, x)
sep += (ds.pop() - sep).limit(x, 0) # constant of integration
return (coeff.subs(k, k - i), sep, i)
else:
ds.append(diff)
return None
def rational_independent(terms, x):
"""Returns a list of all the rationally independent terms.
Examples
========
>>> from sympy import sin, cos
>>> from sympy.series.formal import rational_independent
>>> from sympy.abc import x
>>> rational_independent([cos(x), sin(x)], x)
[cos(x), sin(x)]
>>> rational_independent([x**2, sin(x), x*sin(x), x**3], x)
[x**3 + x**2, x*sin(x) + sin(x)]
"""
if not terms:
return []
ind = terms[0:1]
for t in terms[1:]:
n = t.as_independent(x)[1]
for i, term in enumerate(ind):
d = term.as_independent(x)[1]
q = (n / d).cancel()
if q.is_rational_function(x):
ind[i] += t
break
else:
ind.append(t)
return ind
def simpleDE(f, x, g, order=4):
r"""Generates simple DE.
DE is of the form
.. math::
f^k(x) + \sum\limits_{j=0}^{k-1} A_j f^j(x) = 0
    where :math:`A_j` should be a rational function in x.
    Generates DEs up to order 4 (by default). DEs can also have free
    parameters. By increasing the order, higher-order DEs can be found.
    Yields a tuple of (DE, order).
"""
from sympy.solvers.solveset import linsolve
a = symbols('a:%d' % (order))
def _makeDE(k):
eq = f.diff(x, k) + Add(*[a[i]*f.diff(x, i) for i in range(0, k)])
DE = g(x).diff(x, k) + Add(*[a[i]*g(x).diff(x, i) for i in range(0, k)])
return eq, DE
eq, DE = _makeDE(order)
found = False
for k in range(1, order + 1):
eq, DE = _makeDE(k)
eq = eq.expand()
terms = eq.as_ordered_terms()
ind = rational_independent(terms, x)
if found or len(ind) == k:
sol = dict(zip(a, (i for s in linsolve(ind, a[:k]) for i in s)))
if sol:
found = True
DE = DE.subs(sol)
DE = DE.as_numer_denom()[0]
DE = DE.factor().as_coeff_mul(Derivative)[1][0]
yield DE.collect(Derivative(g(x))), k
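# Example of what the generator yields (a sketch): for f = exp(x) the first
# solvable order is k = 1, giving (-g(x) + Derivative(g(x), x), 1), i.e. the
# familiar g' - g = 0.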
def exp_re(DE, r, k):
"""Converts a DE with constant coefficients (explike) into a RE.
Performs the substitution:
.. math::
f^j(x) \\to r(k + j)
    Normalises the terms so that the lowest order term is always r(k).
Examples
========
>>> from sympy import Function, Derivative
>>> from sympy.series.formal import exp_re
>>> from sympy.abc import x, k
>>> f, r = Function('f'), Function('r')
>>> exp_re(-f(x) + Derivative(f(x)), r, k)
-r(k) + r(k + 1)
>>> exp_re(Derivative(f(x), x) + Derivative(f(x), (x, 2)), r, k)
r(k) + r(k + 1)
See Also
========
sympy.series.formal.hyper_re
"""
RE = S.Zero
g = DE.atoms(Function).pop()
mini = None
for t in Add.make_args(DE):
coeff, d = t.as_independent(g)
if isinstance(d, Derivative):
j = d.derivative_count
else:
j = 0
if mini is None or j < mini:
mini = j
RE += coeff * r(k + j)
if mini:
RE = RE.subs(k, k - mini)
return RE
def hyper_re(DE, r, k):
"""Converts a DE into a RE.
Performs the substitution:
.. math::
        x^l f^j(x) \\to (k + 1 - l)_j \\cdot a_{k + j - l}
    Normalises the terms so that the lowest order term is always r(k).
Examples
========
>>> from sympy import Function, Derivative
>>> from sympy.series.formal import hyper_re
>>> from sympy.abc import x, k
>>> f, r = Function('f'), Function('r')
>>> hyper_re(-f(x) + Derivative(f(x)), r, k)
(k + 1)*r(k + 1) - r(k)
>>> hyper_re(-x*f(x) + Derivative(f(x), (x, 2)), r, k)
(k + 2)*(k + 3)*r(k + 3) - r(k)
See Also
========
sympy.series.formal.exp_re
"""
RE = S.Zero
g = DE.atoms(Function).pop()
x = g.atoms(Symbol).pop()
mini = None
for t in Add.make_args(DE.expand()):
coeff, d = t.as_independent(g)
c, v = coeff.as_independent(x)
l = v.as_coeff_exponent(x)[1]
if isinstance(d, Derivative):
j = d.derivative_count
else:
j = 0
RE += c * rf(k + 1 - l, j) * r(k + j - l)
if mini is None or j - l < mini:
mini = j - l
RE = RE.subs(k, k - mini)
m = Wild('m')
return RE.collect(r(k + m))
def _transformation_a(f, x, P, Q, k, m, shift):
f *= x**(-shift)
P = P.subs(k, k + shift)
Q = Q.subs(k, k + shift)
return f, P, Q, m
def _transformation_c(f, x, P, Q, k, m, scale):
f = f.subs(x, x**scale)
P = P.subs(k, k / scale)
Q = Q.subs(k, k / scale)
m *= scale
return f, P, Q, m
def _transformation_e(f, x, P, Q, k, m):
f = f.diff(x)
P = P.subs(k, k + 1) * (k + m + 1)
Q = Q.subs(k, k + 1) * (k + 1)
return f, P, Q, m
def _apply_shift(sol, shift):
return [(res, cond + shift) for res, cond in sol]
def _apply_scale(sol, scale):
return [(res, cond / scale) for res, cond in sol]
def _apply_integrate(sol, x, k):
return [(res / ((cond + 1)*(cond.as_coeff_Add()[1].coeff(k))), cond + 1)
for res, cond in sol]
def _compute_formula(f, x, P, Q, k, m, k_max):
"""Computes the formula for f."""
from sympy.polys import roots
sol = []
for i in range(k_max + 1, k_max + m + 1):
r = f.diff(x, i).limit(x, 0) / factorial(i)
if r is S.Zero:
continue
kterm = m*k + i
res = r
p = P.subs(k, kterm)
q = Q.subs(k, kterm)
c1 = p.subs(k, 1/k).leadterm(k)[0]
c2 = q.subs(k, 1/k).leadterm(k)[0]
res *= (-c1 / c2)**k
for r, mul in roots(p, k).items():
res *= rf(-r, k)**mul
for r, mul in roots(q, k).items():
res /= rf(-r, k)**mul
sol.append((res, kterm))
return sol
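# Shape of the result (a sketch): each nonzero seed r = f^(i)(0)/i! becomes a
# pair (res, m*k + i), where res folds in (-c1/c2)**k together with the rising
# factorials coming from the roots of P and Q, as assembled in the loop above.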
def _rsolve_hypergeometric(f, x, P, Q, k, m):
"""Recursive wrapper to rsolve_hypergeometric.
Returns a Tuple of (formula, series independent terms,
maximum power of x in independent terms) if successful
otherwise ``None``.
See :func:`rsolve_hypergeometric` for details.
"""
from sympy.polys import lcm, roots
from sympy.integrals import integrate
# transformation - c
proots, qroots = roots(P, k), roots(Q, k)
all_roots = dict(proots)
all_roots.update(qroots)
scale = lcm([r.as_numer_denom()[1] for r, t in all_roots.items()
if r.is_rational])
f, P, Q, m = _transformation_c(f, x, P, Q, k, m, scale)
# transformation - a
qroots = roots(Q, k)
if qroots:
k_min = Min(*qroots.keys())
else:
k_min = S.Zero
shift = k_min + m
f, P, Q, m = _transformation_a(f, x, P, Q, k, m, shift)
l = (x*f).limit(x, 0)
if not isinstance(l, Limit) and l != 0: # Ideally should only be l != 0
return None
qroots = roots(Q, k)
if qroots:
k_max = Max(*qroots.keys())
else:
k_max = S.Zero
ind, mp = S.Zero, -oo
for i in range(k_max + m + 1):
r = f.diff(x, i).limit(x, 0) / factorial(i)
if r.is_finite is False:
old_f = f
f, P, Q, m = _transformation_a(f, x, P, Q, k, m, i)
f, P, Q, m = _transformation_e(f, x, P, Q, k, m)
sol, ind, mp = _rsolve_hypergeometric(f, x, P, Q, k, m)
sol = _apply_integrate(sol, x, k)
sol = _apply_shift(sol, i)
ind = integrate(ind, x)
ind += (old_f - ind).limit(x, 0) # constant of integration
mp += 1
return sol, ind, mp
elif r:
ind += r*x**(i + shift)
pow_x = Rational((i + shift), scale)
if pow_x > mp:
mp = pow_x # maximum power of x
ind = ind.subs(x, x**(1/scale))
sol = _compute_formula(f, x, P, Q, k, m, k_max)
sol = _apply_shift(sol, shift)
sol = _apply_scale(sol, scale)
return sol, ind, mp
def rsolve_hypergeometric(f, x, P, Q, k, m):
"""Solves RE of hypergeometric type.
Attempts to solve RE of the form
Q(k)*a(k + m) - P(k)*a(k)
Transformations that preserve Hypergeometric type:
a. x**n*f(x): b(k + m) = R(k - n)*b(k)
b. f(A*x): b(k + m) = A**m*R(k)*b(k)
c. f(x**n): b(k + n*m) = R(k/n)*b(k)
d. f(x**(1/m)): b(k + 1) = R(k*m)*b(k)
e. f'(x): b(k + m) = ((k + m + 1)/(k + 1))*R(k + 1)*b(k)
Some of these transformations have been used to solve the RE.
Returns
=======
formula : Expr
ind : Expr
Independent terms.
order : int
Examples
========
>>> from sympy import exp, ln, S
>>> from sympy.series.formal import rsolve_hypergeometric as rh
>>> from sympy.abc import x, k
>>> rh(exp(x), x, -S.One, (k + 1), k, 1)
(Piecewise((1/factorial(k), Eq(Mod(k, 1), 0)), (0, True)), 1, 1)
>>> rh(ln(1 + x), x, k**2, k*(k + 1), k, 1)
(Piecewise(((-1)**(k - 1)*factorial(k - 1)/RisingFactorial(2, k - 1),
Eq(Mod(k, 1), 0)), (0, True)), x, 2)
References
==========
.. [1] Formal Power Series - Dominik Gruntz, Wolfram Koepf
.. [2] Power Series in Computer Algebra - Wolfram Koepf
"""
result = _rsolve_hypergeometric(f, x, P, Q, k, m)
if result is None:
return None
sol_list, ind, mp = result
sol_dict = defaultdict(lambda: S.Zero)
for res, cond in sol_list:
j, mk = cond.as_coeff_Add()
c = mk.coeff(k)
if j.is_integer is False:
res *= x**frac(j)
j = floor(j)
res = res.subs(k, (k - j) / c)
cond = Eq(k % c, j % c)
sol_dict[cond] += res # Group together formula for same conditions
sol = []
for cond, res in sol_dict.items():
sol.append((res, cond))
sol.append((S.Zero, True))
sol = Piecewise(*sol)
if mp is -oo:
s = S.Zero
elif mp.is_integer is False:
s = ceiling(mp)
else:
s = mp + 1
    # save all the terms of the
    # form 1/x**k in ind
if s < 0:
ind += sum(sequence(sol * x**k, (k, s, -1)))
s = S.Zero
return (sol, ind, s)
def _solve_hyper_RE(f, x, RE, g, k):
"""See docstring of :func:`rsolve_hypergeometric` for details."""
terms = Add.make_args(RE)
if len(terms) == 2:
gs = list(RE.atoms(Function))
P, Q = map(RE.coeff, gs)
m = gs[1].args[0] - gs[0].args[0]
if m < 0:
P, Q = Q, P
m = abs(m)
return rsolve_hypergeometric(f, x, P, Q, k, m)
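# Note: only two-term REs of the form P(k)*g(n) + Q(k)*g(n + m) are handled
# here; with any other number of terms the function falls through and
# implicitly returns None, letting the caller try another strategy.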
def _solve_explike_DE(f, x, DE, g, k):
"""Solves DE with constant coefficients."""
from sympy.solvers import rsolve
for t in Add.make_args(DE):
coeff, d = t.as_independent(g)
if coeff.free_symbols:
return
RE = exp_re(DE, g, k)
init = {}
for i in range(len(Add.make_args(RE))):
if i:
f = f.diff(x)
init[g(k).subs(k, i)] = f.limit(x, 0)
sol = rsolve(RE, g(k), init)
if sol:
return (sol / factorial(k), S.Zero, S.Zero)
def _solve_simple(f, x, DE, g, k):
"""Converts DE into RE and solves using :func:`rsolve`."""
from sympy.solvers import rsolve
RE = hyper_re(DE, g, k)
init = {}
for i in range(len(Add.make_args(RE))):
if i:
f = f.diff(x)
init[g(k).subs(k, i)] = f.limit(x, 0) / factorial(i)
sol = rsolve(RE, g(k), init)
if sol:
return (sol, S.Zero, S.Zero)
def _transform_explike_DE(DE, g, x, order, syms):
"""Converts DE with free parameters into DE with constant coefficients."""
from sympy.solvers.solveset import linsolve
eq = []
highest_coeff = DE.coeff(Derivative(g(x), x, order))
for i in range(order):
coeff = DE.coeff(Derivative(g(x), x, i))
coeff = (coeff / highest_coeff).expand().collect(x)
for t in Add.make_args(coeff):
eq.append(t)
temp = []
for e in eq:
if e.has(x):
break
elif e.has(Symbol):
temp.append(e)
else:
eq = temp
if eq:
sol = dict(zip(syms, (i for s in linsolve(eq, list(syms)) for i in s)))
if sol:
DE = DE.subs(sol)
DE = DE.factor().as_coeff_mul(Derivative)[1][0]
DE = DE.collect(Derivative(g(x)))
return DE
def _transform_DE_RE(DE, g, k, order, syms):
"""Converts DE with free parameters into RE of hypergeometric type."""
from sympy.solvers.solveset import linsolve
RE = hyper_re(DE, g, k)
eq = []
for i in range(1, order):
coeff = RE.coeff(g(k + i))
eq.append(coeff)
sol = dict(zip(syms, (i for s in linsolve(eq, list(syms)) for i in s)))
if sol:
m = Wild('m')
RE = RE.subs(sol)
RE = RE.factor().as_numer_denom()[0].collect(g(k + m))
RE = RE.as_coeff_mul(g)[1][0]
for i in range(order): # smallest order should be g(k)
if RE.coeff(g(k + i)) and i:
RE = RE.subs(k, k - i)
break
return RE
def solve_de(f, x, DE, order, g, k):
"""Solves the DE.
    Tries to solve the DE by either converting it into a RE containing two
    terms, or converting it into a DE with constant coefficients.
Returns
=======
formula : Expr
ind : Expr
Independent terms.
order : int
Examples
========
>>> from sympy import Derivative as D
>>> from sympy import exp, ln
>>> from sympy.series.formal import solve_de
>>> from sympy.abc import x, k, f
>>> solve_de(exp(x), x, D(f(x), x) - f(x), 1, f, k)
(Piecewise((1/factorial(k), Eq(Mod(k, 1), 0)), (0, True)), 1, 1)
>>> solve_de(ln(1 + x), x, (x + 1)*D(f(x), x, 2) + D(f(x)), 2, f, k)
(Piecewise(((-1)**(k - 1)*factorial(k - 1)/RisingFactorial(2, k - 1),
Eq(Mod(k, 1), 0)), (0, True)), x, 2)
"""
sol = None
syms = DE.free_symbols.difference({g, x})
if syms:
RE = _transform_DE_RE(DE, g, k, order, syms)
else:
RE = hyper_re(DE, g, k)
if not RE.free_symbols.difference({k}):
sol = _solve_hyper_RE(f, x, RE, g, k)
if sol:
return sol
if syms:
DE = _transform_explike_DE(DE, g, x, order, syms)
if not DE.free_symbols.difference({x}):
sol = _solve_explike_DE(f, x, DE, g, k)
if sol:
return sol
def hyper_algorithm(f, x, k, order=4):
"""Hypergeometric algorithm for computing Formal Power Series.
Steps:
* Generates DE
    * Converts the DE into a RE
* Solves the RE
Examples
========
>>> from sympy import exp, ln
>>> from sympy.series.formal import hyper_algorithm
>>> from sympy.abc import x, k
>>> hyper_algorithm(exp(x), x, k)
(Piecewise((1/factorial(k), Eq(Mod(k, 1), 0)), (0, True)), 1, 1)
>>> hyper_algorithm(ln(1 + x), x, k)
(Piecewise(((-1)**(k - 1)*factorial(k - 1)/RisingFactorial(2, k - 1),
Eq(Mod(k, 1), 0)), (0, True)), x, 2)
See Also
========
sympy.series.formal.simpleDE
sympy.series.formal.solve_de
"""
g = Function('g')
des = [] # list of DE's
sol = None
for DE, i in simpleDE(f, x, g, order):
if DE is not None:
sol = solve_de(f, x, DE, i, g, k)
if sol:
return sol
if not DE.free_symbols.difference({x}):
des.append(DE)
# If nothing works
# Try plain rsolve
for DE in des:
sol = _solve_simple(f, x, DE, g, k)
if sol:
return sol
def _compute_fps(f, x, x0, dir, hyper, order, rational, full):
"""Recursive wrapper to compute fps.
See :func:`compute_fps` for details.
"""
if x0 in [S.Infinity, -S.Infinity]:
dir = S.One if x0 is S.Infinity else -S.One
temp = f.subs(x, 1/x)
result = _compute_fps(temp, x, 0, dir, hyper, order, rational, full)
if result is None:
return None
return (result[0], result[1].subs(x, 1/x), result[2].subs(x, 1/x))
elif x0 or dir == -S.One:
if dir == -S.One:
rep = -x + x0
rep2 = -x
rep2b = x0
else:
rep = x + x0
rep2 = x
rep2b = -x0
temp = f.subs(x, rep)
result = _compute_fps(temp, x, 0, S.One, hyper, order, rational, full)
if result is None:
return None
return (result[0], result[1].subs(x, rep2 + rep2b),
result[2].subs(x, rep2 + rep2b))
if f.is_polynomial(x):
return None
    # Break instances of Add: this allows application of different
    # algorithms on different terms, increasing the range of
    # admissible functions.
if isinstance(f, Add):
result = False
ak = sequence(S.Zero, (0, oo))
ind, xk = S.Zero, None
for t in Add.make_args(f):
res = _compute_fps(t, x, 0, S.One, hyper, order, rational, full)
if res:
if not result:
result = True
xk = res[1]
if res[0].start > ak.start:
seq = ak
s, f = ak.start, res[0].start
else:
seq = res[0]
s, f = res[0].start, ak.start
save = Add(*[z[0]*z[1] for z in zip(seq[0:(f - s)], xk[s:f])])
ak += res[0]
ind += res[2] + save
else:
ind += t
if result:
return ak, xk, ind
return None
result = None
# from here on it's x0=0 and dir=1 handling
k = Dummy('k')
if rational:
result = rational_algorithm(f, x, k, order, full)
if result is None and hyper:
result = hyper_algorithm(f, x, k, order)
if result is None:
return None
ak = sequence(result[0], (k, result[2], oo))
xk = sequence(x**k, (k, 0, oo))
ind = result[1]
return ak, xk, ind
def compute_fps(f, x, x0=0, dir=1, hyper=True, order=4, rational=True,
full=False):
"""Computes the formula for Formal Power Series of a function.
Tries to compute the formula by applying the following techniques
(in order):
* rational_algorithm
    * Hypergeometric algorithm
Parameters
==========
x : Symbol
x0 : number, optional
Point to perform series expansion about. Default is 0.
dir : {1, -1, '+', '-'}, optional
If dir is 1 or '+' the series is calculated from the right and
for -1 or '-' the series is calculated from the left. For smooth
functions this flag will not alter the results. Default is 1.
hyper : {True, False}, optional
Set hyper to False to skip the hypergeometric algorithm.
        By default it is set to True.
order : int, optional
        Order of the derivative of ``f``. Default is 4.
rational : {True, False}, optional
Set rational to False to skip rational algorithm. By default it is set
to True.
full : {True, False}, optional
Set full to True to increase the range of rational algorithm.
See :func:`rational_algorithm` for details. By default it is set to
False.
Returns
=======
ak : sequence
Sequence of coefficients.
xk : sequence
Sequence of powers of x.
ind : Expr
Independent terms.
mul : Pow
Common terms.
See Also
========
sympy.series.formal.rational_algorithm
sympy.series.formal.hyper_algorithm
"""
f = sympify(f)
x = sympify(x)
if not f.has(x):
return None
x0 = sympify(x0)
if dir == '+':
dir = S.One
elif dir == '-':
dir = -S.One
elif dir not in [S.One, -S.One]:
raise ValueError("Dir must be '+' or '-'")
else:
dir = sympify(dir)
return _compute_fps(f, x, x0, dir, hyper, order, rational, full)
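# Usage sketch (mirrors the hyper_algorithm doctest above): compute_fps(exp(x), x)
# returns (ak, xk, ind), with ak.formula the Piecewise 1/factorial(k) expression,
# xk.formula == x**k and ind == 1.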
class FormalPowerSeries(SeriesBase):
"""Represents Formal Power Series of a function.
    No computation is performed. This class should only be used to represent
    a series. No checks are performed.
For computing a series use :func:`fps`.
See Also
========
sympy.series.formal.fps
"""
def __new__(cls, *args):
args = map(sympify, args)
return Expr.__new__(cls, *args)
@property
def function(self):
return self.args[0]
@property
def x(self):
return self.args[1]
@property
def x0(self):
return self.args[2]
@property
def dir(self):
return self.args[3]
@property
def ak(self):
return self.args[4][0]
@property
def xk(self):
return self.args[4][1]
@property
def ind(self):
return self.args[4][2]
@property
def interval(self):
return Interval(0, oo)
@property
def start(self):
return self.interval.inf
@property
def stop(self):
return self.interval.sup
@property
def length(self):
return oo
@property
def infinite(self):
"""Returns an infinite representation of the series"""
from sympy.concrete import Sum
ak, xk = self.ak, self.xk
k = ak.variables[0]
inf_sum = Sum(ak.formula * xk.formula, (k, ak.start, ak.stop))
return self.ind + inf_sum
def _get_pow_x(self, term):
"""Returns the power of x in a term."""
xterm, pow_x = term.as_independent(self.x)[1].as_base_exp()
if not xterm.has(self.x):
return S.Zero
return pow_x
def polynomial(self, n=6):
"""Truncated series as polynomial.
        Returns the series expansion of ``f`` up to order ``O(x**n)``
        as a polynomial (without the ``O`` term).
"""
terms = []
for i, t in enumerate(self):
xp = self._get_pow_x(t)
if xp >= n:
break
elif xp.is_integer is True and i == n + 1:
break
elif t is not S.Zero:
terms.append(t)
return Add(*terms)
def truncate(self, n=6):
"""Truncated series.
        Returns the truncated series expansion of ``f`` up to
        order ``O(x**n)``.
        If ``n`` is ``None``, returns an infinite iterator.
"""
if n is None:
return iter(self)
x, x0 = self.x, self.x0
pt_xk = self.xk.coeff(n)
if x0 is S.NegativeInfinity:
x0 = S.Infinity
return self.polynomial(n) + Order(pt_xk, (x, x0))
def _eval_term(self, pt):
try:
pt_xk = self.xk.coeff(pt)
pt_ak = self.ak.coeff(pt).simplify() # Simplify the coefficients
except IndexError:
term = S.Zero
else:
term = (pt_ak * pt_xk)
if self.ind:
ind = S.Zero
for t in Add.make_args(self.ind):
pow_x = self._get_pow_x(t)
if pt == 0 and pow_x < 1:
ind += t
elif pow_x >= pt and pow_x < pt + 1:
ind += t
term += ind
return term.collect(self.x)
def _eval_subs(self, old, new):
x = self.x
if old.has(x):
return self
def _eval_as_leading_term(self, x):
for t in self:
if t is not S.Zero:
return t
def _eval_derivative(self, x):
f = self.function.diff(x)
ind = self.ind.diff(x)
pow_xk = self._get_pow_x(self.xk.formula)
ak = self.ak
k = ak.variables[0]
if ak.formula.has(x):
form = []
for e, c in ak.formula.args:
temp = S.Zero
for t in Add.make_args(e):
pow_x = self._get_pow_x(t)
temp += t * (pow_xk + pow_x)
form.append((temp, c))
form = Piecewise(*form)
ak = sequence(form.subs(k, k + 1), (k, ak.start - 1, ak.stop))
else:
ak = sequence((ak.formula * pow_xk).subs(k, k + 1),
(k, ak.start - 1, ak.stop))
return self.func(f, self.x, self.x0, self.dir, (ak, self.xk, ind))
def integrate(self, x=None, **kwargs):
"""Integrate Formal Power Series.
Examples
========
>>> from sympy import fps, sin, integrate
>>> from sympy.abc import x
>>> f = fps(sin(x))
>>> f.integrate(x).truncate()
-1 + x**2/2 - x**4/24 + O(x**6)
>>> integrate(f, (x, 0, 1))
-cos(1) + 1
"""
from sympy.integrals import integrate
if x is None:
x = self.x
elif iterable(x):
return integrate(self.function, x)
f = integrate(self.function, x)
ind = integrate(self.ind, x)
ind += (f - ind).limit(x, 0) # constant of integration
pow_xk = self._get_pow_x(self.xk.formula)
ak = self.ak
k = ak.variables[0]
if ak.formula.has(x):
form = []
for e, c in ak.formula.args:
temp = S.Zero
for t in Add.make_args(e):
pow_x = self._get_pow_x(t)
temp += t / (pow_xk + pow_x + 1)
form.append((temp, c))
form = Piecewise(*form)
ak = sequence(form.subs(k, k - 1), (k, ak.start + 1, ak.stop))
else:
ak = sequence((ak.formula / (pow_xk + 1)).subs(k, k - 1),
(k, ak.start + 1, ak.stop))
return self.func(f, self.x, self.x0, self.dir, (ak, self.xk, ind))
def __add__(self, other):
other = sympify(other)
if isinstance(other, FormalPowerSeries):
if self.dir != other.dir:
raise ValueError("Both series should be calculated from the"
" same direction.")
elif self.x0 != other.x0:
raise ValueError("Both series should be calculated about the"
" same point.")
x, y = self.x, other.x
f = self.function + other.function.subs(y, x)
if self.x not in f.free_symbols:
return f
ak = self.ak + other.ak
if self.ak.start > other.ak.start:
seq = other.ak
s, e = other.ak.start, self.ak.start
else:
seq = self.ak
s, e = self.ak.start, other.ak.start
save = Add(*[z[0]*z[1] for z in zip(seq[0:(e - s)], self.xk[s:e])])
ind = self.ind + other.ind + save
return self.func(f, x, self.x0, self.dir, (ak, self.xk, ind))
elif not other.has(self.x):
f = self.function + other
ind = self.ind + other
return self.func(f, self.x, self.x0, self.dir,
(self.ak, self.xk, ind))
return Add(self, other)
def __radd__(self, other):
return self.__add__(other)
def __neg__(self):
return self.func(-self.function, self.x, self.x0, self.dir,
(-self.ak, self.xk, -self.ind))
def __sub__(self, other):
return self.__add__(-other)
def __rsub__(self, other):
return (-self).__add__(other)
def __mul__(self, other):
other = sympify(other)
if other.has(self.x):
return Mul(self, other)
f = self.function * other
ak = self.ak.coeff_mul(other)
ind = self.ind * other
return self.func(f, self.x, self.x0, self.dir, (ak, self.xk, ind))
def __rmul__(self, other):
return self.__mul__(other)
def fps(f, x=None, x0=0, dir=1, hyper=True, order=4, rational=True, full=False):
"""Generates Formal Power Series of f.
Returns the formal series expansion of ``f`` around ``x = x0``
with respect to ``x`` in the form of a ``FormalPowerSeries`` object.
    The Formal Power Series is represented using an explicit formula
    computed using different algorithms.
    See :func:`compute_fps` for more details regarding the computation
    of the formula.
Parameters
==========
x : Symbol, optional
        If x is None and ``f`` is univariate, the univariate symbol will be
        supplied; otherwise an error will be raised.
x0 : number, optional
Point to perform series expansion about. Default is 0.
dir : {1, -1, '+', '-'}, optional
If dir is 1 or '+' the series is calculated from the right and
for -1 or '-' the series is calculated from the left. For smooth
functions this flag will not alter the results. Default is 1.
hyper : {True, False}, optional
Set hyper to False to skip the hypergeometric algorithm.
        By default it is set to True.
order : int, optional
        Order of the derivative of ``f``. Default is 4.
rational : {True, False}, optional
Set rational to False to skip rational algorithm. By default it is set
to True.
full : {True, False}, optional
Set full to True to increase the range of rational algorithm.
See :func:`rational_algorithm` for details. By default it is set to
False.
Examples
========
>>> from sympy import fps, O, ln, atan
>>> from sympy.abc import x
Rational Functions
>>> fps(ln(1 + x)).truncate()
x - x**2/2 + x**3/3 - x**4/4 + x**5/5 + O(x**6)
>>> fps(atan(x), full=True).truncate()
x - x**3/3 + x**5/5 + O(x**6)
See Also
========
sympy.series.formal.FormalPowerSeries
sympy.series.formal.compute_fps
"""
f = sympify(f)
if x is None:
free = f.free_symbols
if len(free) == 1:
x = free.pop()
elif not free:
return f
else:
raise NotImplementedError("multivariate formal power series")
result = compute_fps(f, x, x0, dir, hyper, order, rational, full)
if result is None:
return f
return FormalPowerSeries(f, x, x0, dir, result)
| wxgeo/geophar | wxgeometrie/sympy/series/formal.py | Python | gpl-2.0 | 34,626 |
class Node:
def __init__(self, car=None, prevNode=None, nextNode=None):
self.prevNode = prevNode
self.nextNode = nextNode
self.data = car
class LinkedList:
def __init__(self):
self.head = None
class Car:
def __init__(self, identification, name, brand, price, active):
self.identification = int(identification)
self.name = str(name)
self.brand = str(brand)
self.price = int(price)
self.active = bool(active)
db = LinkedList()
def clean():
db.head = None
def init(cars):
clean()
for car in cars:
add(car)
def add(car):
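    # Insert while keeping the list sorted by ascending price; cars with an
    # equal price are placed after the existing ones.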
if getDatabaseHead() is None:
db.head = Node(car)
else:
prevItem = None
item = db.head
while True:
if item is None:
item = Node(car, prevItem)
prevItem.nextNode = item
break
elif item.data.price > car.price:
newItem = Node(car, prevItem, item)
item.prevNode = newItem
if prevItem is not None:
prevItem.nextNode = newItem
else:
db.head = newItem
break
else:
prevItem = item
item = item.nextNode
def updateName(identification, name):
item = db.head
while True:
if item is None:
return None
elif item.data.identification == identification:
item.data.name = name
return
else:
item = item.nextNode
def updateBrand(identification, brand):
item = db.head
while True:
if item is None:
return None
elif item.data.identification == identification:
item.data.brand = brand
return
else:
item = item.nextNode
def activateCar(identification):
item = db.head
while True:
if item is None:
return None
elif item.data.identification == identification:
item.data.active = True
return
else:
item = item.nextNode
def deactivateCar(identification):
item = db.head
while True:
if item is None:
return None
elif item.data.identification == identification:
item.data.active = False
return
else:
item = item.nextNode
def getDatabaseHead():
return db.head
def getDatabase():
return db
def calculateCarPrice():
sumPrice = 0
item = db.head
while item is not None:
if item.data.active:
sumPrice += item.data.price
item = item.nextNode
return sumPrice
def printDatabase():
element = db.head
if element is None:
return
i = 0
while True:
i += 1
print('\nPrinting ' + str(i) + '. car:')
if element.data is not None:
print('\tidentification = ' + str(element.data.identification))
print('\tname = ' + str(element.data.name))
print('\tbrand = ' + str(element.data.brand))
print('\tprice = ' + str(element.data.price))
print('\tactive = ' + str(element.data.active))
else:
print('\t' + str(i) + '. car is None')
if element.nextNode is None:
break
else:
element = element.nextNode
# def initDatabase():
# audi = Car(1, 'R8', 'Audi', 200, True)
# volkswagen = Car(3, 'Passat', 'Volkswagen', 300, True)
# ford = Car(2, 'Mustang', 'Ford', 250, True)
# init([audi, volkswagen, ford])
# first = Car(1, 'Octavia', 'Skoda', 123000, True)
# second = Car(23, 'Felicia', 'Skoda', 5000, True)
# third = Car(11, 'Superb', 'Skoda', 54000, True)
# init([first, second, third])
#
#
# initDatabase()
# printDatabase()
# print('\nsum = ' + str(calculateCarPrice()))
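# A runnable variant of the demo above, guarded so importing the module has
# no side effects (the cars below are hypothetical sample data):
if __name__ == '__main__':
    init([Car(1, 'R8', 'Audi', 200, True),
          Car(2, 'Mustang', 'Ford', 250, False)])
    printDatabase()
    print('\nsum of active cars = ' + str(calculateCarPrice()))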
| Pyronia/cvut.zal | 07/showroom.py | Python | apache-2.0 | 3,922 |
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import time
try:
import json
except ImportError:
import simplejson as json
from nose.tools import assert_true, assert_false, assert_equal, assert_raises
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from desktop.lib.django_test_util import make_logged_in_client
from liboozie.oozie_api_test import OozieServerProvider
from oozie.models import Workflow, Node, Action, Start, Kill, End, Link
LOG = logging.getLogger(__name__)
class TestJobsubWithHadoop(OozieServerProvider):
def setUp(self):
OozieServerProvider.setup_class()
self.cluster.fs.do_as_user('test', self.cluster.fs.create_home_dir, '/user/jobsub_test')
self.cluster.fs.do_as_superuser(self.cluster.fs.chmod, '/user/jobsub_test', 0777, True)
self.client = make_logged_in_client(username='jobsub_test')
# Ensure access to MR folder.
# Need to chmod because jobs are submitted as a
# different user than what was previously used.
for i in range(0,10):
try:
self.cluster.fs.do_as_superuser(self.cluster.fs.chmod, '/tmp', 0777, recursive=True)
break
except Exception, e:
        # chmod failure is likely due to async processing of resource deletion.
# If the directory has improper permissions, should fail later in the test case.
LOG.warn("Received the following exception while change mode attempt %d of /tmp: %s" % (i, str(e)))
time.sleep(1)
self.design = self.create_design()
def tearDown(self):
Workflow.objects.all().delete()
def create_design(self):
response = self.client.post(reverse('jobsub.views.new_design',
kwargs={'node_type': 'mapreduce'}),
data={'name': 'sleep_job',
'description': '',
'node_type': 'mapreduce',
'jar_path': '/user/hue/oozie/workspaces/lib/hadoop-examples.jar',
'prepares': '[]',
'files': '[]',
'archives': '[]',
'job_properties': '[{\"name\":\"mapred.reduce.tasks\",\"value\":\"1\"},{\"name\":\"mapred.mapper.class\",\"value\":\"org.apache.hadoop.examples.SleepJob\"},{\"name\":\"mapred.reducer.class\",\"value\":\"org.apache.hadoop.examples.SleepJob\"},{\"name\":\"mapred.mapoutput.key.class\",\"value\":\"org.apache.hadoop.io.IntWritable\"},{\"name\":\"mapred.mapoutput.value.class\",\"value\":\"org.apache.hadoop.io.NullWritable\"},{\"name\":\"mapred.output.format.class\",\"value\":\"org.apache.hadoop.mapred.lib.NullOutputFormat\"},{\"name\":\"mapred.input.format.class\",\"value\":\"org.apache.hadoop.examples.SleepJob$SleepInputFormat\"},{\"name\":\"mapred.partitioner.class\",\"value\":\"org.apache.hadoop.examples.SleepJob\"},{\"name\":\"mapred.speculative.execution\",\"value\":\"false\"},{\"name\":\"sleep.job.map.sleep.time\",\"value\":\"0\"},{\"name\":\"sleep.job.reduce.sleep.time\",\"value\":\"${REDUCER_SLEEP_TIME}\"}]'},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert_equal(response.status_code, 200)
return Workflow.objects.all()[0]
def test_new_design(self):
# Ensure the following:
# - creator is owner.
# - workflow name and description are the same as action name and description.
# - workflow has one action.
assert_false(self.design.managed)
assert_equal(4, Action.objects.filter(workflow=self.design).count())
assert_equal(1, Kill.objects.filter(workflow=self.design).count())
assert_equal(1, Start.objects.filter(workflow=self.design).count())
assert_equal(1, End.objects.filter(workflow=self.design).count())
assert_equal(4, Node.objects.filter(workflow=self.design).count())
assert_equal(3, Link.objects.filter(parent__workflow=self.design).count())
def test_save_design(self):
response = self.client.post(reverse('jobsub.views.save_design',
kwargs={'design_id': self.design.id}),
data={'name': 'mapreduce1',
'description': '',
'node_type': 'mapreduce',
'jar_path': '/user/hue/oozie/workspaces/lib/hadoop-examples.jar',
'prepares': '[]',
'files': '[{"name": "test", "dummy": ""}]',
'archives': '[]',
'job_properties': '[{\"name\":\"mapred.reduce.tasks\",\"value\":\"1\"},{\"name\":\"mapred.mapper.class\",\"value\":\"org.apache.hadoop.examples.SleepJob\"},{\"name\":\"mapred.reducer.class\",\"value\":\"org.apache.hadoop.examples.SleepJob\"},{\"name\":\"mapred.mapoutput.key.class\",\"value\":\"org.apache.hadoop.io.IntWritable\"},{\"name\":\"mapred.mapoutput.value.class\",\"value\":\"org.apache.hadoop.io.NullWritable\"},{\"name\":\"mapred.output.format.class\",\"value\":\"org.apache.hadoop.mapred.lib.NullOutputFormat\"},{\"name\":\"mapred.input.format.class\",\"value\":\"org.apache.hadoop.examples.SleepJob$SleepInputFormat\"},{\"name\":\"mapred.partitioner.class\",\"value\":\"org.apache.hadoop.examples.SleepJob\"},{\"name\":\"mapred.speculative.execution\",\"value\":\"false\"},{\"name\":\"sleep.job.map.sleep.time\",\"value\":\"0\"},{\"name\":\"sleep.job.reduce.sleep.time\",\"value\":\"${REDUCER_SLEEP_TIME}\"}]'},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert_equal(response.status_code, 200)
self.design = Workflow.objects.get(id=self.design.id)
assert_equal(self.design.start.get_child('to').get_full_node().files, '[{"name": "test", "dummy": ""}]')
def test_get_design(self):
response = self.client.get(reverse('jobsub.views.get_design',
kwargs={'design_id': self.design.id}),
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert_equal(response.status_code, 200)
def test_delete_design(self):
# Trash
assert_equal(1, Workflow.objects.available().count())
response = self.client.post(reverse('jobsub.views.delete_design',
kwargs={'design_id': self.design.id}),
follow=True,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert_equal(response.status_code, 200)
assert_equal(0, Workflow.objects.available().count())
assert_equal(1, Workflow.objects.trashed().count())
# Destroy
response = self.client.post(reverse('jobsub.views.delete_design',
kwargs={'design_id': self.design.id}) + '?skip_trash',
follow=True,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert_equal(response.status_code, 200)
assert_equal(0, Workflow.objects.available().count())
assert_equal(0, Workflow.objects.trashed().count())
def test_clone_design(self):
assert_equal(1, Workflow.objects.available().count())
response = self.client.post(reverse('jobsub.views.clone_design',
kwargs={'design_id': self.design.id}),
follow=True,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert_equal(response.status_code, 200)
assert_equal(2, Workflow.objects.available().count())
def test_restore_design(self):
assert_equal(1, Workflow.objects.available().count())
response = self.client.post(reverse('jobsub.views.delete_design',
kwargs={'design_id': self.design.id}),
follow=True,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert_equal(response.status_code, 200)
assert_equal(0, Workflow.objects.available().count())
assert_equal(1, Workflow.objects.trashed().count())
response = self.client.post(reverse('jobsub.views.restore_design',
kwargs={'design_id': self.design.id}),
follow=True,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert_equal(response.status_code, 200)
assert_equal(1, Workflow.objects.available().count())
assert_equal(0, Workflow.objects.trashed().count()) | 2013Commons/HUE-SHARK | apps/jobsub/src/jobsub/tests.py | Python | apache-2.0 | 8,310 |
class Solution(object):
def findMin(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
idx = 0
while True:
if nums[idx-1] < nums[idx]:
idx = idx-1
else:
return nums[idx]
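
# The loop above walks left from index 0, relying on Python's negative
# indexing to wrap around, which is O(n). A minimal O(log n) sketch using
# binary search (a hypothetical alternative, not the submitted solution):
def find_min_bisect(nums):
    lo, hi = 0, len(nums) - 1
    while lo < hi:
        mid = (lo + hi) // 2
        if nums[mid] > nums[hi]:
            lo = mid + 1  # rotation point lies strictly right of mid
        else:
            hi = mid  # minimum is at mid or to its left
    return nums[lo]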
| dborzov/practicin | 52-minimum-in-rotated-array/sol.py | Python | mit | 282 |
#!/cfme_pristine_venv/bin/python2
try:
from cfme.utils import conf
except ImportError:
from utils import conf
import subprocess
import sys
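# Each configured key is reduced to its 8-hex-digit short id: the last nine
# characters of the fingerprint with the embedded space removed.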
key_list = [key[-9:].replace(' ', '') for key in conf['gpg']['allowed_keys']]
proc = subprocess.Popen(['gpg', '--recv-keys'] + key_list)
proc.wait()
sys.exit(proc.returncode)
| jteehan/cfme_tests | scripts/dockerbot/pytestbase/get_keys.py | Python | gpl-2.0 | 324 |
# -*- coding: utf-8 -*-
import os
import codecs
import yaml
from jinja2 import Template
from flask import current_app
_forms = {}
def get_mapping(blueprint, endpoint):
global _forms
if not _forms:
forms_yaml_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'forms.yml')
with codecs.open(forms_yaml_path, 'rb', 'utf-8') as fp:
t = Template(fp.read())
yml = t.render({'settings': current_app.config})
_forms = yaml.load(yml)
return _forms.get(blueprint, {}).get(endpoint, {})
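# Usage sketch (needs an active Flask app context; the blueprint/endpoint
# names below are hypothetical):
#
#   mapping = get_mapping('admin', 'user_edit')
#
# Returns {} when forms.yml has no matching entry; the rendered YAML is
# parsed once and cached in the module-level _forms dict.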
| 360skyeye/kael | kael/web_admin/forms.py | Python | apache-2.0 | 556 |
from django.db import reset_queries
from celery import Task, current_task
import dogstats_wrapper as dog_stats_api
import json
import logging
from util.db import outer_atomic
from time import time
from lms.djangoapps.instructor_task.models import InstructorTask, PROGRESS
TASK_LOG = logging.getLogger('edx.celery.task')
class TaskProgress(object):
"""
Encapsulates the current task's progress by keeping track of
'attempted', 'succeeded', 'skipped', 'failed', 'total',
'action_name', and 'duration_ms' values.
"""
def __init__(self, action_name, total, start_time):
self.action_name = action_name
self.total = total
self.start_time = start_time
self.attempted = 0
self.succeeded = 0
self.skipped = 0
self.failed = 0
def update_task_state(self, extra_meta=None):
"""
Update the current celery task's state to the progress state
specified by the current object. Returns the progress
dictionary for use by `run_main_task` and
`BaseInstructorTask.on_success`.
Arguments:
extra_meta (dict): Extra metadata to pass to `update_state`
Returns:
dict: The current task's progress dict
"""
progress_dict = {
'action_name': self.action_name,
'attempted': self.attempted,
'succeeded': self.succeeded,
'skipped': self.skipped,
'failed': self.failed,
'total': self.total,
'duration_ms': int((time() - self.start_time) * 1000),
}
if extra_meta is not None:
progress_dict.update(extra_meta)
_get_current_task().update_state(state=PROGRESS, meta=progress_dict)
return progress_dict
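# Typical flow (a sketch, not executed here): a task function constructs
# TaskProgress(action_name, total, time()), bumps attempted/succeeded/
# skipped/failed as it iterates, and calls update_task_state() periodically
# so pollers observe the PROGRESS state with the dict documented above.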
def run_main_task(entry_id, task_fcn, action_name):
"""
Applies the `task_fcn` to the arguments defined in `entry_id` InstructorTask.
Arguments passed to `task_fcn` are:
`entry_id` : the primary key for the InstructorTask entry representing the task.
`course_id` : the id for the course.
`task_input` : dict containing task-specific arguments, JSON-decoded from InstructorTask's task_input.
`action_name` : past-tense verb to use for constructing status messages.
If no exceptions are raised, the `task_fcn` should return a dict containing
the task's result with the following keys:
'attempted': number of attempts made
'succeeded': number of attempts that "succeeded"
'skipped': number of attempts that "skipped"
'failed': number of attempts that "failed"
'total': number of possible subtasks to attempt
'action_name': user-visible verb to use in status messages.
Should be past-tense. Pass-through of input `action_name`.
'duration_ms': how long the task has (or had) been running.
"""
# Get the InstructorTask to be updated. If this fails then let the exception return to Celery.
# There's no point in catching it here.
with outer_atomic():
entry = InstructorTask.objects.get(pk=entry_id)
entry.task_state = PROGRESS
entry.save_now()
# Get inputs to use in this task from the entry
task_id = entry.task_id
course_id = entry.course_id
task_input = json.loads(entry.task_input)
# Construct log message
fmt = u'Task: {task_id}, InstructorTask ID: {entry_id}, Course: {course_id}, Input: {task_input}'
task_info_string = fmt.format(task_id=task_id, entry_id=entry_id, course_id=course_id, task_input=task_input)
TASK_LOG.info(u'%s, Starting update (nothing %s yet)', task_info_string, action_name)
# Check that the task_id submitted in the InstructorTask matches the current task
# that is running.
request_task_id = _get_current_task().request.id
if task_id != request_task_id:
fmt = u'{task_info}, Requested task did not match actual task "{actual_id}"'
message = fmt.format(task_info=task_info_string, actual_id=request_task_id)
TASK_LOG.error(message)
raise ValueError(message)
# Now do the work
with dog_stats_api.timer('instructor_tasks.time.overall', tags=[u'action:{name}'.format(name=action_name)]):
task_progress = task_fcn(entry_id, course_id, task_input, action_name)
# Release any queries that the connection has been hanging onto
reset_queries()
# Log and exit, returning task_progress info as task result
TASK_LOG.info(u'%s, Task type: %s, Finishing task: %s', task_info_string, action_name, task_progress)
return task_progress
def _get_current_task():
"""
Stub to make it easier to test without actually running Celery.
This is a wrapper around celery.current_task, which provides access
to the top of the stack of Celery's tasks. When running tests, however,
it doesn't seem to work to mock current_task directly, so this wrapper
is used to provide a hook to mock in tests, while providing the real
`current_task` in production.
"""
return current_task
| fintech-circle/edx-platform | lms/djangoapps/instructor_task/tasks_helper/runner.py | Python | agpl-3.0 | 5,085 |
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2021, Roland Rickborn ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Revision history:
# 2021-03-25 Created
#
# Requirements:
# This script requires the CardDAV CLI client pyCardDAV to be installed!
# Get it from here: https://github.com/geier/pycarddav
#
# This script generates Contacts.xml valid for MicroSIP VoIP client v3.10
# ---------------------------------------------------------------------------
import requests
import os
import sys
import configparser
from requests.auth import HTTPBasicAuth
microSipDataPath = os.environ['APPDATA']+"\MicroSIP"
bridgeDataPath = os.environ['LOCALAPPDATA']+"\CardDAV2MicroSIP"
config = configparser.ConfigParser()
config.read(os.path.join(bridgeDataPath, 'bridge.conf'))
def get():
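    """Fetch and concatenate the vCard exports of every configured
    CardDAV address book, using HTTP basic auth per server section."""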
accounts = {}
servers = config.sections()
counter = 1
for server in servers:
accounts[counter] = {}
_urls = []
for key in config[server]:
if key == 'user':
accounts[counter]['user'] = config[server]['user']
elif key == 'pass':
accounts[counter]['pass'] = config[server]['pass']
elif key.startswith('url'):
_urls.append(config[server][key])
accounts[counter]['url'] = _urls
counter = counter + 1
contents = ''
for account in accounts:
for abook in accounts[account]['url']:
response = requests.get(abook+'/?export', auth = HTTPBasicAuth(accounts[account]['user'], accounts[account]['pass']))
contents = contents + str(response.content.decode('utf-8'))
return contents
def create_cards_list(file_content):
return str(file_content).replace('\\r\\n','\n').split("END:VCARD")
def read_card(item):
info = {}
lines = item.replace('\n ','').split('\n')
tup_lin = [tuple(li.split(":")) for li in lines]
for d in tup_lin:
if str(d[0]) == 'FN':
info["Fullname"] = str(d[1]).strip()
elif str(d[0]) == 'ORG':
_org = str(d[1]).replace(';','').strip()
if _org != '':
info["Organisation"] = _org
elif str(d[0]).upper().startswith('TEL'):
teltypes = d[0].upper()
            # Dispatch on the TEL TYPE parameters (e.g. TEL;TYPE=WORK,VOICE);
            # membership tests handle the parameters wherever they appear.
            if 'WORK' in teltypes:
                info["WorkTel"] = str(d[1]).replace('+49','0').replace(' ' ,'').replace('-' ,'').replace('(' ,'').replace(')' ,'').strip()
            elif 'CELL' in teltypes:
                info["MobileTel"] = str(d[1]).replace('+49','0').replace(' ' ,'').replace('-' ,'').replace('(' ,'').replace(')' ,'').strip()
            elif 'HOME' in teltypes:
                info["HomeTel"] = str(d[1]).replace('+49','0').replace(' ' ,'').replace('-' ,'').replace('(' ,'').replace(')' ,'').strip()
        elif str(d[0]).upper().startswith('ITEM'):  # e.g. Apple-style item1.TEL lines
teltypes = str(d[0]).upper().split('.')
if 'TEL' in teltypes:
info["Tel"] = str(d[1]).replace('+49','0').replace(' ' ,'').replace('-' ,'').replace('(' ,'').replace(')' ,'').strip()
return info
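# Parsing sketch (hypothetical vCard fragment):
#   FN:Jane Doe
#   TEL;TYPE=WORK,VOICE:+49 711 1234-5
# read_card(...) -> {'Fullname': 'Jane Doe', 'WorkTel': '071112345'}; numbers
# are localised (+49 -> 0) and stripped of spaces, dashes and parentheses.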
def export_to_xml(items):
f = open(os.path.join(microSipDataPath, 'Contacts.xml'), 'w', encoding='utf8')
f.write(u'\ufeff')
f.write('<?xml version="1.0"?>\n')
f.write('<contacts>\n')
for item in items:
if "Organisation" in item:
if not item["Organisation"] == item["Fullname"]:
_org = item["Organisation"] + ', '
else:
_org = ''
else:
_org = ''
if "WorkTel" in item:
f.write('<contact number="{}" name="{} ({}Work)" presence="0" directory="0" ></contact>\n'.format(item["WorkTel"], item["Fullname"], _org))
if "MobileTel" in item:
f.write('<contact number="{}" name="{} ({}Mobile)" presence="0" directory="0" ></contact>\n'.format(item["MobileTel"], item["Fullname"], _org))
if "HomeTel" in item:
f.write('<contact number="{}" name="{} ({}Home)" presence="0" directory="0" ></contact>\n'.format(item["HomeTel"], item["Fullname"], _org))
if "Tel" in item:
f.write('<contact number="{}" name="{} ({}Voice)" presence="0" directory="0" ></contact>\n'.format(item["Tel"], item["Fullname"], _org))
f.write('</contacts>\n')
f.close()
def convert(fileName):
card_list = create_cards_list(fileName)
d_list = [read_card(item) for item in card_list]
export_to_xml(d_list)
def main_is_frozen():
return (hasattr(sys, "frozen") or hasattr(sys, "importers"))
def get_main_dir():
if main_is_frozen():
return os.path.dirname(sys.executable)
return os.path.dirname(sys.argv[0])
vcf = get()
convert(vcf) | gitRigge/CardDAV2MicroSIP | bridge.py | Python | mit | 5,902 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
fixedaspectsvgwidget.py
---------------------
Date : August 2016
Copyright : (C) 2016 Boundless, http://boundlessgeo.com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'August 2016'
__copyright__ = '(C) 2016 Boundless, http://boundlessgeo.com'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.PyQt.QtCore import QRect
from qgis.PyQt.QtGui import QPainter
from qgis.PyQt.QtSvg import QSvgWidget
class FixedAspectSvgWidget(QSvgWidget):
def paintEvent(self, event):
painter = QPainter(self)
painter.setViewport(self.centeredViewport(self.size()))
self.renderer().render(painter)
def centeredViewport(self, size):
width = size.width()
height = size.height()
aspectRatio = float(self.renderer().defaultSize().width()) / float(self.renderer().defaultSize().height())
heightFromWidth = int(width / aspectRatio)
widthFromHeight = int(height * aspectRatio)
        if heightFromWidth <= height:
            # QRect expects int offsets, so use floor division
            return QRect(0, (height - heightFromWidth) // 2, width, heightFromWidth)
        else:
            return QRect((width - widthFromHeight) // 2, 0, widthFromHeight, height)
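    # Example of the letterboxing arithmetic above (hypothetical numbers): a
    # 2:1 SVG inside a 400x400 widget gives heightFromWidth = 200 <= 400, so
    # the viewport becomes QRect(0, 100, 400, 200), centred vertically.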
| boundlessgeo/qgis-connect-plugin | boundlessconnect/gui/fixedaspectsvgwidget.py | Python | gpl-2.0 | 1,963 |
# -*- coding: utf-8 -*-
#
# Project Unit Tests
#
# To run this script use:
# python web2py.py -S eden -M -R applications/eden/modules/unit_tests/s3db/project.py
#
import unittest
from gluon import *
from gluon.storage import Storage
from s3dal import Row
from eden.project import S3ProjectActivityModel
from unit_tests import run_suite
# =============================================================================
class ProjectTests(unittest.TestCase):
""" Project Module Tests """
def setUp(self):
""" Set up organisation records """
# auth = current.auth
s3db = current.s3db
auth.override = True
ptable = s3db.project_project
atable = s3db.project_activity
p1 = Row(name="Test Project 1", code="TP1")
p1_id = ptable.insert(**p1)
p1.update(id=p1_id)
a1 = Row(name="Test Activity 1", project_id=p1_id)
a1_id = atable.insert(**a1)
a1.update(id=a1_id)
# activity without a project
a2 = Row(name="Test Activity 2")
a2_id = atable.insert(**a2)
a2.update(id=a2_id)
self.p1 = p1
self.a1 = a1
self.a2 = a2
def testActivityRepresent(self):
rep = S3ProjectActivityModel.project_activity_represent
self.assertEqual(
rep(self.a1.id),
"%s - %s" % (self.p1.code, self.a1.name),
)
self.assertEqual(
rep(self.a2.id),
"%s" % self.a2.name,
)
self.assertEqual(
rep(None),
current.messages.NONE,
)
self.assertEqual(
rep(self.a1),
"%s - %s" % (self.p1.code, self.a1.name),
)
self.assertEqual(
rep(self.a1.id, self.a1),
"%s - %s" % (self.p1.code, self.a1.name),
)
    def tearDown(self):
        current.db.rollback()
        current.auth.override = False
# =============================================================================
if __name__ == "__main__":
run_suite(
ProjectTests,
)
# END ========================================================================
| flavour/eden | modules/unit_tests/s3db/project.py | Python | mit | 2,145 |
# Copyright (c) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import fnmatch
import os
import os.path
import BaseHTTPServer
from webkitpy.common.host import Host # FIXME: This should not be needed!
from webkitpy.layout_tests.port.base import Port
from webkitpy.tool.servers.reflectionhandler import ReflectionHandler
STATE_NEEDS_REBASELINE = 'needs_rebaseline'
STATE_REBASELINE_FAILED = 'rebaseline_failed'
STATE_REBASELINE_SUCCEEDED = 'rebaseline_succeeded'
def _get_actual_result_files(test_file, test_config):
test_name, _ = os.path.splitext(test_file)
test_directory = os.path.dirname(test_file)
test_results_directory = test_config.filesystem.join(
test_config.results_directory, test_directory)
actual_pattern = os.path.basename(test_name) + '-actual.*'
actual_files = []
for filename in test_config.filesystem.listdir(test_results_directory):
if fnmatch.fnmatch(filename, actual_pattern):
actual_files.append(filename)
actual_files.sort()
return tuple(actual_files)
def _rebaseline_test(test_file, baseline_target, baseline_move_to, test_config, log):
test_name, _ = os.path.splitext(test_file)
test_directory = os.path.dirname(test_name)
log('Rebaselining %s...' % test_name)
actual_result_files = _get_actual_result_files(test_file, test_config)
filesystem = test_config.filesystem
scm = test_config.scm
layout_tests_directory = test_config.layout_tests_directory
results_directory = test_config.results_directory
target_expectations_directory = filesystem.join(
layout_tests_directory, 'platform', baseline_target, test_directory)
test_results_directory = test_config.filesystem.join(
test_config.results_directory, test_directory)
# If requested, move current baselines out
current_baselines = get_test_baselines(test_file, test_config)
if baseline_target in current_baselines and baseline_move_to != 'none':
log(' Moving current %s baselines to %s' %
(baseline_target, baseline_move_to))
# See which ones we need to move (only those that are about to be
# updated), and make sure we're not clobbering any files in the
# destination.
current_extensions = set(current_baselines[baseline_target].keys())
actual_result_extensions = [
os.path.splitext(f)[1] for f in actual_result_files]
extensions_to_move = current_extensions.intersection(
actual_result_extensions)
if extensions_to_move.intersection(
current_baselines.get(baseline_move_to, {}).keys()):
log(' Already had baselines in %s, could not move existing '
'%s ones' % (baseline_move_to, baseline_target))
return False
# Do the actual move.
if extensions_to_move:
if not _move_test_baselines(
test_file,
list(extensions_to_move),
baseline_target,
baseline_move_to,
test_config,
log):
return False
else:
log(' No current baselines to move')
log(' Updating baselines for %s' % baseline_target)
filesystem.maybe_make_directory(target_expectations_directory)
for source_file in actual_result_files:
source_path = filesystem.join(test_results_directory, source_file)
destination_file = source_file.replace('-actual', '-expected')
destination_path = filesystem.join(
target_expectations_directory, destination_file)
filesystem.copyfile(source_path, destination_path)
exit_code = scm.add(destination_path, return_exit_code=True)
if exit_code:
log(' Could not update %s in SCM, exit code %d' %
(destination_file, exit_code))
return False
else:
log(' Updated %s' % destination_file)
return True
def _move_test_baselines(test_file, extensions_to_move, source_platform, destination_platform, test_config, log):
test_file_name = os.path.splitext(os.path.basename(test_file))[0]
test_directory = os.path.dirname(test_file)
filesystem = test_config.filesystem
# Want predictable output order for unit tests.
extensions_to_move.sort()
source_directory = os.path.join(
test_config.layout_tests_directory,
'platform',
source_platform,
test_directory)
destination_directory = os.path.join(
test_config.layout_tests_directory,
'platform',
destination_platform,
test_directory)
filesystem.maybe_make_directory(destination_directory)
for extension in extensions_to_move:
file_name = test_file_name + '-expected' + extension
source_path = filesystem.join(source_directory, file_name)
destination_path = filesystem.join(destination_directory, file_name)
filesystem.copyfile(source_path, destination_path)
exit_code = test_config.scm.add(destination_path, return_exit_code=True)
if exit_code:
log(' Could not update %s in SCM, exit code %d' %
(file_name, exit_code))
return False
else:
log(' Moved %s' % file_name)
return True
def get_test_baselines(test_file, test_config):
# FIXME: This seems like a hack. This only seems used to access the Port.expected_baselines logic.
class AllPlatformsPort(Port):
def __init__(self, host):
super(AllPlatformsPort, self).__init__(host, 'mac')
self._platforms_by_directory = dict([(self._webkit_baseline_path(p), p) for p in test_config.platforms])
def baseline_search_path(self):
return self._platforms_by_directory.keys()
def platform_from_directory(self, directory):
return self._platforms_by_directory[directory]
test_path = test_config.filesystem.join(test_config.layout_tests_directory, test_file)
# FIXME: This should get the Host from the test_config to be mockable!
host = Host()
host.initialize_scm()
host.filesystem = test_config.filesystem
all_platforms_port = AllPlatformsPort(host)
all_test_baselines = {}
for baseline_extension in ('.txt', '.checksum', '.png'):
test_baselines = test_config.test_port.expected_baselines(test_file, baseline_extension)
baselines = all_platforms_port.expected_baselines(test_file, baseline_extension, all_baselines=True)
for platform_directory, expected_filename in baselines:
if not platform_directory:
continue
if platform_directory == test_config.layout_tests_directory:
platform = 'base'
else:
platform = all_platforms_port.platform_from_directory(platform_directory)
platform_baselines = all_test_baselines.setdefault(platform, {})
was_used_for_test = (platform_directory, expected_filename) in test_baselines
platform_baselines[baseline_extension] = was_used_for_test
return all_test_baselines
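# Shape of the returned mapping (a sketch): {'base': {'.txt': True},
# 'mac': {'.png': False}} -- platform name -> baseline extension -> whether
# that baseline was the one actually used by the test port.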
class RebaselineHTTPServer(BaseHTTPServer.HTTPServer):
def __init__(self, httpd_port, config):
server_name = ""
BaseHTTPServer.HTTPServer.__init__(self, (server_name, httpd_port), RebaselineHTTPRequestHandler)
self.test_config = config['test_config']
self.results_json = config['results_json']
self.platforms_json = config['platforms_json']
class RebaselineHTTPRequestHandler(ReflectionHandler):
STATIC_FILE_NAMES = frozenset([
"index.html",
"loupe.js",
"main.js",
"main.css",
"queue.js",
"util.js",
])
STATIC_FILE_DIRECTORY = os.path.join(os.path.dirname(__file__), "data", "rebaselineserver")
def results_json(self):
self._serve_json(self.server.results_json)
def test_config(self):
self._serve_json(self.server.test_config)
def platforms_json(self):
self._serve_json(self.server.platforms_json)
def rebaseline(self):
test = self.query['test'][0]
baseline_target = self.query['baseline-target'][0]
baseline_move_to = self.query['baseline-move-to'][0]
test_json = self.server.results_json['tests'][test]
if test_json['state'] != STATE_NEEDS_REBASELINE:
self.send_error(400, "Test %s is in unexpected state: %s" % (test, test_json["state"]))
return
log = []
success = _rebaseline_test(
test,
baseline_target,
baseline_move_to,
self.server.test_config,
log=lambda l: log.append(l))
if success:
test_json['state'] = STATE_REBASELINE_SUCCEEDED
self.send_response(200)
else:
test_json['state'] = STATE_REBASELINE_FAILED
self.send_response(500)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write('\n'.join(log))
def test_result(self):
test_name, _ = os.path.splitext(self.query['test'][0])
mode = self.query['mode'][0]
if mode == 'expected-image':
file_name = test_name + '-expected.png'
elif mode == 'actual-image':
file_name = test_name + '-actual.png'
        elif mode == 'expected-checksum':
file_name = test_name + '-expected.checksum'
elif mode == 'actual-checksum':
file_name = test_name + '-actual.checksum'
elif mode == 'diff-image':
file_name = test_name + '-diff.png'
        elif mode == 'expected-text':
file_name = test_name + '-expected.txt'
elif mode == 'actual-text':
file_name = test_name + '-actual.txt'
elif mode == 'diff-text':
file_name = test_name + '-diff.txt'
elif mode == 'diff-text-pretty':
file_name = test_name + '-pretty-diff.html'
file_path = os.path.join(self.server.test_config.results_directory, file_name)
# Let results be cached for 60 seconds, so that they can be pre-fetched
# by the UI
self._serve_file(file_path, cacheable_seconds=60)
| leighpauls/k2cro4 | third_party/WebKit/Tools/Scripts/webkitpy/tool/servers/rebaselineserver.py | Python | bsd-3-clause | 11,698 |
# This file is part of Rubber and thus covered by the GPL
# (c) Emmanuel Beffara, 2002--2006
# Modified by Olivier Verdier <[email protected]>
import re
import codecs
# The function `_' is defined here to prepare for internationalization.
def _ (txt): return txt
re_loghead = re.compile("This is [0-9a-zA-Z-]*(TeX|Omega)")
re_rerun = re.compile("LaTeX Warning:.*Rerun")
re_file = re.compile("(\\((?P<file>[^ \n\t(){}]*)|\\))")
re_badbox = re.compile(r"(Ov|Und)erfull \\[hv]box ")
re_line = re.compile(r"(l\.(?P<line>[0-9]+)( (?P<code>.*))?$|<\*>)")
re_cseq = re.compile(r".*(?P<seq>\\[^ ]*) ?$")
re_page = re.compile(r"\[(?P<num>[0-9]+)\]")
re_atline = re.compile(
"( detected| in paragraph)? at lines? (?P<line>[0-9]*)(--(?P<last>[0-9]*))?")
re_reference = re.compile("LaTeX Warning: Reference `(?P<ref>.*)' \
on page (?P<page>[0-9]*) undefined on input line (?P<line>[0-9]*)\\.$")
re_citation = re.compile("^.*Citation `(?P<cite>.*)' on page (?P<page>[0-9]*) undefined on input line (?P<line>[0-9]*)\\.$")
re_label = re.compile("LaTeX Warning: (?P<text>Label .*)$")
re_warning = re.compile(
"(LaTeX|Package)( (?P<pkg>.*))? Warning: (?P<text>.*)$")
re_online = re.compile("(; reported)? on input line (?P<line>[0-9]*)")
re_ignored = re.compile("; all text was ignored after line (?P<line>[0-9]*).$")
re_missing_character = re.compile(r'^Missing character: There is no (?P<missing>\S)', flags=re.UNICODE)
class LogCheck (object):
"""
This class performs all the extraction of information from the log file.
For efficiency, the instances contain the whole file as a list of strings
so that it can be read several times with no disk access.
"""
#-- Initialization {{{2
def __init__ (self):
self.lines = None
def read (self, name):
"""
        Read the specified log file, checking that it was produced by the
        right compiler. Raises ValueError if the file is empty or does not
        look like a TeX log.
"""
self.lines = None
with codecs.open(name, encoding='utf-8', errors='replace') as log_file:
self.lines = log_file.readlines()
if not self.lines:
raise ValueError("Empty file")
line = self.lines[0]
if not re_loghead.match(line):
raise ValueError("This doesn't seem to be a tex log file")
#-- Process information {{{2
def errors (self):
"""
Returns true if there was an error during the compilation.
"""
skipping = 0
for line in self.lines:
if line.strip() == "":
skipping = 0
continue
if skipping:
continue
m = re_badbox.match(line)
if m:
skipping = 1
continue
if line[0] == "!":
# We check for the substring "pdfTeX warning" because pdfTeX
# sometimes issues warnings (like undefined references) in the
# form of errors...
                if line.find("pdfTeX warning") == -1:
return True
return False
def run_needed (self):
"""
Returns true if LaTeX indicated that another compilation is needed.
"""
for line in self.lines:
if re_rerun.match(line):
return True
return False
#-- Information extraction {{{2
def continued (self, line):
"""
Check if a line in the log is continued on the next line. This is
needed because TeX breaks messages at 79 characters per line. We make
this into a method because the test is slightly different in Metapost.
"""
return len(line) == 79 and line[-3:] != '...'
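    # Illustrative behaviour of continued() (assumption: lines have had their
    # trailing newline stripped): a 79-character line that does not end in
    # '...' is treated as wrapped and is glued to the next line before parsing.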
def parse (self, errors=False, boxes=False, refs=False, warnings=False):
"""
Parse the log file for relevant information. The named arguments are
booleans that indicate which information should be extracted:
- errors: all errors
- boxes: bad boxes
- refs: warnings about references
- warnings: all other warnings
The function returns a generator. Each generated item is a dictionary
that contains (some of) the following entries:
- kind: the kind of information ("error", "box", "ref", "warning")
- text: the text of the error or warning
- code: the piece of code that caused an error
- file, line, last, pkg: as used by Message.format_pos.
"""
if not self.lines:
return
last_file = None
pos = [last_file]
page = 1
parsing = False # True if we are parsing an error's text
skipping = False # True if we are skipping text until an empty line
something = False # True if some error was found
prefix = None # the prefix for warning messages from packages
accu = "" # accumulated text from the previous line
for line in self.lines:
line = line[:-1] # remove the line feed
# TeX breaks messages at 79 characters, just to make parsing
# trickier...
if self.continued(line):
accu += line
continue
line = accu + line
accu = ""
# Text that should be skipped (from bad box messages)
if prefix is None and line == "":
skipping = False
continue
if skipping:
continue
# Errors (including aborted compilation)
if parsing:
if error == "Undefined control sequence.":
# This is a special case in order to report which control
# sequence is undefined.
m = re_cseq.match(line)
if m:
error = "Undefined control sequence %s." % m.group("seq")
m = re_line.match(line)
if m:
parsing = False
skipping = True
                    pdfTeX = line.find("pdfTeX warning") != -1
if (pdfTeX and warnings) or (errors and not pdfTeX):
if pdfTeX:
d = {
"kind": "warning",
"pkg": "pdfTeX",
"text": error[error.find(":")+2:]
}
else:
d = {
"kind": "error",
"text": error
}
d.update( m.groupdict() )
m = re_ignored.search(error)
if m:
d["file"] = last_file
if "code" in d:
del d["code"]
d.update( m.groupdict() )
elif pos[-1] is None:
d["file"] = last_file
else:
d["file"] = pos[-1]
yield d
elif line[0] == "!":
error = line[2:]
elif line[0:3] == "***":
parsing = False
skipping = True
if errors:
yield {
"kind": "abort",
"text": error,
"why" : line[4:],
"file": last_file
}
elif line[0:15] == "Type X to quit ":
parsing = False
skipping = False
if errors:
yield {
"kind": "error",
"text": error,
"file": pos[-1]
}
continue
if len(line) > 0 and line[0] == "!":
error = line[2:]
parsing = True
continue
if line == "Runaway argument?":
error = line
parsing = True
continue
# Long warnings
if prefix is not None:
if line[:len(prefix)] == prefix:
                    text.append(line[len(prefix):].strip())
else:
text = " ".join(text)
m = re_online.search(text)
if m:
info["line"] = m.group("line")
text = text[:m.start()] + text[m.end():]
if warnings:
info["text"] = text
d = { "kind": "warning" }
d.update( info )
yield d
prefix = None
continue
# Undefined references
m = re_reference.match(line)
if m:
if refs:
d = {
"kind": "warning",
"text": _("Reference `%s' undefined.") % m.group("ref"),
"file": pos[-1]
}
d.update( m.groupdict() )
yield d
continue
m = re_citation.match(line)
if m:
if refs:
d = {
"kind": "warning",
"text": _("Citation `%s' undefined.") % m.group("cite"),
"file": pos[-1]
}
d.update( m.groupdict() )
yield d
continue
m = re_label.match(line)
if m:
if refs:
d = {
"kind": "warning",
"file": pos[-1]
}
d.update( m.groupdict() )
yield d
continue
missing_char = re_missing_character.match(line)
if missing_char:
mpos = { "file": pos[-1], "page": page }
if warnings:
info = missing_char.groupdict()
missing_char = info['missing']
## raise Exception(info)
d = {'kind': 'warning', 'text': u'Missing character: "{}"'.format(missing_char)}
d.update(mpos)
yield d
continue
# Other warnings
if line.find("Warning") != -1:
m = re_warning.match(line)
if m:
info = m.groupdict()
info["file"] = pos[-1]
info["page"] = page
if info["pkg"] is None:
del info["pkg"]
prefix = ""
else:
prefix = ("(%s)" % info["pkg"])
prefix = prefix.ljust(m.start("text"))
text = [info["text"]]
continue
# Bad box messages
m = re_badbox.match(line)
if m:
if boxes:
mpos = { "file": pos[-1], "page": page }
m = re_atline.search(line)
if m:
md = m.groupdict()
for key in "line", "last":
if md[key]: mpos[key] = md[key]
line = line[:m.start()]
d = {
"kind": "warning",
"text": line
}
d.update( mpos )
yield d
skipping = True
continue
# If there is no message, track source names and page numbers.
last_file = self.update_file(line, pos, last_file)
page = self.update_page(line, page)
def get_errors (self):
return self.parse(errors=True)
def get_boxes (self):
return self.parse(boxes=True)
def get_references (self):
return self.parse(refs=True)
def get_warnings (self):
return self.parse(warnings=True)
def update_file (self, line, stack, last):
"""
Parse the given line of log file for file openings and closings and
update the list `stack'. Newly opened files are at the end, therefore
stack[1] is the main source while stack[-1] is the current one. The
first element, stack[0], contains the value None for errors that may
happen outside the source. Return the last file from which text was
read (the new stack top, or the one before the last closing
parenthesis).
"""
m = re_file.search(line)
while m:
if line[m.start()] == '(':
last = m.group("file")
stack.append(last)
else:
last = stack[-1]
del stack[-1]
line = line[m.end():]
m = re_file.search(line)
return last
def update_page (self, line, before):
"""
Parse the given line and return the number of the page that is being
built after that line, assuming the current page before the line was
`before'.
"""
ms = re_page.findall(line)
if ms == []:
return before
return int(ms[-1]) + 1
if __name__ == '__main__':
parser = LogCheck()
parser.read('short.log')
errs = list(parser.get_errors())
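    # Minimal usage sketch (assumes a TeX log named 'short.log' exists next to
    # this script): print each parsed error dict for quick inspection.
    for err in errs:
        print(err)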
| alexvh/pydflatex | pydflatex/latexlogparser.py | Python | bsd-3-clause | 10,271 |
#!/usr/bin/env python
""" script that ingests the job launch scripts """
from datetime import datetime, timedelta
import os
import re
from supremm.scripthelpers import getdbconnection
import sys
import logging
import glob
from getopt import getopt
from supremm.config import Config
MAX_SCRIPT_LEN = (64 * 1024) - 1
class DbHelper(object):
""" Helper class to interact with the database """
def __init__(self, dwconfig, tablename):
# The database schema should be created with utf8-unicode encoding.
self.con = getdbconnection(dwconfig, False, {'charset': 'utf8', 'use_unicode': True})
self.tablename = tablename
self.query = "INSERT IGNORE INTO " + tablename + " (resource_id,local_job_id,script) VALUES(%s,%s,%s)"
self.buffered = 0
def insert(self, data):
""" try to insert a record """
cur = self.con.cursor()
cur.execute(self.query, data)
self.buffered += 1
if self.buffered > 100:
self.con.commit()
self.buffered = 0
def postinsert(self):
""" call this to flush connection """
self.con.commit()
def getmostrecent(self, resource_id):
""" return the timestamp of the most recent entry for the resource """
query = "SELECT coalesce(MAX(updated),MAKEDATE(1970, 1)) FROM " + self.tablename + " WHERE resource_id = %s"
data = (resource_id, )
cur = self.con.cursor()
cur.execute(query, data)
return cur.fetchone()[0]
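# Typical DbHelper flow (illustrative sketch, not from the original source):
# insert() buffers rows and commits in batches of 100, so callers must invoke
# postinsert() once after the final insert() to flush the remainder.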
def pathfilter(path, mindate):
""" return whether path should not be processed based on mindate
return value of False indicates no filtering
return value of true indicates the path should be filtered
"""
    if mindate is None:
return False
subdir = os.path.basename(path)
try:
if datetime.strptime(subdir, "%Y%m%d") < mindate:
logging.debug("Skip(1) subdir %s", subdir)
return True
except ValueError:
logging.debug("Skip(2) subdir %s", subdir)
return True
return False
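# Illustrative pathfilter() behaviour (assumption: job subdirectories are
# named YYYYMMDD):
#   pathfilter("/scripts/20160105", datetime(2016, 1, 10)) -> True  (too old)
#   pathfilter("/scripts/20160115", datetime(2016, 1, 10)) -> False (ingested)
#   pathfilter("/scripts/not-a-date", datetime(2016, 1, 10)) -> True (skipped)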
def processfor(resource_id, respath, dbif, timedeltadays):
""" find and ingest all job scripts for the given resource """
count = 0
fglob = re.compile(r"^([0-9]*)\.savescript")
logging.debug("Processing path %s", respath)
    if timedeltadays is None:
mindate = None
else:
mindate = dbif.getmostrecent(resource_id) - timedelta(days=timedeltadays)
logging.debug("Start date is %s", mindate)
for path in glob.glob(respath + "/[0-9]*"):
if pathfilter(path, mindate):
continue
logging.debug("processing files in %s", path)
for root, _, files in os.walk(path, topdown=True):
for filename in files:
mtch = fglob.match(filename)
                if mtch is None:
logging.debug("Ignore file %s", filename)
continue
with open(os.path.join(root, filename), "rb") as scriptfile:
                    # Note: byte sequences that are not valid utf-8 are replaced during decoding
scriptdata = scriptfile.read(MAX_SCRIPT_LEN).decode("utf-8", "replace")
if len(scriptdata) > MAX_SCRIPT_LEN:
                        # Defensive: decoding with "replace" cannot lengthen the text, but truncate just in case
scriptdata = scriptdata[:MAX_SCRIPT_LEN]
dbif.insert([resource_id, int(mtch.group(1)), scriptdata])
count += 1
return count
DAY_DELTA = 2
def usage():
""" print usage """
print "usage: {0} [OPTS]".format(os.path.basename(__file__))
print " -r --resource=RES process only archive files for the specified resource, if absent then all resources are processed"
print " -c --config=PATH specify the path to the configuration directory"
print " -D --daydelta=DAYS specify the number of days overlap from the last ingest (default", DAY_DELTA, "days ago)"
print " -a --all process all scripts regardless of age"
print " -d --debug set log level to debug"
print " -q --quiet only log errors"
print " -h --help print this help message"
def getoptions():
""" process comandline options """
retdata = {
"log": logging.INFO,
"resource": None,
"config": None,
"deltadays": DAY_DELTA
}
opts, _ = getopt(sys.argv[1:], "r:c:D:adqh", ["resource=", "config=", "daydelta=", "all", "debug", "quiet", "help"])
for opt in opts:
if opt[0] in ("-r", "--resource"):
retdata['resource'] = opt[1]
if opt[0] in ("-d", "--debug"):
retdata['log'] = logging.DEBUG
if opt[0] in ("-q", "--quiet"):
retdata['log'] = logging.ERROR
elif opt[0] in ("-c", "--config"):
retdata['config'] = opt[1]
elif opt[0] in ("-D", "--daydelta"):
retdata['deltadays'] = int(opt[1])
elif opt[0] in ("-a", "--all"):
retdata['deltadays'] = None
if opt[0] in ("-h", "--help"):
usage()
sys.exit(0)
return retdata
def main():
"""
main entry point for script
"""
opts = getoptions()
logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s', datefmt='%Y-%m-%dT%H:%M:%S', level=opts['log'])
if sys.version.startswith("2.7"):
logging.captureWarnings(True)
config = Config(opts['config'])
dwconfig = config.getsection("datawarehouse")
dbif = DbHelper(dwconfig, 'modw_supremm.batchscripts')
for resourcename, settings in config.resourceconfigs():
if opts['resource'] in (None, resourcename, str(settings['resource_id'])):
logging.debug("Processing %s (id=%s)", resourcename, settings['resource_id'])
if "script_dir" in settings:
total = processfor(settings['resource_id'], settings['script_dir'], dbif, opts['deltadays'])
logging.info("Processed %s files for %s", total, resourcename)
else:
logging.debug("Skip resource %s no script dir defined", resourcename)
dbif.postinsert()
if __name__ == "__main__":
main()
| iMurfyD/supremm | supremm/ingest_jobscripts.py | Python | lgpl-3.0 | 6,249 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Google Maps',
'category': 'Website/Website',
'summary': 'Show your company address on Google Maps',
'version': '1.0',
'description': """
Show your company address/partner address on Google Maps. Configure an API key in the Website settings.
""",
'depends': ['base_geolocalize', 'website_partner'],
'data': [
'views/google_map_templates.xml',
],
'installable': True,
}
| ygol/odoo | addons/website_google_map/__manifest__.py | Python | agpl-3.0 | 526 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from setuptools import setup
def get_version():
import os
import re
version_file = os.path.join("theoldreader", "__init__.py")
    with open(version_file, 'rt') as version_fh:
        initfile_lines = version_fh.readlines()
version_reg = r"^__version__ = ['\"]([^'\"]*)['\"]"
for line in initfile_lines:
mo = re.search(version_reg, line, re.M)
if mo:
return mo.group(1)
raise RuntimeError(
u"Unable to find version string in {}".format(version_file)
)
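# Example of the version line get_version() extracts (illustrative; the real
# value lives in theoldreader/__init__.py):
#   __version__ = '0.1.0'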
def get_file(path):
with open(path, 'r') as f:
return f.read()
version = get_version()
try:
requirements = get_file("requirements.txt").split("\n")
except Exception:
print("Could not load requirements")
requirements = []
readme = get_file("README.rst")
history = get_file("HISTORY.rst")
setup(
name="theoldreader",
version=version,
description="Wrapper for TheOldReader api",
long_description=readme + "\n\n" + history,
author="Qra",
# TODO: add author email
# author_email="",
url="https://github.com/KurochkinVasiliy/theoldreader",
packages=[
"theoldreader"
],
install_requires=requirements,
license="MIT License",
keywords="theoldreader api",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
]
)
| KurochkinVasiliy/theoldreader | setup.py | Python | mit | 1,752 |
# -*- coding: utf-8 -*-
'''
Copyright (C) 2016 Rafael Picanço.
The present file is distributed under the terms of the GNU General Public License (GPL v3.0).
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import os
import sys
import inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0,parentdir)
import cv2
import numpy as np
_RETR_TREE = 0
# Constants for the hierarchy[_RETR_TREE][contour][{next,back,child,parent}]
_ID_NEXT = 0
_ID_BACK = 1
_ID_CHILD = 2
_ID_PARENT = 3
# Channel constants
_CH_B = 0
_CH_G = 1
_CH_R = 2
_CH_0 = 3
def find_edges(img, threshold, cv2_thresh_mode):
blur = cv2.GaussianBlur(img,(5,5),0)
#gray = cv2.cvtColor(blur,cv2.COLOR_BGR2GRAY)
edges = []
# channels = (blur[:,:,_CH_B], blur[:,:,_CH_G], blur[:,:,_CH_R])
# channels = cv2.split(blur)
for gray in (blur[:,:,_CH_B], blur[:,:,_CH_G], blur[:,:,_CH_R]):
if threshold == 0:
edg = cv2.Canny(gray, 0, 50, apertureSize = 5)
edg = cv2.dilate(edg, None)
edges.append(edg)
else:
retval, edg = cv2.threshold(gray, threshold, 255, cv2_thresh_mode)
edges.append(edg)
return edges
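# Illustrative find_edges() behaviour (assumption: 8-bit BGR input):
#   threshold == 0   -> per-channel Canny edge maps, dilated once
#   threshold == 180 -> per-channel fixed threshold at grey level 180 using
#                       the supplied cv2_thresh_mode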
def ellipses_from_findContours(img, cv2_thresh_mode, delta_area_threshold, threshold, mode=True):
candidate_ellipses = []
debug_contours_output = []
merge = []
if mode:
height = img.shape[0]
width = img.shape[1]
edges = find_edges(img, threshold, cv2.THRESH_TOZERO)
edges.append(np.zeros((height, width, 1), np.uint8))
edges_edt = cv2.max(edges[_CH_B], edges[_CH_G])
edges_edt = cv2.max(edges_edt, edges[_CH_R])
edges = cv2.merge([edges_edt, edges_edt, edges_edt])
merge = [edges_edt, edges_edt, edges_edt]
edges = cv2.cvtColor(edges,cv2.COLOR_BGR2GRAY)
else:
gray_img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
edges = cv2.adaptiveThreshold(gray_img, 255,
adaptiveMethod = cv2.ADAPTIVE_THRESH_GAUSSIAN_C,
thresholdType = cv2_thresh_mode,
blockSize = 5,
C = -1)
#f = open('/home/rafael/Downloads/pupil-3x/pupil_src/player/data2.txt', 'w')
#f.write(str(edges))
#f.close
#raise
# cv2.findContours supports only black and white images (8uC1 and 32sC1 image)
contours, hierarchy = cv2.findContours(edges,mode = cv2.RETR_TREE,method = cv2.CHAIN_APPROX_NONE,offset = (0,0)) #TC89_KCOS
# remove extra encapsulation
    if hierarchy is not None:
if mode:
hierarchy = hierarchy[_RETR_TREE]
# turn outmost list into array
contours = np.array(contours)
            # keep only contours that have a parent or a child
            contained_contours = contours[np.logical_or(hierarchy[:, _ID_PARENT] >= 0, hierarchy[:, _ID_CHILD] >= 0)]
debug_contours_output = contained_contours
            if contained_contours is not None:
                contained_contours = [c for c in contained_contours if len(c) >= 5]
            if contained_contours is not None:
                contained_contours = [c for c in contained_contours if cv2.contourArea(c) > 1000]
            if contained_contours is not None:
ellipses = [cv2.fitEllipse(c) for c in contained_contours]
            if ellipses is not None:
# filter for ellipses that have similar area as the source contour
for e,c in zip(ellipses, contained_contours):
a,b = e[1][0] / 2., e[1][1] / 2.
if abs(cv2.contourArea(c) - np.pi * a * b) < delta_area_threshold:
candidate_ellipses.append(e)
else:
hierarchy = hierarchy[_RETR_TREE]
# turn outmost list into array
contours = np.array(contours)
            # keep only contours that have both a parent and a child
            contained_contours = contours[np.logical_and(hierarchy[:, _ID_PARENT] >= 0, hierarchy[:, _ID_CHILD] >= 0)]
debug_contours_output = contained_contours
#debug_contours_output = contained_contours
# need at least 5 points to fit ellipse
contained_contours = [c for c in contained_contours if len(c) >= 5]
            if contained_contours is not None:
contained_contours = [c for c in contained_contours if cv2.contourArea(c) > 1000]
ellipses = [cv2.fitEllipse(c) for c in contained_contours]
candidate_ellipses = []
# filter for ellipses that have similar area as the source contour
for e,c in zip(ellipses, contained_contours):
a,b = e[1][0] / 2., e[1][1] / 2.
if abs(cv2.contourArea(c) - np.pi * a * b) < delta_area_threshold:
candidate_ellipses.append(e)
return candidate_ellipses, merge, debug_contours_output
img_path = '/home/rafael/documents/doutorado/data_doc/003-Natan/2015-05-13/export_images/frame_1758.png'
img = cv2.imread(img_path)
ellipses = []
merge = []
contained_contours = []
delta_area_threshold = 20
threshold = 180
ellipses, merge, contained_contours = ellipses_from_findContours(img, cv2_thresh_mode=cv2.THRESH_BINARY,
delta_area_threshold=delta_area_threshold,
threshold=threshold,
mode=False)
alfa = 2
#img = cv2.merge(merge)
#cv2.drawContours(img, contained_contours,-1, (0,0,255))
if ellipses:
for ellipse in ellipses:
center = ( int(round( ellipse[0][0] )), int( round( ellipse[0][1] )))
axes = ( int( round( ellipse[1][0]/alfa )), int( round( ellipse[1][1]/alfa )))
angle = int( round(ellipse[2] ))
cv2.ellipse(img, center, axes, angle, startAngle=0, endAngle=359, color=(255, 0, 0), thickness=1, lineType=8, shift= 0)
#cv2.namedWindow("output", cv2.CV_WINDOW_AUTOSIZE)
while True:
cv2.imshow("input", img)
ch = 0xFF & cv2.waitKey(1)
if ch == 27:
break
cv2.destroyAllWindows() | cpicanco/player_plugins | self_contained/ellipse_detector.py | Python | gpl-3.0 | 6,498 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution Addon
# Copyright (C) 2009-2013 IRSID (<http://irsid.ru>),
# Paul Korotkov ([email protected]).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api
class edu_module(models.Model):
_name = 'edu.module'
_description = 'Education Module'
_inherit = ['base.doc']
# _inherit = 'edu.doc'
# _track = {
# 'state': {
# 'irsid_edu.mt_module_updated': lambda self, cr, uid, obj, ctx=None: True,
# },
# }
_order = 'program,section,subsection,name'
# Naming Functions
# def name_get(self, cr, uid, ids, context=None):
# if not len(ids):
# return []
# modules = self.browse(cr, uid, ids, context=context)
# res = []
# for module in modules:
# res.append((module.id, module.code + ': ' + module.name))
# return res
#
# def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
# if not args:
# args = []
# if context is None:
# context = {}
# ids = self.search(cr, user, [
# '|',('code','ilike',name),('name','ilike',name),
# ] + args, limit=limit, context=context)
# return self.name_get(cr, user, ids, context=context)
# Workflow Functions
# def set_case_default(self, cr, uid, ids, context=None):
# module = self.browse(cr, uid, ids[0], context=context)
# modules = self.pool.get('edu.module').search(cr, uid, [('parent_id', '=', module.parent_id.id)])
# self.write(cr, uid, modules, {'case_default': False}, context=context)
# self.write(cr, uid, ids, {'case_default': True}, context=context)
# return True
# def set_draft(self, cr, uid, ids, context=None):
# for module in self.browse(cr, uid, ids, context=context):
# self.write(cr, uid, module.id, {
# 'state': 'draft',
# 'date_approved': False,
# }, context=context)
# work_obj = self.pool.get('edu.module.work')
# works = work_obj.search(cr, uid, [('module', 'in', ids)], context=context)
# work_obj.set_draft(cr, uid, works, context=context)
# return True
#
# def set_confirmed(self, cr, uid, ids, context=None):
# for module in self.browse(cr, uid, ids, context=context):
# self.write(cr, uid, module.id, {
# 'state': 'confirmed',
# }, context=context)
# work_obj = self.pool.get('edu.module.work')
# works = work_obj.search(cr, uid, [('module', 'in', ids)], context=context)
# work_obj.set_confirmed(cr, uid, works, context=context)
# return True
#
# def set_validated(self, cr, uid, ids, context=None):
# for module in self.browse(cr, uid, ids, context=context):
# self.write(cr, uid, module.id, {
# 'state': 'validated',
# }, context=context)
# work_obj = self.pool.get('edu.module.work')
# works = work_obj.search(cr, uid, [('module', 'in', ids)], context=context)
# work_obj.set_validated(cr, uid, works, context=context)
# return True
# def set_approved(self, cr, uid, ids, context=None):
# for module in self.browse(cr, uid, ids, context=context):
# self.write(cr, uid, module.id, {
# 'state': 'approved',
# 'date_approved': fields.date.context_today(self, cr, uid, context=context),
# 'user_approved': uid,
# 'code': module.code =='/' and self.pool.get('ir.sequence').get(cr, uid, 'edu.module') or module.code or '/'
# }, context=context)
# work_obj = self.pool.get('edu.module.work')
# works = work_obj.search(cr, uid, [('module', 'in', ids)], context=context)
# work_obj.set_approved(cr, uid, works, context=context)
# return True
# def set_canceled(self, cr, uid, ids, context=None):
# for module in self.browse(cr, uid, ids, context=context):
# self.write(cr, uid, module.id, {
# 'state': 'canceled',
# }, context=context)
# work_obj = self.pool.get('edu.module.work')
# works = work_obj.search(cr, uid, [('module', 'in', ids)], context=context)
# work_obj.set_canceled(cr, uid, works, context=context)
# return True
# def set_rejected(self, cr, uid, ids, context=None):
# for module in self.browse(cr, uid, ids, context=context):
# self.write(cr, uid, module.id, {
# 'state': 'rejected',
# }, context=context)
# work_obj = self.pool.get('edu.module.work')
# works = work_obj.search(cr, uid, [('module', 'in', ids)], context=context)
# work_obj.set_rejected(cr, uid, works, context=context)
# return True
# Access Functions
# def create(self, cr, uid, vals, context=None):
# if context is None:
# context = {}
# if vals.get('code', '/') == '/':
# vals['code'] = self.pool.get('ir.sequence').get(cr, uid, 'edu.module') or '/'
# context.update({'mail_create_nolog': True})
# new_id = super(edu_module, self).create(cr, uid, vals, context=context)
# return new_id
# def copy(self, cr, uid, id, default=None, context=None):
# default = default or {}
# default.update({
# 'case_default': False,
# 'code': self.pool.get('ir.sequence').get(cr, uid, 'edu.module'),
# 'plan_ids':False,
# })
# return super(edu_module, self).copy(cr, uid, id, default, context=context)
# def _get_employee(self, cr, uid, context):
# ids = self.pool.get('hr.employee').search(cr, uid, [('resource_id.user_id','=',uid)], context=context)
# if not len(ids):
# return False
# return ids[0]
@api.one
@api.depends('works.st_hours','works.seance_hours')
def _compute_hours(self):
self.eff_st_hours = sum(work.st_hours for work in self.works)
self.eff_credits = self.eff_st_hours / 36
self.eff_seance_hours = sum(work.seance_hours for work in self.works)
# Onchange Functions
# def onchange_parent_id(self, cr, uid, ids, module, context=None):
# if module:
# module = self.pool.get('edu.module').browse(cr, uid, module, context=context)
# return {'value': {
# 'name': module.name,
# 'short_name': module.short_name,
# 'program': module.program.id,
# 'section': module.section.id,
# 'subsection': module.subsection.id,
# 'employee': module.employee.id,
# 'location_id': module.location_id.id,
# 'credits': module.credits,
# 'st_hours': module.st_hours,
# 'seance_hours': module.seance_hours,
# }}
# return {'value': {}}
# OpenChatter functions
# def _needaction_domain_get(self, cr, uid, context=None):
# if self.pool.get('res.users').has_group(cr, uid, 'irsid_edu.group_edu_prorector'):
# dom = [('state', '=', 'validated')]
# return dom
# if self.pool.get('res.users').has_group(cr, uid, 'irsid_edu.group_edu_manager'):
# dom = [('state', '=', 'confirmed')]
# return dom
# if self.pool.get('res.users').has_group(cr, uid, 'irsid_edu.group_edu_teacher'):
# dom = [('state', 'in', ['draft'])]
# return dom
# return False
# Fields
program = fields.Many2one(
comodel_name = 'edu.program',
string = 'Program',
required = True,
ondelete = 'cascade',
readonly = True,
states = {'draft': [('readonly', False)]},
)
section = fields.Many2one(
comodel_name = 'edu.program.section',
string = 'Program Section',
readonly = True,
states = {'draft': [('readonly', False)]},
)
subsection = fields.Many2one(
comodel_name = 'edu.program.subsection',
string = 'Program Subsection',
readonly = True,
states = {'draft': [('readonly', False)]},
)
parent_id = fields.Many2one(
comodel_name = 'edu.module',
string = 'Parent Module',
readonly = True,
states = {'draft': [('readonly', False)]},
)
child_ids = fields.One2many(
comodel_name = 'edu.module',
inverse_name = 'parent_id',
string = 'Child Modules',
readonly = True,
states = {'draft': [('readonly', False)]},
)
prior_ids = fields.Many2many(
comodel_name = 'edu.module',
relation = 'edu_module_priority_rel',
column1 = 'module',
column2 = 'prior_id',
string='Prior Modules',
readonly = True,
states = {'draft': [('readonly', False)]},
)
posterior_ids = fields.Many2many(
comodel_name = 'edu.module',
relation = 'edu_module_priority_rel',
column1 = 'prior_id',
column2 = 'module',
string = 'Posterior Modules',
readonly = True,
states = {'draft': [('readonly', False)]},
)
competences = fields.Many2many(
comodel_name = 'edu.competence',
string = 'Competences',
readonly = True,
states = {'draft': [('readonly', False)]},
)
location = fields.Many2one(
comodel_name = 'stock.location',
string = 'Location',
readonly = True,
states = {'draft': [('readonly', False)]},
)
employee = fields.Many2one(
comodel_name = 'hr.employee',
string = 'Employee',
readonly = True,
states = {'draft': [('readonly', False)]},
)
credits = fields.Float(
string = 'Credits',
required=True,
readonly = True,
states = {'draft': [('readonly', False)]},
)
st_hours = fields.Float(
string = 'Student Hours',
required=True,
readonly = True,
states = {'draft': [('readonly', False)]},
)
seance_hours = fields.Float(
string = 'Seance Hours',
required=True,
readonly = True,
states = {'draft': [('readonly', False)]},
)
eff_credits = fields.Float(
string = 'Effective Credits',
readonly = True,
compute = _compute_hours,
)
eff_st_hours = fields.Float(
string = 'Effective Student Hours',
readonly = True,
compute = _compute_hours,
)
eff_seance_hours = fields.Float(
string = 'Effective Seance Hours',
readonly = True,
compute = _compute_hours,
)
works = fields.One2many(
comodel_name = 'edu.module.work',
inverse_name = 'module',
string = 'Module Work',
readonly = True,
states = {'draft': [('readonly', False)]},
)
plans = fields.Many2many(
comodel_name = 'edu.plan',
string = 'Plans',
readonly = True,
states = {'draft': [('readonly', False)]},
)
description = fields.Html(
string='Description',
readonly = True,
states = {'draft': [('readonly', False)]},
)
| prospwro/odoo | addons/irsid_edu/models/module.py | Python | agpl-3.0 | 11,983 |
# © Christian Sommerfeldt Øien
# All rights reserved
class Pause:
def __init__(self, d):
self.span = d
class Note(Pause):
def __init__(self, d, i, p):
Pause.__init__(self, d)
self.duration = self.span
self.label = i
self.params = p
def __call__(self):
return (self.duration, self.label, self.params)
class ImpliedDurationNote(Note):
def __call__(self):
return (self.label, self.params)
class CompositionFilter:
def __init__(self, i, p):
self.label = i
self.params = p
def __call__(self, duration):
return (duration, self.label, self.params)
class NoteComposition(Note):
def __init__(self):
Pause.__init__(self, 0)
self.score = []
self.filters = []
def sequence(self, t, notes):
r = []
for n in notes:
n.time = t
if isinstance(n, Note):
r.append(n)
t += n.span
if self.span < t:
self.span = t
self.score.extend(r)
def __call__(self):
r = [[f(self.span) for f in self.filters]]
for note in self.score:
r.append((note.time, note()))
        # improve: if note is a NoteComposition with no filters, collapse it
        # onto self.score, offset by note.time
        # (deferred for now; this is only an optimization)
return r
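if __name__ == "__main__":
    # Minimal usage sketch (illustrative; "sine" is an assumed instrument
    # label, not something this module defines): two notes in sequence.
    c = NoteComposition()
    c.sequence(0, [Note(1, "sine", [440]), Note(1, "sine", [660])])
    print(c())  # [[]], then one (time, (duration, label, params)) per note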
| biotty/rmg | sound/py/music.py | Python | bsd-2-clause | 1,436 |
#
# Copyright (C) 2013, 2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#
"""
Configuration variables that can be set at build time
"""
import ConfigParser
import os
cfg = ConfigParser.ConfigParser()
_filepath = os.path.abspath(__file__)
_srcdir = os.path.abspath(os.path.join(os.path.dirname(_filepath), ".."))
cfgpath = os.path.join(os.path.dirname(_filepath), "cli.cfg")
_istest = "VIRTINST_TEST_SUITE" in os.environ
if os.path.exists(cfgpath):
cfg.read(cfgpath)
def _split_list(commastr):
return [d for d in commastr.split(",") if d]
def _get_param(name, default):
if _istest:
return default
try:
return cfg.get("config", name)
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
return default
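# Illustrative cli.cfg snippet (assumption: keys match the _get_param calls
# below; the real file is generated at build time):
#   [config]
#   prefix = /usr/local
#   preferred_distros = fedora,rhel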
def _setup_gsettings_path(schemadir):
"""
If running from the virt-manager.git srcdir, compile our gsettings
schema and use it directly
"""
import subprocess
os.environ["GSETTINGS_SCHEMA_DIR"] = schemadir
ret = subprocess.call(["glib-compile-schemas", "--strict", schemadir])
if ret != 0:
raise RuntimeError("Failed to compile local gsettings schemas")
__version__ = "0.10.0"
__snapshot__ = 0
_usr_version = _get_param("pkgversion", "")
if _usr_version:
__version__ = _usr_version
# We should map this into the config somehow but I question if anyone cares
prefix = _get_param("prefix", "/usr")
gettext_dir = os.path.join(prefix, "share", "locale")
install_asset_dir = os.path.join(prefix, "share", "virt-manager")
if os.getcwd() == _srcdir:
asset_dir = _srcdir
icon_dir = os.path.join(_srcdir, "data")
_setup_gsettings_path(icon_dir)
else:
asset_dir = install_asset_dir
icon_dir = os.path.join(asset_dir, "icons")
default_qemu_user = _get_param("default_qemu_user", "root")
stable_defaults = bool(int(_get_param("stable_defaults", "0")))
preferred_distros = _split_list(_get_param("preferred_distros", ""))
hv_packages = _split_list(_get_param("hv_packages", ""))
askpass_package = _split_list(_get_param("askpass_packages", ""))
libvirt_packages = _split_list(_get_param("libvirt_packages", ""))
default_graphics = _get_param("default_graphics", "spice")
| giuseppe/virt-manager | virtcli/cliconfig.py | Python | gpl-2.0 | 2,910 |
# coding=utf-8
# Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
import pytest
from distutils import version
import sys
from _pytest.config import get_plugin_manager
from pkg_resources import iter_entry_points
from _jb_runner_tools import jb_patch_separator, jb_doc_args, JB_DISABLE_BUFFERING, start_protocol, parse_arguments, \
set_parallel_mode
from teamcity import pytest_plugin
if __name__ == '__main__':
path, targets, additional_args = parse_arguments()
sys.argv += additional_args
joined_targets = jb_patch_separator(targets, fs_glue="/", python_glue="::", fs_to_python_glue=".py::")
# When file is launched in pytest it should be file.py: you can't provide it as bare module
joined_targets = [t + ".py" if ":" not in t else t for t in joined_targets]
sys.argv += [path] if path else joined_targets
# plugin is discovered automatically in 3, but not in 2
# to prevent "plugin already registered" problem we check it first
plugins_to_load = []
if not get_plugin_manager().hasplugin("pytest-teamcity"):
if "pytest-teamcity" not in map(lambda e: e.name, iter_entry_points(group='pytest11', name=None)):
plugins_to_load.append(pytest_plugin)
args = sys.argv[1:]
if "--jb-show-summary" in args:
args.remove("--jb-show-summary")
elif version.LooseVersion(pytest.__version__) >= version.LooseVersion("6.0"):
args += ["--no-header", "--no-summary", "-q"]
if JB_DISABLE_BUFFERING and "-s" not in args:
args += ["-s"]
jb_doc_args("pytest", args)
class Plugin:
@staticmethod
def pytest_configure(config):
if getattr(config.option, "numprocesses", None):
set_parallel_mode()
start_protocol()
sys.exit(pytest.main(args, plugins_to_load + [Plugin]))
| siosio/intellij-community | python/helpers/pycharm/_jb_pytest_runner.py | Python | apache-2.0 | 1,933 |
# !/usr/bin/env python
# -*- coding: utf-8 -*-
from algos import CollectionException, Collection, UNLIMITED
class QueueException(CollectionException):
pass
class Queue(Collection):
def __init__(self, capacity=UNLIMITED):
if capacity == 0 or capacity < UNLIMITED:
raise QueueException(
                'Capacity must be an int > 0 or UNLIMITED')
self._capacity = capacity
self._queue = []
def enqueue(self, item):
"""Currently implemented with a O(n) solution"""
if self.full():
raise QueueException('already full')
self._queue.insert(0, item)
def dequeue(self):
if self.empty():
raise QueueException('already empty')
return self._queue.pop()
def peek(self):
if self.empty():
raise QueueException('already empty')
return self._queue[-1]
def empty(self):
return len(self._queue) == 0
def full(self):
return len(self._queue) == self._capacity
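if __name__ == '__main__':
    # Minimal usage sketch (illustrative; assumes UNLIMITED is a negative
    # sentinel so small positive capacities pass the constructor check):
    q = Queue(capacity=2)
    q.enqueue('a')
    q.enqueue('b')
    assert q.full()
    assert q.peek() == 'a'       # FIFO: 'a' entered first
    assert q.dequeue() == 'a'
    assert q.dequeue() == 'b'
    assert q.empty()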
| DiogoNeves/PyAlgo | algos/queue.py | Python | mit | 1,027 |
# -*- coding: utf-8 -*-
'''
Exodus Add-on
Copyright (C) 2016 Exodus
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
class source:
def __init__(self):
self.domains = ['scenedown.in']
self.base_link = 'http://scenedown.in'
self.search_link = '/search/%s/feed/rss2/'
def movie(self, imdb, title, year):
try:
url = {'imdb': imdb, 'title': title, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, year):
try:
url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if url == None: return
url = urlparse.parse_qs(url)
url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url])
url['title'], url['premiered'], url['season'], url['episode'] = title, premiered, season, episode
url = urllib.urlencode(url)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
if debrid.status() == False: raise Exception()
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']
query = '%s S%02dE%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year'])
query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
url = self.search_link % urllib.quote_plus(query)
url = urlparse.urljoin(self.base_link, url)
r = client.request(url)
posts = client.parseDOM(r, 'item')
hostDict = hostprDict + hostDict
items = []
for post in posts:
try:
t = client.parseDOM(post, 'title')[0]
u = client.parseDOM(post, 'enclosure', ret='url', attrs={'type': 'video.+?'})
if not u: raise Exception()
c = client.parseDOM(post, 'content.+?')[0]
s = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+) (?:GB|GiB|MB|MiB))', c)
s = s[0] if s else '0'
u = client.parseDOM(c, 'a', ret='href')
items += [(t, i, s) for i in u]
except:
pass
for item in items:
try:
name = item[0]
name = client.replaceHTMLCodes(name)
t = re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*|3D)(\.|\)|\]|\s|)(.+|)', '', name)
if not cleantitle.get(t) == cleantitle.get(title): raise Exception()
y = re.findall('[\.|\(|\[|\s](\d{4}|S\d*E\d*|S\d*)[\.|\)|\]|\s]', name)[-1].upper()
if not y == hdlr: raise Exception()
fmt = re.sub('(.+)(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*)(\.|\)|\]|\s)', '', name.upper())
fmt = re.split('\.|\(|\)|\[|\]|\s|\-', fmt)
fmt = [i.lower() for i in fmt]
if any(i.endswith(('subs', 'sub', 'dubbed', 'dub')) for i in fmt): raise Exception()
if any(i in ['extras'] for i in fmt): raise Exception()
if '1080p' in fmt: quality = '1080p'
elif '720p' in fmt: quality = 'HD'
else: quality = 'SD'
if any(i in ['dvdscr', 'r5', 'r6'] for i in fmt): quality = 'SCR'
elif any(i in ['camrip', 'tsrip', 'hdcam', 'hdts', 'dvdcam', 'dvdts', 'cam', 'telesync', 'ts'] for i in fmt): quality = 'CAM'
info = []
if '3d' in fmt: info.append('3D')
try:
size = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+) (?:GB|GiB|MB|MiB))', item[2])[-1]
div = 1 if size.endswith(('GB', 'GiB')) else 1024
size = float(re.sub('[^0-9|/.|/,]', '', size))/div
size = '%.2f GB' % size
info.append(size)
except:
pass
if any(i in ['hevc', 'h265', 'x265'] for i in fmt): info.append('HEVC')
info = ' | '.join(info)
url = item[1]
if any(x in url for x in ['.rar', '.zip', '.iso']): raise Exception()
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
if not host in hostDict: raise Exception()
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({'source': host, 'quality': quality, 'provider': 'Scenedown', 'url': url, 'info': info, 'direct': False, 'debridonly': True})
except:
pass
check = [i for i in sources if not i['quality'] == 'CAM']
if check: sources = check
return sources
except:
return sources
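    # Illustrative shape of one entry yielded by sources() (assumption --
    # actual values depend on the scraped RSS post):
    #   {'source': 'uploaded.net', 'quality': 'HD', 'provider': 'Scenedown',
    #    'url': 'http://...', 'info': '1.40 GB', 'direct': False,
    #    'debridonly': True}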
def resolve(self, url):
return url
| JamesLinEngineer/RKMC | addons/plugin.video.phstreams/resources/lib/sources/scenedown_mv_tv.py | Python | gpl-2.0 | 6,721 |
from setuptools import setup, find_packages
setup(
name='hello_cnn',
version='0.0.1',
    description='A text classifier that leverages CNNs',
url='https://github.com/nryotaro/hello_cnn',
author='Nakamura, Ryotaro',
author_email='[email protected]',
license='Copyright (c) 2017 Nakamura, Ryotaro',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Programming Language :: Python :: 3.6'],
packages=find_packages(exclude=['tests']),
install_requires=['tensorflow', 'gensim', 'nltk'],
python_requires='>=3.6.1',
extras_require={'dev': ['pytest', 'jupyter']},
entry_points={
'console_scripts': [
'hello_cnn=hello_cnn:main',
],
})
| nryotaro/hello_cnn | setup.py | Python | mit | 903 |
# Copyright (c) 2008 Yann Ramin
# This file is part of quickmovie.
#
# quickmovie is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# quickmovie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with quickmovie. If not, see <http://www.gnu.org/licenses/>.
import cherrypy
from mako.template import Template
from mako.lookup import TemplateLookup
import gettext
def template(name, args):
look = TemplateLookup(directories=['template'], output_encoding = 'utf-8')
temp = look.get_template(name)
    args['session'] = cherrypy.session # Expose the cherrypy session to templates
t = gettext.translation('quickmovie', fallback = True)
args['_'] = t.ugettext
args['url'] = cherrypy.url
return temp.render_unicode(**args)
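# Illustrative call (assumption: an 'index.html' exists under template/):
#   html = template('index.html', {'movies': movie_list})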
| theatrus/quickmovie | quickmovie/template.py | Python | gpl-3.0 | 1,168 |
from django.conf import settings
from xbrowse.variant_search import utils as search_utils
from xbrowse_server.api.utils import add_extra_info_to_variants_project
from xbrowse_server.mall import get_reference, get_mall
def get_variants_in_gene(family_group, gene_id, variant_filter=None, quality_filter=None):
"""
"""
variants_by_family = []
for family in family_group.get_families():
variant_list = list(get_mall(family.project).variant_store.get_variants_in_gene(
family.project.project_id,
family.family_id,
gene_id,
variant_filter=variant_filter
))
variant_list = search_utils.filter_gene_variants_by_variant_filter(variant_list, gene_id, variant_filter)
add_extra_info_to_variants_project(get_reference(), family.project, variant_list, add_family_tags=True, add_populations=True)
variants_by_family.append({
'variants': [v.toJSON() for v in variant_list],
'family_id': family.family_id,
'project_id': family.project.project_id,
'family_name': str(family),
})
return variants_by_family
| macarthur-lab/xbrowse | xbrowse_server/analysis/family_group.py | Python | agpl-3.0 | 1,157 |
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import include
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.project.volumes.backups \
import urls as backups_urls
from openstack_dashboard.dashboards.project.volumes.snapshots \
import urls as snapshot_urls
from openstack_dashboard.dashboards.project.volumes import views
from openstack_dashboard.dashboards.project.volumes.volumes \
import urls as volume_urls
urlpatterns = patterns(
'',
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^\?tab=volumes_and_snapshots__snapshots_tab$',
views.IndexView.as_view(), name='snapshots_tab'),
url(r'^\?tab=volumes_and_snapshots__volumes_tab$',
views.IndexView.as_view(), name='volumes_tab'),
url(r'^\?tab=volumes_and_snapshots__backups_tab$',
views.IndexView.as_view(), name='backups_tab'),
url(r'', include(volume_urls, namespace='volumes')),
url(r'backups/', include(backups_urls, namespace='backups')),
url(r'snapshots/', include(snapshot_urls, namespace='snapshots')),
)
| FNST-OpenStack/horizon | openstack_dashboard/dashboards/project/volumes/urls.py | Python | apache-2.0 | 1,686 |
from django.conf.urls import include, url
from django.contrib.auth.views import logout
from django.conf import settings
from . import views
urlpatterns = [
url(r'^login/', views.user_login, name='login'),
url(r'^logout/$', logout, {'next_page': settings.LOGOUT_REDIRECT_URL}, name='logout'),
url(r'^settings/', views.user_settings, name='settings'),
url(r'^help/', views.help, name='help'),
url(r'^commands/(?P<reservationId>\d+)/poweron/', views.powerOnMachine, name='power_on_machine'),
url(r'^commands/(?P<reservationId>\d+)/poweroff/', views.powerOffMachine, name='power_off_machine'),
url(r'^commands/(?P<reservationId>\d+)/markbroken/', views.markMachineBroken, name='mark_machine_broken'),
url(r'^myreservations/(?P<reservationId>\d+)/setupwizard/', views.setup_wizard, name='setup_wizard'),
url(r'^myreservations/(?P<reservationId>\d+)/release/', views.my_reservation_release, name='my_reservation_release'),
url(r'^myreservations/(?P<reservationId>\d+)/extend/', views.my_reservation_extend, name='my_reservation_extend'),
url(r'^myreservations/(?P<reservationId>\d+)/delete/', views.my_reservation_delete, name='my_reservation_delete'),
url(r'^myreservations/(?P<reservationId>\d+)/', views.my_reservation, name='my_reservation'),
url(r'^myreservations/', views.my_reservations, name='my_reservations'),
url(r'^machines/(?P<machineId>.+)/failed/', views.reserve_machine_failed, name='reserve_machine_failed'),
url(r'^machines/(?P<machineId>.+)/done/', views.reserve_machine_done, name='reserve_machine_done'),
url(r'^machines/(?P<machineId>.+)/', views.show_reservations_machine, name='show_reservations_machine'),
url(r'^machines/', views.show_machines, name='show_machines'),
url(r'^', views.dashboard, name='dashboard'),
]
| greenstatic/maas-reservation-system | maas_reservation_system/reservation_system/urls.py | Python | mit | 1,810 |
from __future__ import absolute_import
from django.db import models
from django.test import TestCase
from django.utils import six
from .models import (
First, Third, Parent, Child, Category, Record, Relation, Car, Driver)
class ManyToOneRegressionTests(TestCase):
def test_object_creation(self):
Third.objects.create(id='3', name='An example')
parent = Parent(name='fred')
parent.save()
Child.objects.create(name='bam-bam', parent=parent)
def test_fk_assignment_and_related_object_cache(self):
# Tests of ForeignKey assignment and the related-object cache (see #6886).
p = Parent.objects.create(name="Parent")
c = Child.objects.create(name="Child", parent=p)
# Look up the object again so that we get a "fresh" object.
c = Child.objects.get(name="Child")
p = c.parent
# Accessing the related object again returns the exactly same object.
self.assertTrue(c.parent is p)
# But if we kill the cache, we get a new object.
del c._parent_cache
self.assertFalse(c.parent is p)
# Assigning a new object results in that object getting cached immediately.
p2 = Parent.objects.create(name="Parent 2")
c.parent = p2
self.assertTrue(c.parent is p2)
# Assigning None succeeds if field is null=True.
p.bestchild = None
self.assertTrue(p.bestchild is None)
# bestchild should still be None after saving.
p.save()
self.assertTrue(p.bestchild is None)
# bestchild should still be None after fetching the object again.
p = Parent.objects.get(name="Parent")
self.assertTrue(p.bestchild is None)
# Assigning None fails: Child.parent is null=False.
self.assertRaises(ValueError, setattr, c, "parent", None)
# You also can't assign an object of the wrong type here
self.assertRaises(ValueError, setattr, c, "parent", First(id=1, second=1))
# Nor can you explicitly assign None to Child.parent during object
# creation (regression for #9649).
self.assertRaises(ValueError, Child, name='xyzzy', parent=None)
self.assertRaises(ValueError, Child.objects.create, name='xyzzy', parent=None)
# Creation using keyword argument should cache the related object.
p = Parent.objects.get(name="Parent")
c = Child(parent=p)
self.assertTrue(c.parent is p)
# Creation using keyword argument and unsaved related instance (#8070).
p = Parent()
c = Child(parent=p)
self.assertTrue(c.parent is p)
# Creation using attname keyword argument and an id will cause the
# related object to be fetched.
p = Parent.objects.get(name="Parent")
c = Child(parent_id=p.id)
self.assertFalse(c.parent is p)
self.assertEqual(c.parent, p)
def test_multiple_foreignkeys(self):
# Test of multiple ForeignKeys to the same model (bug #7125).
c1 = Category.objects.create(name='First')
c2 = Category.objects.create(name='Second')
c3 = Category.objects.create(name='Third')
r1 = Record.objects.create(category=c1)
r2 = Record.objects.create(category=c1)
r3 = Record.objects.create(category=c2)
r4 = Record.objects.create(category=c2)
r5 = Record.objects.create(category=c3)
        Relation.objects.create(left=r1, right=r2)
        Relation.objects.create(left=r3, right=r4)
        Relation.objects.create(left=r1, right=r3)
        Relation.objects.create(left=r5, right=r2)
        Relation.objects.create(left=r3, right=r2)
q1 = Relation.objects.filter(left__category__name__in=['First'], right__category__name__in=['Second'])
self.assertQuerysetEqual(q1, ["<Relation: First - Second>"])
q2 = Category.objects.filter(record__left_set__right__category__name='Second').order_by('name')
self.assertQuerysetEqual(q2, ["<Category: First>", "<Category: Second>"])
p = Parent.objects.create(name="Parent")
c = Child.objects.create(name="Child", parent=p)
self.assertRaises(ValueError, Child.objects.create, name="Grandchild", parent=c)
def test_fk_instantiation_outside_model(self):
# Regression for #12190 -- Should be able to instantiate a FK outside
# of a model, and interrogate its related field.
cat = models.ForeignKey(Category)
self.assertEqual('id', cat.rel.get_related_field().name)
def test_relation_unsaved(self):
# Test that the <field>_set manager does not join on Null value fields (#17541)
Third.objects.create(name='Third 1')
Third.objects.create(name='Third 2')
th = Third(name="testing")
        # The object isn't saved and thus the relation field is null - we won't even
# execute a query in this case.
with self.assertNumQueries(0):
self.assertEqual(th.child_set.count(), 0)
th.save()
        # Now the model is saved, so we will need to execute a query.
with self.assertNumQueries(1):
self.assertEqual(th.child_set.count(), 0)
def test_related_null_to_field(self):
c1 = Car.objects.create()
c2 = Car.objects.create()
d1 = Driver.objects.create()
self.assertIs(d1.car, None)
with self.assertNumQueries(0):
self.assertEqual(list(c1.drivers.all()), [])
| atruberg/django-custom | tests/many_to_one_regress/tests.py | Python | bsd-3-clause | 5,479 |
# -*- coding: utf-8 -*-
"""
Provides textual descriptions for :mod:`behave.model` elements.
"""
from behave.textutil import indent
# -----------------------------------------------------------------------------
# FUNCTIONS:
# -----------------------------------------------------------------------------
def escape_cell(cell):
"""
Escape table cell contents.
:param cell: Table cell (as unicode string).
:return: Escaped cell (as unicode string).
"""
cell = cell.replace(u'\\', u'\\\\')
cell = cell.replace(u'\n', u'\\n')
cell = cell.replace(u'|', u'\\|')
return cell
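# Illustrative doctest-style examples of the escaping rules above (inputs are
# hypothetical):
#
#     >>> escape_cell(u'a|b')
#     u'a\\|b'
#     >>> escape_cell(u'line1\nline2')
#     u'line1\\nline2'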
def escape_triple_quotes(text):
"""
Escape triple-quotes, used for multi-line text/doc-strings.
"""
return text.replace(u'"""', u'\\"\\"\\"')
# -----------------------------------------------------------------------------
# CLASS:
# -----------------------------------------------------------------------------
class ModelDescriptor(object):
@staticmethod
def describe_table(table, indentation=None):
"""
Provide a textual description of the table (as used w/ Gherkin).
:param table: Table to use (as :class:`behave.model.Table`)
:param indentation: Line prefix to use (as string, if any).
:return: Textual table description (as unicode string).
"""
# -- STEP: Determine output size of all cells.
cell_lengths = []
all_rows = [table.headings] + table.rows
for row in all_rows:
lengths = [len(escape_cell(c)) for c in row]
cell_lengths.append(lengths)
# -- STEP: Determine max. output size for each column.
max_lengths = []
for col in range(0, len(cell_lengths[0])):
max_lengths.append(max([c[col] for c in cell_lengths]))
# -- STEP: Build textual table description.
lines = []
for r, row in enumerate(all_rows):
line = u"|"
for c, (cell, max_length) in enumerate(zip(row, max_lengths)):
pad_size = max_length - cell_lengths[r][c]
line += u" %s%s |" % (escape_cell(cell), " " * pad_size)
line += u"\n"
lines.append(line)
if indentation:
return indent(lines, indentation)
# -- OTHERWISE:
return u"".join(lines)
@staticmethod
def describe_docstring(doc_string, indentation=None):
"""
Provide a textual description of the multi-line text/triple-quoted
doc-string (as used w/ Gherkin).
:param doc_string: Multi-line text to use.
:param indentation: Line prefix to use (as string, if any).
        :return: Textual doc-string description (as unicode string).
"""
text = escape_triple_quotes(doc_string)
text = u'"""\n' + text + '\n"""\n'
if indentation:
text = indent(text, indentation)
return text
class ModelPrinter(ModelDescriptor):
def __init__(self, stream):
super(ModelPrinter, self).__init__()
self.stream = stream
def print_table(self, table, indentation=None):
self.stream.write(self.describe_table(table, indentation))
self.stream.flush()
def print_docstring(self, text, indentation=None):
self.stream.write(self.describe_docstring(text, indentation))
self.stream.flush()
| WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/behave/model_describe.py | Python | bsd-3-clause | 3,364 |
# -*- coding: utf-8 -*-
from decimal import Decimal
import logging
from django.core.cache import cache
from django.db.models import Sum
log = logging.getLogger(__name__)
CACHE_PREFIX = 'dgsproxy_stats_'
class ProxyStats(object):
def __init__(self):
self.hits_proxied = 0
self.hits_cached = 0
self.num_resources = 0
self.cache_size = 0
self.rate_limit = 0
self.rate_limit_remain = 0
def build_stats(self):
# Get values from cache
self.hits_proxied = cache.get('%s_hits_backend' % CACHE_PREFIX, 0)
self.hits_cached = cache.get('%s_hits_cache' % CACHE_PREFIX, 0)
        self.rate_limit = cache.get('%s_rate_limit' % CACHE_PREFIX, 0)
        self.rate_limit_remain = cache.get('%s_rate_limit_remain' % CACHE_PREFIX, 0)
# Get values from db
from dgsproxy.models import CachedResource
self.num_resources = CachedResource.objects.count()
try:
total_size = int(CachedResource.objects.aggregate(Sum('filesize'))['filesize__sum'])
        except (TypeError, ValueError):
            # aggregate() returns None for the sum when the table is empty
total_size = 0
self.cache_size = total_size
def get_stats(self):
self.build_stats()
stats = {
'hits_proxied': self.hits_proxied,
'hits_cached': self.hits_cached,
'num_resources': self.num_resources,
'cache_size': '%.2f' % (float(self.cache_size) / 1024 / 1024),
'rate_limit': self.rate_limit,
'rate_limit_remain': self.rate_limit_remain
}
return stats
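    # Illustrative usage (values are hypothetical and key order may differ):
    #
    #     >>> ProxyStats().get_stats()
    #     {'hits_proxied': 0, 'hits_cached': 0, 'num_resources': 0,
    #      'cache_size': '0.00', 'rate_limit': 0, 'rate_limit_remain': 0}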
def set_rate_limit(self, limit, remain):
pass
def set_rate_limit(limit, remain):
log.debug('Update rate-limit: %s / %s' % (remain, limit))
# Write values to cache
    cache.set('%s_rate_limit' % CACHE_PREFIX, int(limit))
    cache.set('%s_rate_limit_remain' % CACHE_PREFIX, int(remain))
def set_hit(target):
if target in ('cache', 'backend'):
log.debug('Add %s hit' % target)
key = '%s_hits_%s' % (CACHE_PREFIX, target)
if cache.get(key):
cache.incr(key)
else:
cache.set(key, 1)
| hzlf/discogs-proxy | website/apps/dgsproxy/stats.py | Python | mit | 2,133 |
import os
import time
from datetime import date
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import osgtest.library.service as service
class TestStartPBS(osgunittest.OSGTestCase):
pbs_config = """
create queue batch queue_type=execution
set queue batch started=true
set queue batch enabled=true
set queue batch resources_default.nodes=1
set queue batch resources_default.walltime=3600
set server default_queue=batch
set server keep_completed = 600
set server job_nanny = True
set server scheduling=true
set server acl_hosts += *
set server acl_host_enable = True
"""
required_rpms = ['torque',
'torque-mom',
'torque-server',
'torque-scheduler',
'torque-client', # for qmgr
'munge']
def test_01_start_mom(self):
core.state['pbs_mom.started-service'] = False
core.skip_ok_unless_installed(*self.required_rpms, by_dependency=True)
self.skip_ok_if(service.is_running('pbs_mom'), 'PBS mom already running')
core.config['torque.mom-config'] = '/var/lib/torque/mom_priv/config'
files.write(core.config['torque.mom-config'],
"$pbsserver %s\n" % core.get_hostname(),
owner='pbs')
core.config['torque.mom-layout'] = '/var/lib/torque/mom_priv/mom.layout'
files.write(core.config['torque.mom-layout'],
"nodes=0",
owner='pbs')
service.check_start('pbs_mom')
def test_02_start_pbs_sched(self):
core.state['pbs_sched.started-service'] = False
core.skip_ok_unless_installed(*self.required_rpms, by_dependency=True)
self.skip_ok_if(service.is_running('pbs_sched'), 'PBS sched already running')
service.check_start('pbs_sched')
def test_03_start_trqauthd(self):
core.state['trqauthd.started-service'] = False
core.config['torque.pbs-servername-file'] = '/var/lib/torque/server_name'
core.skip_ok_unless_installed(*self.required_rpms, by_dependency=True)
self.skip_ok_if(service.is_running('trqauthd'), 'trqauthd is already running')
# set hostname as servername instead of localhost
# config required before starting trqauthd
files.write(core.config['torque.pbs-servername-file'],
"%s" % core.get_hostname(),
owner='pbs')
service.check_start('trqauthd')
def test_04_configure_pbs(self):
core.config['torque.pbs-nodes-file'] = '/var/lib/torque/server_priv/nodes'
core.config['torque.pbs-serverdb'] = '/var/lib/torque/server_priv/serverdb'
core.skip_ok_unless_installed(*self.required_rpms, by_dependency=True)
self.skip_bad_unless(service.is_running('trqauthd'), 'pbs_server requires trqauthd')
self.skip_ok_if(service.is_running('pbs_server'), 'pbs server already running')
files.preserve(core.config['torque.pbs-serverdb'], 'pbs')
if not os.path.exists(core.config['torque.pbs-serverdb']):
command = ('/usr/sbin/pbs_server -d /var/lib/torque -t create -f && '
'sleep 10 && /usr/bin/qterm')
stdout, _, fail = core.check_system(command, 'create initial pbs serverdb config', shell=True)
self.assert_(stdout.find('error') == -1, fail)
# This gets wiped if we write it before the initial 'service pbs_server create'
# However, this file needs to be in place before the service is started so we
# restart the service after 'initial configuration'
files.write(core.config['torque.pbs-nodes-file'], # add the local node as a compute node
"%s np=1 num_node_boards=1\n" % core.get_hostname(),
owner='pbs')
def test_05_start_pbs(self):
core.state['pbs_server.started-service'] = False
core.state['torque.nodes-up'] = False
core.skip_ok_unless_installed(*self.required_rpms, by_dependency=True)
self.skip_bad_unless(service.is_running('trqauthd'), 'pbs_server requires trqauthd')
self.skip_ok_if(service.is_running('pbs_server'), 'pbs server already running')
server_log = '/var/log/torque/server_logs/' + date.today().strftime('%Y%m%d')
try:
server_log_stat = os.stat(server_log)
except OSError:
server_log_stat = None
service.check_start('pbs_server')
# Wait until the server is up before writing the rest of the config
core.monitor_file(server_log, server_log_stat, '.*Server Ready.*', 60.0)
core.check_system("echo '%s' | qmgr %s" % (self.pbs_config, core.get_hostname()),
"Configuring pbs server",
shell=True)
        # wait up to 10 minutes for the server to recognize the node
start_time = time.time()
while (time.time() - start_time) < 600:
command = ('/usr/bin/qnodes', '-s', core.get_hostname())
stdout, _, fail = core.check_system(command, 'Get pbs node info')
self.assert_(stdout.find('error') == -1, fail)
            if stdout.find('state = free') != -1:
core.state['torque.nodes-up'] = True
break
if not core.state['torque.nodes-up']:
self.fail('PBS nodes not coming up')
| efajardo/osg-test | osgtest/tests/test_170_pbs.py | Python | apache-2.0 | 5,425 |
from django.dispatch import receiver
from django.core.urlresolvers import get_callable
from dbconnect.signals import connection_created_to
from signals import namespace_changed
from dbconnect.plugins.namespace.conf import settings
@receiver(connection_created_to)
def switch_namespace(sender, alias=None, connection=None, wrapper=None, *args, **kwargs):
    if wrapper.vendor not in settings.DBCONNECT_NAMESPACE_SUPPORTED_BACKENDS:
return
search_paths = get_callable(settings.DBCONNECT_NAMESPACE_ALIAS_MAPPER)(alias=alias, connection=connection, wrapper=wrapper)
if search_paths is None:
search_paths = []
else:
search_paths = list(search_paths)
if settings.DBCONNECT_NAMESPACE_FALLBACK_PATH:
search_paths = search_paths + list(settings.DBCONNECT_NAMESPACE_FALLBACK_PATH)
if not search_paths:
return
cursor = connection.cursor()
if settings.DBCONNECT_NAMESPACE_PRECHECK:
cursor.execute('SHOW search_path')
paths = cursor.fetchall()[0][0].split(',')
if paths == search_paths:
return
cursor.execute('SET search_path = %s;' % ','.join(['%s'] * len(search_paths)), search_paths)
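    # Illustrative expansion (hypothetical schema names): for
    # search_paths == ['tenant_1', 'public'] the statement above becomes
    #     SET search_path = %s,%s;
    # executed with parameters ('tenant_1', 'public').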
    namespace_changed.send(sender=sender, alias=alias, connection=connection, namespace=search_paths, **kwargs)
| allanlei/django-dbconnect | dbconnect/plugins/namespace/models.py | Python | bsd-2-clause | 1,320 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from barbican import api
from barbican.api import controllers
from barbican.common import hrefs
from barbican.common import resources as res
from barbican.common import utils
from barbican.common import validators
from barbican import i18n as u
from barbican.model import models
from barbican.model import repositories as repo
from barbican.queue import client as async_client
LOG = utils.getLogger(__name__)
def _order_not_found():
"""Throw exception indicating order not found."""
pecan.abort(404, u._('Not Found. Sorry but your order is in '
'another castle.'))
def _secret_not_in_order():
"""Throw exception that secret info is not available in the order."""
pecan.abort(400, u._("Secret metadata expected but not received."))
def _order_update_not_supported():
"""Throw exception that PUT operation is not supported for orders."""
pecan.abort(405, u._("Order update is not supported."))
def _order_update_not_supported_for_type(order_type):
"""Throw exception that update is not supported."""
pecan.abort(400, u._("Updates are not supported for order type "
"{0}.").format(order_type))
def _order_cannot_be_updated_if_not_pending(order_status):
"""Throw exception that order cannot be updated if not PENDING."""
pecan.abort(400, u._("Only PENDING orders can be updated. Order is in the"
"{0} state.").format(order_status))
def order_cannot_modify_order_type():
"""Throw exception that order type cannot be modified."""
pecan.abort(400, u._("Cannot modify order type."))
class OrderController(controllers.ACLMixin):
"""Handles Order retrieval and deletion requests."""
def __init__(self, order, queue_resource=None):
self.order = order
self.order_repo = repo.get_order_repository()
self.queue = queue_resource or async_client.TaskClient()
self.type_order_validator = validators.TypeOrderValidator()
@pecan.expose(generic=True)
def index(self, **kwargs):
pecan.abort(405) # HTTP 405 Method Not Allowed as default
@index.when(method='GET', template='json')
@controllers.handle_exceptions(u._('Order retrieval'))
@controllers.enforce_rbac('order:get')
def on_get(self, external_project_id):
return hrefs.convert_to_hrefs(self.order.to_dict_fields())
@index.when(method='PUT')
@controllers.handle_exceptions(u._('Order update'))
@controllers.enforce_rbac('order:put')
@controllers.enforce_content_types(['application/json'])
def on_put(self, external_project_id, **kwargs):
body = api.load_body(pecan.request,
validator=self.type_order_validator)
project = res.get_or_create_project(external_project_id)
order_type = body.get('type')
if self.order.type != order_type:
order_cannot_modify_order_type()
if models.OrderType.CERTIFICATE != self.order.type:
_order_update_not_supported_for_type(order_type)
if models.States.PENDING != self.order.status:
_order_cannot_be_updated_if_not_pending(self.order.status)
updated_meta = body.get('meta')
validators.validate_ca_id(project.id, updated_meta)
# TODO(chellygel): Put 'meta' into a separate order association
# entity.
self.queue.update_order(order_id=self.order.id,
project_id=external_project_id,
updated_meta=updated_meta)
@index.when(method='DELETE')
@utils.allow_all_content_types
@controllers.handle_exceptions(u._('Order deletion'))
@controllers.enforce_rbac('order:delete')
def on_delete(self, external_project_id, **kwargs):
self.order_repo.delete_entity_by_id(
entity_id=self.order.id,
external_project_id=external_project_id)
class OrdersController(controllers.ACLMixin):
"""Handles Order requests for Secret creation."""
def __init__(self, queue_resource=None):
LOG.debug('Creating OrdersController')
self.order_repo = repo.get_order_repository()
self.queue = queue_resource or async_client.TaskClient()
self.type_order_validator = validators.TypeOrderValidator()
@pecan.expose()
def _lookup(self, order_id, *remainder):
# NOTE(jaosorior): It's worth noting that even though this section
# actually does a lookup in the database regardless of the RBAC policy
# check, the execution only gets here if authentication of the user was
# previously successful.
controllers.assert_is_valid_uuid_from_uri(order_id)
ctx = controllers._get_barbican_context(pecan.request)
order = self.order_repo.get(entity_id=order_id,
external_project_id=ctx.project,
suppress_exception=True)
if not order:
_order_not_found()
return OrderController(order, self.order_repo), remainder
@pecan.expose(generic=True)
def index(self, **kwargs):
pecan.abort(405) # HTTP 405 Method Not Allowed as default
@index.when(method='GET', template='json')
@controllers.handle_exceptions(u._('Order(s) retrieval'))
@controllers.enforce_rbac('orders:get')
def on_get(self, external_project_id, **kw):
LOG.debug('Start orders on_get '
'for project-ID %s:', external_project_id)
result = self.order_repo.get_by_create_date(
external_project_id, offset_arg=kw.get('offset', 0),
limit_arg=kw.get('limit', None), meta_arg=kw.get('meta', None),
suppress_exception=True)
orders, offset, limit, total = result
if not orders:
orders_resp_overall = {'orders': [],
'total': total}
else:
orders_resp = [
hrefs.convert_to_hrefs(o.to_dict_fields())
for o in orders
]
orders_resp_overall = hrefs.add_nav_hrefs('orders',
offset, limit, total,
{'orders': orders_resp})
orders_resp_overall.update({'total': total})
return orders_resp_overall
@index.when(method='PUT', template='json')
@controllers.handle_exceptions(u._('Order update'))
@controllers.enforce_rbac('orders:put')
def on_put(self, external_project_id, **kwargs):
_order_update_not_supported()
@index.when(method='POST', template='json')
@controllers.handle_exceptions(u._('Order creation'))
@controllers.enforce_rbac('orders:post')
@controllers.enforce_content_types(['application/json'])
def on_post(self, external_project_id, **kwargs):
project = res.get_or_create_project(external_project_id)
body = api.load_body(pecan.request,
validator=self.type_order_validator)
order_type = body.get('type')
order_meta = body.get('meta')
request_type = order_meta.get('request_type')
LOG.debug('Processing order type %s, request type %s',
order_type, request_type)
if order_type == models.OrderType.CERTIFICATE:
validators.validate_ca_id(project.id, body.get('meta'))
if request_type == 'stored-key':
container_ref = order_meta.get('container_ref')
validators.validate_stored_key_rsa_container(
external_project_id,
container_ref, pecan.request)
new_order = models.Order()
new_order.meta = body.get('meta')
new_order.type = order_type
new_order.project_id = project.id
ctxt = controllers._get_barbican_context(pecan.request)
if ctxt:
new_order.creator_id = ctxt.user
self.order_repo.create_from(new_order)
# Grab our id before commit due to obj expiration from sqlalchemy
order_id = new_order.id
# Force commit to avoid async issues with the workers
repo.commit()
self.queue.process_type_order(order_id=order_id,
project_id=external_project_id)
url = hrefs.convert_order_to_href(order_id)
pecan.response.status = 202
pecan.response.headers['Location'] = url
return {'order_ref': url}
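    # Illustrative request/response flow for on_post (paths and values are
    # hypothetical):
    #
    #     POST /v1/orders      {"type": "key", "meta": {...}}
    #     -> 202 Accepted
    #     -> Location: <base>/v1/orders/<order_id>
    #     -> body: {"order_ref": "<base>/v1/orders/<order_id>"}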
| MCDong/barbican | barbican/api/controllers/orders.py | Python | apache-2.0 | 9,049 |
import unittest
import shutil
import tempfile
import os.path
import sqlite3
import createrepo_c as cr
from fixtures import *
class TestCaseSqlite(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp(prefix="createrepo_ctest-")
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_sqlite_basic_operations(self):
db_pri = cr.Sqlite(self.tmpdir+"/primary.db", cr.DB_PRIMARY)
self.assertTrue(db_pri)
self.assertTrue(os.path.isfile(self.tmpdir+"/primary.db"))
db_pri = cr.PrimarySqlite(self.tmpdir+"/primary2.db")
self.assertTrue(db_pri)
self.assertTrue(os.path.isfile(self.tmpdir+"/primary2.db"))
db_fil = cr.Sqlite(self.tmpdir+"/filelists.db", cr.DB_FILELISTS)
self.assertTrue(db_fil)
self.assertTrue(os.path.isfile(self.tmpdir+"/filelists.db"))
db_fil = cr.FilelistsSqlite(self.tmpdir+"/filelists2.db")
self.assertTrue(db_fil)
self.assertTrue(os.path.isfile(self.tmpdir+"/filelists2.db"))
db_oth = cr.Sqlite(self.tmpdir+"/other.db", cr.DB_OTHER)
self.assertTrue(db_oth)
self.assertTrue(os.path.isfile(self.tmpdir+"/other.db"))
db_oth = cr.OtherSqlite(self.tmpdir+"/other2.db")
self.assertTrue(db_oth)
self.assertTrue(os.path.isfile(self.tmpdir+"/other2.db"))
def test_sqlite_error_cases(self):
self.assertRaises(cr.CreaterepoCError, cr.Sqlite, self.tmpdir, cr.DB_PRIMARY)
self.assertRaises(ValueError, cr.Sqlite, self.tmpdir+"/foo.db", 55)
self.assertRaises(TypeError, cr.Sqlite, self.tmpdir+"/foo.db", None)
self.assertRaises(TypeError, cr.Sqlite, None, cr.DB_PRIMARY)
def test_sqlite_operations_on_closed_db(self):
pkg = cr.package_from_rpm(PKG_ARCHER_PATH)
path = os.path.join(self.tmpdir, "primary.db")
db = cr.Sqlite(path, cr.DB_PRIMARY)
self.assertTrue(db)
db.close()
self.assertRaises(cr.CreaterepoCError, db.add_pkg, pkg)
self.assertRaises(cr.CreaterepoCError, db.dbinfo_update, "somechecksum")
        db.close() # No error should be raised
        del db # No error should be raised
def test_sqlite_primary_schema(self):
path = os.path.join(self.tmpdir, "primary.db")
cr.PrimarySqlite(path)
self.assertTrue(os.path.isfile(path))
con = sqlite3.connect(path)
# Check tables
self.assertEqual(con.execute("""select name from sqlite_master where type="table";""").fetchall(),
[(u'db_info',),
(u'packages',),
(u'files',),
(u'requires',),
(u'provides',),
(u'conflicts',),
(u'obsoletes',),
(u'suggests',),
(u'enhances',),
(u'recommends',),
(u'supplements',),
])
# Check indexes
self.assertEqual(con.execute("""select name from sqlite_master where type="index";""").fetchall(),
[(u'packagename',),
(u'packageId',),
(u'filenames',),
(u'pkgfiles',),
(u'pkgrequires',),
(u'requiresname',),
(u'pkgprovides',),
(u'providesname',),
(u'pkgconflicts',),
(u'pkgobsoletes',),
(u'pkgsuggests',),
(u'pkgenhances',),
(u'pkgrecommends',),
(u'pkgsupplements',),
])
# Check triggers
self.assertEqual(con.execute("""select name from sqlite_master where type="trigger";""").fetchall(),
[(u'removals',)])
def test_sqlite_filelists_schema(self):
path = os.path.join(self.tmpdir, "filelists.db")
cr.FilelistsSqlite(path)
self.assertTrue(os.path.isfile(path))
con = sqlite3.connect(path)
# Check tables
self.assertEqual(con.execute("""select name from sqlite_master where type="table";""").fetchall(),
[(u'db_info',), (u'packages',), (u'filelist',)])
# Check indexes
self.assertEqual(con.execute("""select name from sqlite_master where type="index";""").fetchall(),
[(u'keyfile',), (u'pkgId',), (u'dirnames',)])
# Check triggers
self.assertEqual(con.execute("""select name from sqlite_master where type="trigger";""").fetchall(),
[(u'remove_filelist',)])
def test_sqlite_other_schema(self):
path = os.path.join(self.tmpdir, "other.db")
cr.OtherSqlite(path)
self.assertTrue(os.path.isfile(path))
con = sqlite3.connect(path)
# Check tables
self.assertEqual(con.execute("""select name from sqlite_master where type="table";""").fetchall(),
[(u'db_info',), (u'packages',), (u'changelog',)])
# Check indexes
self.assertEqual(con.execute("""select name from sqlite_master where type="index";""").fetchall(),
[(u'keychange',), (u'pkgId',)])
# Check triggers
self.assertEqual(con.execute("""select name from sqlite_master where type="trigger";""").fetchall(),
[(u'remove_changelogs',)])
def test_sqlite_primary(self):
path = os.path.join(self.tmpdir, "primary.db")
db = cr.Sqlite(path, cr.DB_PRIMARY)
pkg = cr.package_from_rpm(PKG_ARCHER_PATH)
db.add_pkg(pkg)
self.assertRaises(TypeError, db.add_pkg, None)
self.assertRaises(TypeError, db.add_pkg, 123)
self.assertRaises(TypeError, db.add_pkg, "foo")
db.dbinfo_update("somechecksum")
self.assertRaises(TypeError, db.dbinfo_update, pkg)
self.assertRaises(TypeError, db.dbinfo_update, None)
self.assertRaises(TypeError, db.dbinfo_update, 123)
db.close()
self.assertTrue(os.path.isfile(path))
con = sqlite3.connect(path)
# Check packages table
res = con.execute("select * from packages").fetchall()
self.assertEqual(res,
[(1, u'4e0b775220c67f0f2c1fd2177e626b9c863a098130224ff09778ede25cea9a9e',
u'Archer', u'x86_64', u'3.4.5', u'2', u'6', u'Complex package.',
u'Archer package', u'http://soo_complex_package.eu/',
res[0][10], 1365416480, u'GPL', u'ISIS', u'Development/Tools',
u'localhost.localdomain', u'Archer-3.4.5-6.src.rpm', 280, 2865,
u'Sterling Archer', 3101, 0, 544, None, None, u'sha256')])
# Check provides table
self.assertEqual(con.execute("select * from provides").fetchall(),
[(u'bara', u'LE', u'0', u'22', None, 1),
(u'barb', u'GE', u'0', u'11.22.33', u'44', 1),
(u'barc', u'EQ', u'0', u'33', None, 1),
(u'bard', u'LT', u'0', u'44', None, 1),
(u'bare', u'GT', u'0', u'55', None, 1),
(u'Archer', u'EQ', u'2', u'3.4.5', u'6', 1),
(u'Archer(x86-64)', u'EQ', u'2', u'3.4.5', u'6', 1)])
# Check conflicts table
self.assertEqual(con.execute("select * from conflicts").fetchall(),
[(u'bba', u'LE', u'0', u'2222', None, 1),
(u'bbb', u'GE', u'0', u'1111.2222.3333', u'4444', 1),
(u'bbc', u'EQ', u'0', u'3333', None, 1),
(u'bbd', u'LT', u'0', u'4444', None, 1),
(u'bbe', u'GT', u'0', u'5555', None, 1)])
# Check obsoletes table
self.assertEqual(con.execute("select * from obsoletes").fetchall(),
[(u'aaa', u'LE', u'0', u'222', None, 1),
(u'aab', u'GE', u'0', u'111.2.3', u'4', 1),
(u'aac', u'EQ', u'0', u'333', None, 1),
(u'aad', u'LT', u'0', u'444', None, 1),
(u'aae', u'GT', u'0', u'555', None, 1)])
# Check requires table
self.assertEqual(con.execute("select * from requires").fetchall(),
[(u'fooa', u'LE', u'0', u'2', None, 1, u'FALSE'),
(u'foob', u'GE', u'0', u'1.0.0', u'1', 1, u'FALSE'),
(u'fooc', u'EQ', u'0', u'3', None, 1, u'FALSE'),
(u'food', u'LT', u'0', u'4', None, 1, u'FALSE'),
(u'fooe', u'GT', u'0', u'5', None, 1, u'FALSE'),
(u'foof', u'EQ', u'0', u'6', None, 1, u'TRUE')])
# Check files table
self.assertEqual(con.execute("select * from files").fetchall(),
[(u'/usr/bin/complex_a', u'file', 1)])
# Check db_info table
self.assertEqual(con.execute("select * from db_info").fetchall(),
[(10, u'somechecksum')])
def test_sqlite_filelists(self):
path = os.path.join(self.tmpdir, "filelists.db")
db = cr.Sqlite(path, cr.DB_FILELISTS)
pkg = cr.package_from_rpm(PKG_ARCHER_PATH)
db.add_pkg(pkg)
db.dbinfo_update("somechecksum2")
db.close()
self.assertTrue(os.path.isfile(path))
con = sqlite3.connect(path)
# Check packages table
self.assertEqual(con.execute("select * from packages").fetchall(),
[(1, u'4e0b775220c67f0f2c1fd2177e626b9c863a098130224ff09778ede25cea9a9e')])
# Check files table
self.assertEqual(set(con.execute("select * from filelist").fetchall()),
set([(1, u'/usr/share/doc', u'Archer-3.4.5', u'd'),
(1, u'/usr/bin', u'complex_a', u'f'),
(1, u'/usr/share/doc/Archer-3.4.5', u'README', u'f')]))
# Check db_info table
self.assertEqual(con.execute("select * from db_info").fetchall(),
[(10, u'somechecksum2')])
def test_sqlite_other(self):
path = os.path.join(self.tmpdir, "other.db")
db = cr.Sqlite(path, cr.DB_FILELISTS)
pkg = cr.package_from_rpm(PKG_ARCHER_PATH)
db.add_pkg(pkg)
db.dbinfo_update("somechecksum3")
db.close()
self.assertTrue(os.path.isfile(path))
con = sqlite3.connect(path)
# Check packages table
self.assertEqual(con.execute("select * from packages").fetchall(),
[(1, u'4e0b775220c67f0f2c1fd2177e626b9c863a098130224ff09778ede25cea9a9e')])
# Check filelist table
self.assertEqual(set(con.execute("select * from filelist").fetchall()),
set([(1, u'/usr/share/doc', u'Archer-3.4.5', u'd'),
(1, u'/usr/bin', u'complex_a', u'f'),
(1, u'/usr/share/doc/Archer-3.4.5', u'README', u'f')]))
# Check db_info table
self.assertEqual(con.execute("select * from db_info").fetchall(),
[(10, u'somechecksum3')])
| lmacken/createrepo_c | tests/python/tests/test_sqlite.py | Python | gpl-2.0 | 10,463 |
#!/usr/bin/python2.4
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the wavelet module."""
import unittest
import blip
import element
import ops
import wavelet
import waveservice
import simplejson
import testdata
class TestWavelet(unittest.TestCase):
"""Tests the wavelet class."""
def setUp(self):
self.waveservice = waveservice.WaveService()
def testWaveletProperties(self):
operation_queue = ops.OperationQueue()
TEST_DATA = simplejson.loads(testdata.json_string)
w = self.waveservice._wavelet_from_json(TEST_DATA,
operation_queue)
self.assertEquals(TEST_DATA['wavelet']['waveId'], w.wave_id)
self.assertEquals(TEST_DATA['wavelet']['rootThread']['id'],
w.root_thread.id)
self.assertEquals(TEST_DATA['wavelet']['rootThread']['location'],
w.root_thread.location)
self.assertEquals(len(TEST_DATA['wavelet']['rootThread']['blipIds']),
len(w.root_thread.blips))
b = w.root_blip
self.assertEquals(len(TEST_DATA['blips']['b+IvD7RCuWB']['replyThreadIds']),
len(b.reply_threads))
def testWaveletBlipMethods(self):
operation_queue = ops.OperationQueue()
TEST_DATA = simplejson.loads(testdata.json_string)
w = self.waveservice._wavelet_from_json(TEST_DATA,
operation_queue)
root_blip = w.root_blip
blip = root_blip.continue_thread()
self.assertEquals(blip.parent_blip_id, root_blip.parent_blip_id)
self.assertEquals(8, len(w.blips))
self.assertEquals(4, len(w.root_thread.blips))
if __name__ == '__main__':
unittest.main()
| frickler/WaveBots | waveapi/waveservice_test.py | Python | apache-2.0 | 2,240 |
import datetime
import json
import os
from flask import Flask, abort, g, render_template, session, request
from flask_bootstrap import Bootstrap
from flask_login import LoginManager, current_user
from flask_mail import Mail
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from config import config
bootstrap = Bootstrap()
moment = Moment()
db = SQLAlchemy()
mail = Mail()
login_manager = LoginManager()
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
moment.init_app(app)
db.init_app(app)
mail.init_app(app)
login_manager.init_app(app)
if app.config['USE_SAML']:
login_manager.login_view = 'auth.saml'
else:
login_manager.login_view = 'auth.login'
from .main import main
app.register_blueprint(main)
from .auth import auth
app.register_blueprint(auth, url_prefix="/auth")
@app.before_request
def before_request():
app.permanent_session_lifetime = datetime.timedelta(minutes=15)
session.permanent = True
session.modified = True
g.user = current_user
@app.errorhandler(503)
def maintenance(e):
with open(os.path.join(app.instance_path, 'maintenance.json')) as f:
maintenance_info = json.load(f)
return render_template('error/maintenance.html',
description=maintenance_info['description'],
outage_time=maintenance_info['outage_time'])
@app.before_request
def check_maintenance_mode():
if os.path.exists(os.path.join(app.instance_path, 'maintenance.json')):
if not request.cookies.get('authorized_maintainer', None):
return abort(503)
return app
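# Illustrative usage sketch (assumes a 'default' entry exists in `config`):
#
#     app = create_app('default')
#     app.run()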
| nycrecords/intranet | app/__init__.py | Python | mit | 1,884 |
from StrongHold import WeightedNewick
with open('data/data.dat') as input_data:
trees = [line.split('\n') for line in input_data.read().strip().split('\n\n')]
# The majority of the work is done by the Weighted Newick class in the Data Structures script.
distances = [str(WeightedNewick(tree[0]).distance(*tree[1].split())) for tree in trees]
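# Illustrative input block (hypothetical): each block is a weighted Newick
# tree followed by the two node names to query, blocks separated by blank
# lines:
#
#     (dog:42,cat:33)elephant:0;
#     dog cat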
# Print the answer.
print ' '.join(distances)
| crf1111/Bio-Informatics-Learning | Bio-StrongHold/src/Newick_Format_with_Edge_Weights.py | Python | mit | 400 |
"""Automatic keyboard layout switcher"""
import functools
import logging
import subprocess
from typing import Iterable
from typing import Set
import xkbgroup
import swytcher.settings as settings
import swytcher.xwindow as xwindow
from swytcher.util import suppress_err
log = logging.getLogger(__name__) # pylint: disable=invalid-name
# Move this to swytcher.system
@suppress_err(FileNotFoundError, log)
def notify(title: str, msg: str = '') -> None: # pragma: no cover
"""Use notify-send (if available) to inform user of layout switch."""
if not settings.NOTIFY:
return
cmd = [
'notify-send',
'--urgency=low',
'--expire-time=2000',
title,
msg
]
subprocess.call(cmd)
def change_layout(xkb: xkbgroup.XKeyboard, layout: str) -> bool:
"""Set layout; returns True if layout was changed, False otherwise"""
if xkb.group_name == layout: # check against current layout
log.debug("%r is already the active layout", layout)
return False # don't change layout if it's already correct
log.info("setting layout %r", layout)
xkb.group_name = layout
notify("Changed layout", layout)
return True
def _match_substrings(name_list: Iterable[str],
substrings: Iterable[str]) -> set:
"""Substring filter match"""
found_matches = set()
for name in name_list:
for substring in substrings:
if substring in name:
log.debug("Substring filter match: %r in %r", substring, name)
found_matches.update([name])
return found_matches
def matches(name_list: Iterable[str], strings: Iterable[str],
substrings: Iterable[str]) -> Set[str]:
"""Returns True if any of the strings in the two filters `strings` and
`substrings` occur in `name_list`."""
matched = (set(strings) & set(name_list) or
_match_substrings(name_list, substrings or {}))
if matched:
log.debug('%r matched %r from %r or %r',
name_list, matched, strings, substrings)
return matched
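# Illustrative doctest (window names are hypothetical):
#
#     >>> sorted(matches(['emacs', 'xterm'], ['emacs'], []))
#     ['emacs']
#     >>> matches(['Mozilla Firefox'], [], ['Firefox'])
#     {'Mozilla Firefox'}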
def change_callback(name_list, xkb, layouts: list) -> None: # pragma: no cover
"""Event handler when active window is changed"""
# NOTE: These extracted variables should be removed later
primary_filter = layouts[0]['strings']
primary_substrings = layouts[0]['substrings']
primary = layouts[0]['name']
secondary_filter = layouts[1]['strings']
secondary_substrings = layouts[1]['substrings']
secondary = layouts[1]['name']
# matched_layout = match_layout(name_list, layouts)
# if matched_layout:
# change_layout(xkb, matched_layout)
# else:
# change_layout(xkb, last_remembered_layout_for_window)
if matches(name_list, secondary_filter, secondary_substrings):
change_layout(xkb, secondary)
elif matches(name_list, primary_filter, primary_substrings):
change_layout(xkb, primary)
else:
log.debug("%r: No match, using default layout", name_list)
change_layout(xkb, xkb.groups_names[0])
def main(args=None): # pragma: no cover
"""Main"""
if not args:
pass
xkb = xkbgroup.XKeyboard()
layouts = settings.setup_layouts(xkb, settings.CONFIG_INI)
log.info("Layouts configured by setxkbmap: %s", layouts)
partial_cb = functools.partial(change_callback, xkb=xkb, layouts=layouts)
xwindow.run(partial_cb)
| eddie-dunn/swytcher | swytcher/swytcher.py | Python | mit | 3,436 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Module: preprocess_data
Reference: https://github.com/rizkiarm/LipNet
"""
# pylint: disable=too-many-locals, no-self-use, c-extension-no-member
import os
import fnmatch
import errno
import numpy as np
from scipy import ndimage
from scipy.misc import imresize
from skimage import io
import skvideo.io
import dlib
def mkdir_p(path):
"""
Make a directory
"""
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def find_files(directory, pattern):
"""
Find files
"""
for root, _, files in os.walk(directory):
for basename in files:
if fnmatch.fnmatch(basename, pattern):
filename = os.path.join(root, basename)
yield filename
class Video(object):
"""
Preprocess for Video
"""
def __init__(self, vtype='mouth', face_predictor_path=None):
if vtype == 'face' and face_predictor_path is None:
raise AttributeError('Face video need to be accompanied with face predictor')
self.face_predictor_path = face_predictor_path
self.vtype = vtype
self.face = None
self.mouth = None
self.data = None
self.length = None
def from_frames(self, path):
"""
Read from frames
"""
frames_path = sorted([os.path.join(path, x) for x in os.listdir(path)])
frames = [ndimage.imread(frame_path) for frame_path in frames_path]
self.handle_type(frames)
return self
def from_video(self, path):
"""
Read from videos
"""
frames = self.get_video_frames(path)
self.handle_type(frames)
return self
def from_array(self, frames):
"""
Read from array
"""
self.handle_type(frames)
return self
def handle_type(self, frames):
"""
Config video types
"""
if self.vtype == 'mouth':
self.process_frames_mouth(frames)
elif self.vtype == 'face':
self.process_frames_face(frames)
else:
raise Exception('Video type not found')
def process_frames_face(self, frames):
"""
Preprocess from frames using face detector
"""
detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor(self.face_predictor_path)
mouth_frames = self.get_frames_mouth(detector, predictor, frames)
self.face = np.array(frames)
self.mouth = np.array(mouth_frames)
if mouth_frames[0] is not None:
self.set_data(mouth_frames)
def process_frames_mouth(self, frames):
"""
Preprocess from frames using mouth detector
"""
self.face = np.array(frames)
self.mouth = np.array(frames)
self.set_data(frames)
def get_frames_mouth(self, detector, predictor, frames):
"""
Get frames using mouth crop
"""
mouth_width = 100
mouth_height = 50
horizontal_pad = 0.19
normalize_ratio = None
mouth_frames = []
for frame in frames:
dets = detector(frame, 1)
shape = None
for det in dets:
shape = predictor(frame, det)
i = -1
if shape is None: # Detector doesn't detect face, just return None
return [None]
mouth_points = []
for part in shape.parts():
i += 1
if i < 48: # Only take mouth region
continue
mouth_points.append((part.x, part.y))
np_mouth_points = np.array(mouth_points)
mouth_centroid = np.mean(np_mouth_points[:, -2:], axis=0)
if normalize_ratio is None:
mouth_left = np.min(np_mouth_points[:, :-1]) * (1.0 - horizontal_pad)
mouth_right = np.max(np_mouth_points[:, :-1]) * (1.0 + horizontal_pad)
normalize_ratio = mouth_width / float(mouth_right - mouth_left)
new_img_shape = (int(frame.shape[0] * normalize_ratio),
int(frame.shape[1] * normalize_ratio))
resized_img = imresize(frame, new_img_shape)
mouth_centroid_norm = mouth_centroid * normalize_ratio
mouth_l = int(mouth_centroid_norm[0] - mouth_width / 2)
mouth_r = int(mouth_centroid_norm[0] + mouth_width / 2)
mouth_t = int(mouth_centroid_norm[1] - mouth_height / 2)
mouth_b = int(mouth_centroid_norm[1] + mouth_height / 2)
mouth_crop_image = resized_img[mouth_t:mouth_b, mouth_l:mouth_r]
mouth_frames.append(mouth_crop_image)
return mouth_frames
def get_video_frames(self, path):
"""
Get video frames
"""
videogen = skvideo.io.vreader(path)
frames = np.array([frame for frame in videogen])
return frames
def set_data(self, frames):
"""
Prepare the input of model
"""
data_frames = []
for frame in frames:
#frame H x W x C
frame = frame.swapaxes(0, 1) # swap width and height to form format W x H x C
if len(frame.shape) < 3:
frame = np.array([frame]).swapaxes(0, 2).swapaxes(0, 1) # Add grayscale channel
data_frames.append(frame)
frames_n = len(data_frames)
data_frames = np.array(data_frames) # T x W x H x C
data_frames = np.rollaxis(data_frames, 3) # C x T x W x H
data_frames = data_frames.swapaxes(2, 3) # C x T x H x W = NCDHW
self.data = data_frames
self.length = frames_n
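        # Illustrative shape walk-through (hypothetical 75-frame RGB clip of
        # 50x100 mouth crops):
        #   input frames:   75 arrays of shape (50, 100, 3)   # H x W x C
        #   after swapaxes: (100, 50, 3) each                 # W x H x C
        #   stacked:        (75, 100, 50, 3)                  # T x W x H x C
        #   rollaxis(3):    (3, 75, 100, 50)                  # C x T x W x H
        #   swapaxes(2, 3): (3, 75, 50, 100)                  # C x T x H x W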
def preprocess(from_idx, to_idx, _params):
"""
Preprocess: Convert a video into the mouth images
"""
source_exts = '*.mpg'
src_path = _params['src_path']
tgt_path = _params['tgt_path']
face_predictor_path = './shape_predictor_68_face_landmarks.dat'
succ = set()
fail = set()
for idx in range(from_idx, to_idx):
s_id = 's' + str(idx) + '/'
source_path = src_path + '/' + s_id
target_path = tgt_path + '/' + s_id
fail_cnt = 0
for filepath in find_files(source_path, source_exts):
print("Processing: {}".format(filepath))
filepath_wo_ext = os.path.splitext(filepath)[0].split('/')[-2:]
target_dir = os.path.join(tgt_path, '/'.join(filepath_wo_ext))
if os.path.exists(target_dir):
continue
try:
video = Video(vtype='face', \
face_predictor_path=face_predictor_path).from_video(filepath)
mkdir_p(target_dir)
i = 0
if video.mouth[0] is None:
continue
for frame in video.mouth:
io.imsave(os.path.join(target_dir, "mouth_{0:03d}.png".format(i)), frame)
i += 1
except ValueError as error:
print(error)
fail_cnt += 1
if fail_cnt == 0:
succ.add(idx)
else:
fail.add(idx)
return (succ, fail)
if __name__ == '__main__':
import argparse
from multi import multi_p_run, put_worker
PARSER = argparse.ArgumentParser()
PARSER.add_argument('--src_path', type=str, default='../data/mp4s')
PARSER.add_argument('--tgt_path', type=str, default='../data/datasets')
PARSER.add_argument('--n_process', type=int, default=1)
CONFIG = PARSER.parse_args()
N_PROCESS = CONFIG.n_process
PARAMS = {'src_path':CONFIG.src_path,
'tgt_path':CONFIG.tgt_path}
os.makedirs('{tgt_path}'.format(tgt_path=PARAMS['tgt_path']), exist_ok=True)
if N_PROCESS == 1:
RES = preprocess(0, 35, PARAMS)
else:
RES = multi_p_run(35, put_worker, preprocess, PARAMS, N_PROCESS)
| dmlc/mxnet | example/gluon/lipnet/utils/preprocess_data.py | Python | apache-2.0 | 8,781 |
# This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from io import BytesIO
from itertools import ifilter
import icalendar as ical
from flask import session
from lxml import html
from lxml.etree import ParserError
from pyatom import AtomFeed
from sqlalchemy.orm import joinedload, load_only, subqueryload, undefer
from werkzeug.urls import url_parse
from indico.core.config import config
from indico.modules.categories import Category
from indico.modules.events import Event
from indico.util.date_time import now_utc
def serialize_categories_ical(category_ids, user, event_filter=True, event_filter_fn=None, update_query=None):
"""Export the events in a category to iCal
:param category_ids: Category IDs to export
:param user: The user who needs to be able to access the events
:param event_filter: A SQLalchemy criterion to restrict which
events will be returned. Usually something
involving the start/end date of the event.
:param event_filter_fn: A callable that determines which events to include (after querying)
:param update_query: A callable that can update the query used to retrieve the events.
Must return the updated query object.
"""
own_room_strategy = joinedload('own_room')
own_room_strategy.load_only('building', 'floor', 'number', 'name')
own_room_strategy.lazyload('owner')
own_venue_strategy = joinedload('own_venue').load_only('name')
query = (Event.query
.filter(Event.category_chain_overlaps(category_ids),
~Event.is_deleted,
event_filter)
.options(load_only('id', 'category_id', 'start_dt', 'end_dt', 'title', 'description', 'own_venue_name',
'own_room_name', 'protection_mode', 'access_key'),
subqueryload('acl_entries'),
joinedload('person_links'),
own_room_strategy,
own_venue_strategy)
.order_by(Event.start_dt))
if update_query:
query = update_query(query)
it = iter(query)
if event_filter_fn:
it = ifilter(event_filter_fn, it)
events = list(it)
# make sure the parent categories are in sqlalchemy's identity cache.
# this avoids query spam from `protection_parent` lookups
_parent_categs = (Category._get_chain_query(Category.id.in_({e.category_id for e in events}))
.options(load_only('id', 'parent_id', 'protection_mode'),
joinedload('acl_entries'))
.all())
cal = ical.Calendar()
cal.add('version', '2.0')
cal.add('prodid', '-//CERN//INDICO//EN')
now = now_utc(False)
for event in events:
if not event.can_access(user):
continue
location = ('{} ({})'.format(event.room_name, event.venue_name)
if event.venue_name and event.room_name
else (event.venue_name or event.room_name))
cal_event = ical.Event()
cal_event.add('uid', u'indico-event-{}@{}'.format(event.id, url_parse(config.BASE_URL).host))
cal_event.add('dtstamp', now)
cal_event.add('dtstart', event.start_dt)
cal_event.add('dtend', event.end_dt)
cal_event.add('url', event.external_url)
cal_event.add('summary', event.title)
cal_event.add('location', location)
description = []
if event.person_links:
speakers = [u'{} ({})'.format(x.full_name, x.affiliation) if x.affiliation else x.full_name
for x in event.person_links]
description.append(u'Speakers: {}'.format(u', '.join(speakers)))
if event.description:
desc_text = unicode(event.description) or u'<p/>' # get rid of RichMarkup
try:
description.append(unicode(html.fromstring(desc_text).text_content()))
except ParserError:
# this happens e.g. if desc_text contains only a html comment
pass
description.append(event.external_url)
cal_event.add('description', u'\n'.join(description))
cal.add_component(cal_event)
return BytesIO(cal.to_ical())
def serialize_category_atom(category, url, user, event_filter):
"""Export the events in a category to Atom
:param category: The category to export
:param url: The URL of the feed
:param user: The user who needs to be able to access the events
:param event_filter: A SQLalchemy criterion to restrict which
events will be returned. Usually something
involving the start/end date of the event.
"""
query = (Event.query
.filter(Event.category_chain_overlaps(category.id),
~Event.is_deleted,
event_filter)
.options(load_only('id', 'category_id', 'start_dt', 'title', 'description', 'protection_mode',
'access_key'),
subqueryload('acl_entries'))
.order_by(Event.start_dt))
events = [e for e in query if e.can_access(user)]
feed = AtomFeed(feed_url=url, title='Indico Feed [{}]'.format(category.title))
for event in events:
feed.add(title=event.title,
summary=unicode(event.description), # get rid of RichMarkup
url=event.external_url,
updated=event.start_dt)
return BytesIO(feed.to_string().encode('utf-8'))
def serialize_category(category, with_favorite=False, with_path=False, parent_path=None, child_path=None):
data = {
'id': category.id,
'title': category.title,
'is_protected': category.is_protected,
'has_events': category.has_events,
'deep_category_count': category.deep_children_count,
'deep_event_count': category.deep_events_count,
'can_access': category.can_access(session.user),
'can_create_events': category.can_create_events(session.user),
}
if with_path:
if child_path:
data['path'] = child_path[:]
for __ in reversed(child_path):
data['path'].pop()
if not data['path'] or data['path'][-1]['id'] == category.id:
break
elif parent_path:
data['path'] = parent_path[:]
data['path'].append({'id': category.id, 'title': category.title})
else:
data['path'] = category.chain
data['parent_path'] = data['path'][:-1]
if with_favorite:
data['is_favorite'] = session.user and category in session.user.favorite_categories
return data
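# Illustrative result of serialize_category (values are hypothetical; the
# 'path'/'parent_path' keys appear only when with_path=True):
#
#     {'id': 42, 'title': 'Physics', 'is_protected': False,
#      'has_events': True, 'deep_category_count': 3, 'deep_event_count': 120,
#      'can_access': True, 'can_create_events': False}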
def serialize_category_chain(category, include_children=False, include_parents=False):
data = {'category': serialize_category(category, with_path=True)}
if include_children:
data['subcategories'] = [serialize_category(c, with_path=True, parent_path=data['category']['path'])
for c in category.children]
if include_parents:
query = (category.parent_chain_query
.options(undefer('deep_events_count'), undefer('deep_children_count')))
data['supercategories'] = [serialize_category(c, with_path=True, child_path=data['category']['path'])
for c in query]
return data
| nop33/indico | indico/modules/categories/serialize.py | Python | gpl-3.0 | 8,146 |
"""
The basic iterator-classes for iterating over the blocks and transactions of
the blockchain.
"""
from collections import deque
from sortedcontainers import SortedList
from .defs import GENESIS_PREV_BLOCK_HASH, HEIGHT_SAFETY_MARGIN
from .misc import hash_hex_to_bytes, FilePos, Bunch
from .rawfiles import RawDataIterator
from .block import StoredBlock, deserialize_block
from .loggers import logger
################################################################################
# filtering
class BlockFilter:
"""
Represents start/stop criteria for blocks to include, based on height,
timestamp, and specific block identified by its hash.
"Start" is inclusive, "stop" is exclusive.
:note: Block timestamp is approximate. Blocks are not strictly ordered by timestamp.
"""
def __init__(self,
start_block_height = None, stop_block_height = None,
start_block_time = None, stop_block_time = None,
start_block_hash = None, stop_block_hash = None,
):
if start_block_height is not None or stop_block_height is not None:
self.block_height = ( start_block_height, stop_block_height )
else:
self.block_height = None
if start_block_time is not None or stop_block_time is not None:
self.block_time = ( start_block_time, stop_block_time )
else:
self.block_time = None
if start_block_hash is not None or stop_block_hash is not None:
# str to bytes
start_block_hash = hash_hex_to_bytes(start_block_hash) if isinstance(start_block_hash, str) else start_block_hash
stop_block_hash = hash_hex_to_bytes(stop_block_hash) if isinstance(stop_block_hash, str) else stop_block_hash
self.block_hash = ( start_block_hash, stop_block_hash )
else:
self.block_hash = None
def check_block(self, block, is_started):
"""
:return: True if need to include, False if need to exclude (i.e. before "start")
:raise: StopIteration if need to break (i.e. after "stop")
"""
if self.block_height is not None:
if not self._check(block.height, self.block_height, is_started):
return False
if self.block_time is not None:
if not self._check(block.timestamp, self.block_time, is_started):
return False
if self.block_hash is not None:
if not self._check(block.block_hash, self.block_hash, is_started, is_ordered = False):
return False
return True
def _check(self, value, boundaries, is_started, is_ordered = True):
# True, False, or raise StopIteration
start, stop = boundaries
if start is not None and not is_started:
# check if should start
if is_ordered:
if value < start:
# before the start
return False
else:
if value != start:
# before the start (haven't seen "start" yet)
return False
if stop is not None and is_started:
# check if should stop (note: stop is exclusive)
if is_ordered:
if value >= stop:
# at or after the end
raise StopIteration
else:
if value == stop:
# at the end
raise StopIteration
return True
def __repr__(self):
boundaries_str = ', '.join(
'%s.%s=%s' % (attr, side, v)
for attr, values in sorted(self.__dict__.items())
if values is not None
for side, v in zip(['start', 'stop'], values)
if v is not None
)
if not boundaries_str:
boundaries_str = '[include all]'
return '<%s %s>' % ( type(self).__name__, boundaries_str)
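# Illustrative construction (hypothetical boundaries): include blocks from
# height 100000 up to, but not including, height 200000:
#
#     >>> BlockFilter(start_block_height=100000, stop_block_height=200000)
#     <BlockFilter block_height.start=100000, block_height.stop=200000>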
class _WorkingBlockFilter:
"""
A BlockFilter along with the state needed to apply it.
"""
def __init__(self, block_filter):
self.filter = block_filter
self.is_started = False
self.is_ended = False
def check_block(self, block):
"""
:return: True if need to include, False if need to exclude (i.e. before "start")
:raise: StopIteration if need to break (i.e. after "stop")
"""
if self.is_ended:
raise StopIteration
try:
should_include = self.filter.check_block(block, is_started = self.is_started)
if should_include:
self.is_started = True
return should_include
except StopIteration:
self.is_ended = True
raise
def __repr__(self):
return repr(self.filter).replace('BlockFilter', type(self).__name__)
################################################################################
# Blocks
class RawFileBlockIterator:
"""
Iterates over ALL blocks from `blk*.dat` files -- not only blocks included in
the longest-chain.
Blocks appear in "storage order", which is not necessarily chronological/topological
order.
No processing, validation, etc., is done on the blocks.
Element type is `StoredBlock`.
:note: Height is set to -1 for all blocks.
:note: This iterator is resumable and refreshable.
"""
def __init__(self, raw_data_iter = None, **kwargs):
"""
:param raw_data_iter: a RawDataIterator
:param kwargs: extra kwargs for RawDataIterator (ignored unless raw_data_iter is None)
"""
if raw_data_iter is None:
raw_data_iter = RawDataIterator(**kwargs)
self.raw_data_iter = raw_data_iter
# state
self._cur_blob = b''
self._cur_offset = 0
self._cur_filename = None
def __next__(self):
if self._cur_offset >= len(self._cur_blob):
# we're done with this blob. read the next one.
#if self._cur_blob is not None:
# assert self._cur_offset == len(self._cur_blob), (self._cur_offset, len(self._cur_blob))
self._read_next_blob() # raises StopIteration if no more files
block_offset = self._cur_offset
block = deserialize_block(self._cur_blob[block_offset : ], -1)
if block is None:
# past last block (in the last blk.dat file)
# refresh: check if new data was added to this blob since we read it
if self.refresh:
self._reread_blob()
block = deserialize_block(self._cur_blob[block_offset : ], -1)
if block is None:
# no new data, even after refreshing
raise StopIteration
self._cur_offset += 8 + block.rawsize
return StoredBlock(
block = block,
filepos = FilePos(self._cur_filename, block_offset),
)
def _read_next_blob(self):
data = self.raw_data_iter.__next__() # raises StopIteration if no more files . # easier to profile with x.__next__() instead of next(x)...
self._cur_blob = data.blob
self._cur_filename = data.filename
self._cur_offset = 0
def _reread_blob(self):
if self._cur_filename is not None:
# note: not updating self._cur_filename and self._cur_offset, because
# we need to keep reading from the same offset in the same file.
self._cur_blob = self.raw_data_iter.get_data(self._cur_filename).blob
def __iter__(self):
return self
@property
def refresh(self):
return self.raw_data_iter.refresh
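# Illustrative usage sketch (default construction; whether kwargs are needed
# depends on RawDataIterator, and process() is hypothetical):
#
#     for stored in RawFileBlockIterator():
#         process(stored.filepos, stored.block)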
class TopologicalBlockIterator:
"""
Iterates over *all* blocks from `blk*.dat` files (not only from longest chain).
Blocks are generated according to a topological order. This means
it is guaranteed a block will not appear before its "prev block" (indicated
by its "prev_block_hash").
Other than that, blocks from different forks can be generated in any order.
Element type is `Block`.
:note: This iterator is resumable and refreshable.
"""
def __init__(self, rawfile_block_iter = None, **kwargs):
"""
:param rawfile_block_iter: a RawFileBlockIterator
:param kwargs: extra kwargs for RawFileBlockIterator (ignored unless rawfile_block_iter is None)
"""
if rawfile_block_iter is None:
rawfile_block_iter = RawFileBlockIterator(**kwargs)
self.rawfile_block_iter = rawfile_block_iter
# state
self._height_by_hash = { GENESIS_PREV_BLOCK_HASH: -1 } # genesis is 0, so its prev is -1
self._orphans = {} # block_hash -> a list of orphan blocks waiting for it to appear
self._ready_blocks = deque() # blocks which can be released on next call to __next__()
def __next__(self):
# read more data if necessary
while not self._ready_blocks:
self._read_another_block()
# release a block
return self._get_next_block_to_release()
def _read_another_block(self):
# note: block.height is not set by RawFileBlockIterator
block = self.rawfile_block_iter.__next__().block # easier to profile with x.__next__() instead of next(x)...
#logger.debug('prev-block-reference: %s -> %s', block.block_hash_hex, block.prev_block_hash_hex) # commented out because hex() takes time...
# handle new block either as "ready" or "orphan":
height_by_hash = self._height_by_hash
prev_block_hash = block.prev_block_hash
prev_height = height_by_hash.get(prev_block_hash)
if prev_height is None:
# prev not found. orphan.
self._orphans.setdefault(prev_block_hash, []).append(block)
return False
else:
# prev found. block is "ready".
self._disorphanate_block(block, prev_height + 1)
return True
def _get_next_block_to_release(self):
# release a block from _ready_blocks, and disorphanate its children
block = self._ready_blocks.popleft()
self._disorphanate_children_of(block)
return block
def _disorphanate_children_of(self, block):
children = self._orphans.pop(block.block_hash, ())
child_height = block.height + 1
for child_block in children:
self._disorphanate_block(child_block, child_height)
def _disorphanate_block(self, child_block, height):
# block's height is known now. set it:
child_block.height = height
self._height_by_hash[child_block.block_hash] = height
# no longer orphan. it is ready for releasing:
self._ready_blocks.append(child_block) # appendright
def __iter__(self):
return self
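# Sketch of the ordering guarantee (hypothetical iteration): a block is never
# yielded before its parent:
#
#     seen = set()
#     for block in TopologicalBlockIterator():
#         assert block.height == 0 or block.prev_block_hash in seen
#         seen.add(block.block_hash)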
################################################################################
# Longest chain
class LongestChainBlockIterator:
"""
Linearly iterates over blocks in the longest chain.
Denoting `B(i)` and `B(i+1)` as the i-th and i+1-th blocks in the sequence, this
iterator guarantees::
- `B(i+1).prev_block_hash == B(i).block_hash`
- `B(i+1).height == B(i).height + 1`
The height of the first block (genesis) is 0, and its `prev_block_hash` is all zeros.
Element type is `Block`.
:note: This iterator is resumable and refreshable.
"""
# TBD: an option to generate_unsafe_tail
DEFAULT_HEIGHT_SAFETY_MARGIN = HEIGHT_SAFETY_MARGIN
_DUMMY_PRE_GENESIS_BLOCK = Bunch(height = -1, block_hash = GENESIS_PREV_BLOCK_HASH)
def __init__(self, block_iter = None, height_safety_margin = None, block_filter = None, **kwargs):
"""
:param block_iter: a TopologicalBlockIterator
:param height_safety_margin:
            how much longer a fork must be than a competing fork before we
            can safely conclude it is the eventual "winner" fork.
:param block_filter: a BlockFilter, indicating blocks to start/stop at.
:param kwargs: extra kwargs for TopologicalBlockIterator (ignored unless block_iter is None)
"""
if block_iter is None:
block_iter = TopologicalBlockIterator(**kwargs)
self.block_iter = block_iter
if height_safety_margin is None:
height_safety_margin = self.DEFAULT_HEIGHT_SAFETY_MARGIN
self.height_safety_margin = height_safety_margin
if block_filter is not None:
block_filter = _WorkingBlockFilter(block_filter)
self.block_filter = block_filter
# state
root_block = self._DUMMY_PRE_GENESIS_BLOCK
self._root_block = root_block # the previous block released
self._last_block = root_block # the most recent block seen (not released yet)
self._blocks_by_hash = { root_block.block_hash: root_block } # block_hash -> block
self._block_children = { root_block.block_hash: []} # block_hash -> list of child blocks
self._leaf_heights = SortedList([ root_block.height ]) # block heights, of the leaf blocks only
def __next__(self):
while True:
block = self._get_next_block_to_release()
if block is not None:
self._root_block = block
if self._check_block(block):
return block
# no next block in pending blocks. need to read more data
self._read_another_block()
def _get_next_block_to_release(self):
if not self._check_heights_gap():
# longest chain not determined yet
return None
last_block = self._last_block
root_block = self._root_block
leaf_heights = self._leaf_heights
# since there's now another block to generate, it must be _last_block which tipped it over
assert last_block.height == leaf_heights[-1], (last_block.height, leaf_heights[-1])
# find next block to generate -- search backwards from leaf to root
next_block = self._find_child_from(last_block, root_block)
# trim the neglected chains
logger.debug('generating next root block %s', next_block)
self._discard_tree(root_block, survivor_child = next_block)
return next_block
def _discard_block(self, block):
"""
Remove a block from the data-structures representing the iterator state.
:return: the children of the block discarded
"""
block_hash = block.block_hash
logger.debug('discarding block %s', block_hash)
# remove from _blocks_by_hash:
self._blocks_by_hash.pop(block_hash)
# remove from _block_children:
children = self._block_children.pop(block_hash)
# remove from _leaf_heights (if there):
if not children:
# block is a leaf. need to remove it from _leaf_heights
self._leaf_heights.remove(block.height)
return children
def _discard_tree(self, block, survivor_child = None):
"""
recursively (DFS) discard a block and its children, except for its
"survivor" child, the one included in the longest chain.
"""
children = self._discard_block(block)
for child in children:
if child is not survivor_child:
self._discard_tree(child)
#else: don't discard the survivor
def _check_heights_gap(self):
"""
Is the longest fork leading by enough over the 2nd longest?
"""
leaf_heights = self._leaf_heights
height1 = leaf_heights[-1]
height2 = leaf_heights[-2] if len(leaf_heights) >= 2 else self._root_block.height
assert height1 >= height2, (height1, height2)
if height1 - height2 >= self.height_safety_margin:
# fork is leading by a large gap. can safely release next block
logger.debug('found next block to generate (cur leaf height = %s)', height1)
return True
else:
# don't generate next block yet
logger.debug('no next block to generate (cur leaf height = %s)', height1)
return False
def _find_child_from(self, block, root_block):
"""
:return: the direct child of `root_block`, in the route from `root_block` to `block`.
"""
blocks_by_hash = self._blocks_by_hash
root_block_hash = root_block.block_hash
while True:
prev_block_hash = block.prev_block_hash
if prev_block_hash == root_block_hash:
return block
block = blocks_by_hash[prev_block_hash]
def _read_another_block(self):
blocks_by_hash = self._blocks_by_hash
block_children = self._block_children
leaf_heights = self._leaf_heights
# fetch another block
block = self.block_iter.__next__() # easier to profile with x.__next__() instead of next(x)...
block_height = block.height
block_hash = block.block_hash
prev_block_hash = block.prev_block_hash
# find new block's prev block
try:
prev_block = blocks_by_hash[prev_block_hash]
if prev_block is not None:
assert prev_block.height + 1 == block_height, (prev_block.height, block_height)
except KeyError:
# already neglected
logger.info('block ignored (must be from a fork already deemed inferior): %s', block.block_hash_hex)
return
# update data structures with new block
logger.debug('adding block: %s', block)
self._last_block = block
blocks_by_hash[block_hash] = block
block_children[block_hash] = [] # no children seen yet, because each block appears before its children
prev_block_children = block_children[prev_block_hash]
is_prev_leaf = not prev_block_children
prev_block_children.append(block)
if is_prev_leaf:
            # prev is no longer a leaf. need to remove it from leaf_heights
leaf_heights.remove(block_height - 1)
leaf_heights.add(block_height)
def _check_block(self, block):
"""
apply `block_filter` to `block`
"""
if self.block_filter is None:
return True
return self.block_filter.check_block(block)
def __iter__(self):
return self
def __repr__(self):
return '<%s at block #%r>' % ( type(self).__name__, self._root_block.height )
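# Hedged usage sketch (illustrative only, not part of the original module):
# walking the longest chain and checking the two invariants promised in the
# class docstring for consecutive blocks B(i) and B(i+1).
def _example_longest_chain_scan():
    prev = None
    for block in LongestChainBlockIterator():
        if prev is not None:
            assert block.prev_block_hash == prev.block_hash
            assert block.height == prev.height + 1
        prev = block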
################################################################################
# Transactions
class TxIterator:
"""
Iterates over all transactions in longest chain.
Roughly equivalent to::
for block in LongestChainBlockIterator():
yield from block.iter_txs()
    Element type is `Tx` (or `TxInBlock`, if `include_block_context=True`).
:note: This iterator is resumable and refreshable.
"""
def __init__(self, include_block_context = False, include_tx_blob = False, block_iter = None, **kwargs):
"""
:param block_iter: a LongestChainBlockIterator
:param kwargs: extra kwargs for LongestChainBlockIterator (ignored unless block_iter is None)
"""
if block_iter is None:
block_iter = LongestChainBlockIterator(**kwargs)
self.block_iter = block_iter
self.include_block_context = include_block_context
self.include_tx_blob = include_tx_blob
# state
self._block_txs = iter(()) # iterator over an empty sequence
def __next__(self):
while True:
try:
# return the next tx in this block:
tx = self._block_txs.__next__() # easier to profile with x.__next__() instead of next(x)...
return tx
except StopIteration:
# done with this block
pass
# proceed to next block:
self._block_txs = self._get_iter_of_next_block()
def _get_iter_of_next_block(self):
txs = self.block_iter.__next__().txs # easier to profile with x.__next__() instead of next(x)...
if self.include_block_context:
return txs.iter_txs_in_block(include_tx_blob = self.include_tx_blob)
else:
return txs.iter_txs(include_tx_blob = self.include_tx_blob)
def __iter__(self):
return self
def __repr__(self):
return '<%s at %r>' % ( type(self).__name__, self.block_iter )
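# Hedged usage sketch (illustrative only, not part of the original module):
# iterating transactions in the longest chain, mirroring the "roughly
# equivalent" loop shown in the class docstring.
def _example_tx_scan(max_txs = 100):
    # with include_block_context=True each element is a TxInBlock rather than a Tx
    for i, tx in enumerate(TxIterator(include_block_context = True)):
        if i >= max_txs:
            break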
################################################################################
| fungibit/chainscan | chainscan/scan.py | Python | mit | 20,770 |
# -*- coding: utf-8 -*-
import pickle
class Enum(set):
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
class ItemSet(object):
def __iter__(self):
return self.items.__iter__()
def __next__(self):
return self.items.__next__()
def next(self):
return self.items.next()
def __getitem__(self, item):
if hasattr(self.items, '__getitem__'):
return self.items[item]
else:
raise AttributeError('Item set has no __getitem__ implemented.')
def __len__(self):
return len(self.items)
    def save(self, file_path):
        # dump state to the opened file object (not the path string)
        with open(file_path, 'wb') as f:
            pickle.dump(self.__dict__, f, pickle.HIGHEST_PROTOCOL)
    def load_from_pickle(self, file_path):
        # read state back from the opened file object and merge it in
        with open(file_path, 'rb') as f:
            adict = pickle.load(f)
        self.__dict__.update(adict)
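# Hedged usage sketch (illustrative only, not part of the original module):
# a minimal concrete ItemSet; the base class assumes subclasses provide an
# iterable `items` attribute for the dunder delegations above.
class _ExampleItemSet(ItemSet):
    def __init__(self, items):
        self.items = list(items)
# s = _ExampleItemSet([1, 2, 3])
# len(s) == 3; s[0] == 1; list(s) == [1, 2, 3]
# s.save('items.pkl') followed by load_from_pickle('items.pkl') round-trips the state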
| PythonSanSebastian/docstamp | docstamp/collections.py | Python | apache-2.0 | 1,012 |
import os
from django.core.management.base import NoArgsCommand
from reviews.models import Screenshot
class Command(NoArgsCommand):
def handle_noargs(self, **options):
prefix = os.path.join("images", "uploaded")
new_prefix = os.path.join("uploaded", "images")
for screenshot in Screenshot.objects.all():
if screenshot.image.startswith(prefix):
screenshot.image = \
os.path.join(new_prefix,
os.path.basename(screenshot.image))
screenshot.save()
| Khan/reviewboard | reviewboard/reviews/management/commands/fixscreenshots.py | Python | mit | 574 |
class BankException(Exception):
pass
class WrongPINException(Exception):
pass
class ShopWizardException(Exception):
pass
class ItemOutOfStockException(ShopWizardException):
def __init__(self, item_name):
super(ItemOutOfStockException, self).__init__('Item is out of stock: ' + item_name)
class UserShopException(Exception):
pass
class NotEnoughMoneyInTillException(UserShopException):
pass
class ShopStockPageIndexError(UserShopException):
    pass
| Smetterleen/Neopets-Python-API | neopapi/shops/Exceptions.py | Python | gpl-3.0 | 487 |
import datetime
from django.test import SimpleTestCase
import sqlalchemy
from ..sql import get_indicator_table
from ..views import process_url_params
from .test_data_source_config import get_sample_data_source
class ParameterTest(SimpleTestCase):
def setUp(self):
config = get_sample_data_source()
self.columns = get_indicator_table(config, sqlalchemy.MetaData()).columns
def test_no_parameters(self):
params = process_url_params({}, self.columns)
self.assertEqual(params.format, 'unzipped-csv')
self.assertEqual(params.keyword_filters, {})
self.assertEqual(params.sql_filters, [])
def test_lastndays(self):
params = process_url_params({'date-lastndays': '30'}, self.columns)
result_filter = params.sql_filters[0]
end = datetime.date.today()
start = end - datetime.timedelta(days=30)
date_column = self.columns['date']
desired_filter = date_column.between(start, end)
self.assertTrue(result_filter.compare(desired_filter))
def test_range_filter(self):
params = process_url_params({'count-range': '10..30'}, self.columns)
result_filter = params.sql_filters[0]
count_column = self.columns['count']
desired_filter = count_column.between('10', '30')
self.assertTrue(result_filter.compare(desired_filter))
| dimagi/commcare-hq | corehq/apps/userreports/tests/test_export.py | Python | bsd-3-clause | 1,374 |
# -*- coding: utf-8 -*-
###############################################################################
#
# ShowGroup
# Shows information for an existing group.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ShowGroup(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the ShowGroup Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(ShowGroup, self).__init__(temboo_session, '/Library/Zendesk/Groups/ShowGroup')
def new_input_set(self):
return ShowGroupInputSet()
def _make_result_set(self, result, path):
return ShowGroupResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return ShowGroupChoreographyExecution(session, exec_id, path)
class ShowGroupInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the ShowGroup
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_Email(self, value):
"""
Set the value of the Email input for this Choreo. ((required, string) The email address you use to login to your Zendesk account.)
"""
super(ShowGroupInputSet, self)._set_input('Email', value)
def set_GroupID(self, value):
"""
Set the value of the GroupID input for this Choreo. ((required, string) The ID of the group to show.)
"""
super(ShowGroupInputSet, self)._set_input('GroupID', value)
def set_Password(self, value):
"""
Set the value of the Password input for this Choreo. ((required, password) Your Zendesk password.)
"""
super(ShowGroupInputSet, self)._set_input('Password', value)
def set_Server(self, value):
"""
Set the value of the Server input for this Choreo. ((required, string) Your Zendesk domain and subdomain (e.g., temboocare.zendesk.com).)
"""
super(ShowGroupInputSet, self)._set_input('Server', value)
class ShowGroupResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the ShowGroup Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Zendesk.)
"""
return self._output.get('Response', None)
class ShowGroupChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return ShowGroupResultSet(response, path)
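# Hedged usage sketch (illustrative only, not part of the generated bindings):
# the standard Temboo execution flow, assuming the execute_with_results()
# entry point provided by the Choreography base class. All input values below
# are placeholders.
def _example_show_group(temboo_session):
    choreo = ShowGroup(temboo_session)
    inputs = choreo.new_input_set()
    inputs.set_Email('[email protected]')
    inputs.set_Password('password')
    inputs.set_Server('example.zendesk.com')
    inputs.set_GroupID('12345')
    results = choreo.execute_with_results(inputs)
    return results.getJSONFromString(results.get_Response())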
| jordanemedlock/psychtruths | temboo/core/Library/Zendesk/Groups/ShowGroup.py | Python | apache-2.0 | 3,603 |
# ===============================================================================
# Copyright 2016 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from __future__ import print_function
import os
import pickle
# ============= standard library imports ========================
from numpy import Inf
from traits.api import Bool, Float, Property, Instance, Event, Button, Enum
# ============= local library imports ==========================
from pychron.core.helpers.timer import Timer
from pychron.graph.graph import Graph
from pychron.managers.data_managers.csv_data_manager import CSVDataManager
from pychron.managers.manager import Manager
from pychron.paths import paths
class StreamGraphManager(Manager):
graph = Instance(Graph)
graph_scale = Enum('linear', 'log')
graph_y_auto = Bool
graph_ymin = Property(Float, depends_on='_graph_ymin')
graph_ymax = Property(Float, depends_on='_graph_ymax')
_graph_ymin = Float
_graph_ymax = Float
graph_scan_width = Float(enter_set=True, auto_set=False) # in minutes
clear_button = Event
start_record_button = Button
stop_record_button = Button
snapshot_button = Button
snapshot_output = Enum('png', 'pdf')
# add_visual_marker_button = Button('Add Visual Marker')
# marker_text = Str
add_marker_button = Button('Add Marker')
clear_all_markers_button = Button
use_vertical_markers = Bool
record_label = Property(depends_on='_recording')
_recording = Bool(False)
record_data_manager = Instance(CSVDataManager)
timer = None
update_period = 2
_signal_failed_cnt = 0
def reset_scan_timer(self, func=None):
self.info('reset scan timer')
self._signal_failed_cnt = 0
self.timer = self._timer_factory(func=func)
def load_settings(self):
self.info('load scan settings')
params = self.get_settings()
if params:
self._set_graph_attrs(params)
self._load_settings(params)
else:
self.warning('no scan settings')
def dump_settings(self):
self.info('dump scan settings')
p = os.path.join(paths.hidden_dir, '{}.p'.format(self.settings_name))
with open(p, 'wb') as wfile:
d = dict()
for ki in self.graph_attr_keys:
d[ki] = getattr(self, ki)
self._dump_settings(d)
pickle.dump(d, wfile)
def get_settings(self):
p = os.path.join(paths.hidden_dir, '{}.p'.format(self.settings_name))
if os.path.isfile(p):
with open(p, 'rb') as f:
try:
return pickle.load(f, encoding='utf-8')
except (pickle.PickleError, EOFError, UnicodeDecodeError) as e:
self.warning('Failed unpickling scan settings file {}'.format(p))
self.debug(e)
return
else:
self.warning('No scan settings file {}'.format(p))
# private
def _get_graph_y_min_max(self, plotid=0):
mi, ma = Inf, -Inf
for k, plot in self.graph.plots[plotid].plots.items():
plot = plot[0]
if plot.visible:
ys = plot.value.get_data()
try:
ma = max(ma, max(ys))
mi = min(mi, min(ys))
except ValueError:
mi, ma = None, None
return mi, ma
def _update_scan_graph(self):
pass
def _timer_factory(self, func=None):
if func is None:
func = self._update_scan_graph
if self.timer:
self.timer.Stop()
self.timer.wait_for_completion()
mult = 1000
return Timer(self.update_period * mult, func)
def _graph_factory(self, *args, **kw):
raise NotImplementedError
def _record_data_manager_factory(self):
return CSVDataManager()
def _reset_graph(self):
self.graph = self._graph_factory()
        # restart the scan timer so it drives the new graph
self.reset_scan_timer()
def _update_graph_limits(self, name, new):
if 'high' in name:
self._graph_ymax = max(new, self._graph_ymin)
else:
self._graph_ymin = min(new, self._graph_ymax)
# handlers
def _clear_button_fired(self):
self._reset_graph()
def _graph_y_auto_changed(self, new):
p = self.graph.plots[0]
if not new:
p.value_range.low_setting = self.graph_ymin
p.value_range.high_setting = self.graph_ymax
self.graph.redraw()
def _graph_scale_changed(self, new):
p = self.graph.plots[0]
p.value_scale = new
self.graph.redraw()
def _graph_scan_width_changed(self):
g = self.graph
n = self.graph_scan_width
n = max(n, 1 / 60.)
mins = n * 60
g.set_data_limits(1.8 * mins)
g.set_scan_widths(mins)
def _clear_all_markers_button_fired(self):
self.graph.clear_markers()
def _start_record_button_fired(self):
self._start_recording()
self._recording = True
def _stop_record_button_fired(self):
self._stop_recording()
self._recording = False
def _snapshot_button_fired(self):
self.debug('snapshot button fired')
self.graph.save()
def _add_marker_button_fired(self):
xs = self.graph.plots[0].data.get_data('x0')
self.record_data_manager.write_to_frame(tuple(' '))
self.graph.add_vertical_rule(xs[-1])
# ===============================================================================
# property get/set
# ===============================================================================
def _validate_graph_ymin(self, v):
try:
v = float(v)
if v < self.graph_ymax:
return v
except ValueError:
pass
def _validate_graph_ymax(self, v):
try:
v = float(v)
if v > self.graph_ymin:
return v
except ValueError:
pass
def _get_graph_ymin(self):
return self._graph_ymin
def _get_graph_ymax(self):
return self._graph_ymax
def _set_graph_ymin(self, v):
if v is not None:
self._graph_ymin = v
p = self.graph.plots[0]
p.value_range.low_setting = v
self.graph.redraw()
def _set_graph_ymax(self, v):
if v is not None:
self._graph_ymax = v
p = self.graph.plots[0]
p.value_range.high_setting = v
self.graph.redraw()
def _get_record_label(self):
return 'Record' if not self._recording else 'Stop'
@property
def graph_attr_keys(self):
return ['graph_scale',
'graph_ymin',
'graph_ymax',
'graph_y_auto',
'graph_scan_width']
    def _dump_settings(self, d):
        # hook for subclasses to add extra keys to the persisted settings dict
        # (called with the dict in dump_settings above)
        pass
def _set_graph_attrs(self, params):
for pi in self.graph_attr_keys:
try:
setattr(self, pi, params[pi])
except KeyError as e:
print('sm load settings', pi, e)
# ===============================================================================
# defaults
# ===============================================================================
def _graph_default(self):
g = self._graph_factory()
# self.graphs.append(g)
return g
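# Hedged usage sketch (illustrative only, not part of the original module): a
# minimal concrete subclass. `settings_name` and the two overrides are the hooks
# the base class expects (see dump_settings/get_settings and _timer_factory
# above); constructing a bare Graph() here is an assumption.
class _ExampleScanManager(StreamGraphManager):
    settings_name = 'example_scan'
    def _graph_factory(self, *args, **kw):
        # a real subclass would configure plots/series on the stream graph here
        return Graph()
    def _update_scan_graph(self):
        # called every `update_period` seconds by the Timer; a real subclass
        # would poll its device and append new points to the graph here
        pass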
# ============= EOF =============================================
| UManPychron/pychron | pychron/managers/stream_graph_manager.py | Python | apache-2.0 | 8,276 |
# -*- coding: utf-8 -*-
import copy
import os
from django.contrib import auth
from django.contrib.auth.models import User
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db.models import QuerySet
from unittest import mock
from django.test import TestCase, Client
from django.urls import reverse
from ..forms import AddBookForm
from ..models import (TheUser, Category, Author, Language, Book,
AddedBook, BookRating, BookComment, Post, SupportMessage, BookRelatedData)
from .utils import Utils
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
TEST_DATA_DIR = os.path.join(TEST_DIR, 'fixtures')
# ----------------------------------------------------------------------------------------------------------------------
class ModelTest(TestCase):
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setUpTestData(cls):
cls.setup_users()
cls.setup_categories()
cls.setup_authors()
cls.setup_languages()
cls.setup_books()
cls.setup_added_books()
cls.setup_book_rating()
cls.setup_book_comment()
cls.setup_post_messages()
cls.setup_support_messages()
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_users(cls):
client = Client()
cls.anonymous_user = auth.get_user(client)
cls.user1 = User.objects.create_user('user1', '[email protected]', 'testpassword1')
cls.user2 = User.objects.create_user('user2', '[email protected]', 'testpassword2')
cls.user3 = User.objects.create_user('user3', '[email protected]', 'testpassword3')
cls.user4 = User.objects.create_user('user4', '[email protected]', 'testpassword4')
cls.user5 = User.objects.create_user('user5', '[email protected]', 'testpassword5')
cls.user6 = User.objects.create_user('user6', '[email protected]', 'testpassword6')
cls.the_user1 = TheUser.objects.get(id_user=cls.user1)
cls.the_user2 = TheUser.objects.get(id_user=cls.user2)
cls.the_user5 = TheUser.objects.get(id_user=cls.user5)
cls.the_user6 = TheUser.objects.get(id_user=cls.user6)
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_categories(cls):
cls.category1 = Category.objects.create(category_name='category1')
cls.category2 = Category.objects.create(category_name='category2')
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_authors(cls):
cls.author1 = Author.objects.create(author_name='Best Author 1')
cls.author2 = Author.objects.create(author_name='trueAuthorNew')
cls.author3 = Author.objects.create(author_name='zlast author')
cls.author4 = Author.objects.create(author_name='<AuthorSpecialSymbols>&"')
cls.author5 = Author.objects.create(author_name="O'Connor")
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_languages(cls):
cls.language_en = Language.objects.create(language='English')
cls.language_ru = Language.objects.create(language='Russian')
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_books(cls):
test_book_path = os.path.join(TEST_DATA_DIR, 'test_book.pdf')
test_book_image_path = os.path.join(TEST_DATA_DIR, 'test_book_image.png')
books_setup = [
{
'name': 'First Book',
'author': cls.author1,
'category': cls.category1,
'language': cls.language_en,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()),
'who_added': cls.the_user1,
'private': True
},
{
'name': 'Second Book',
'author': cls.author2,
'category': cls.category1,
'language': cls.language_en,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'who_added': cls.the_user2,
'blocked_book': True
},
{
'name': 'Third Book',
'author': cls.author2,
'category': cls.category1,
'language': cls.language_ru,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()),
'who_added': cls.the_user1,
'blocked_book': True
},
{
'name': 'Fourth Book',
'author': cls.author1,
'category': cls.category1,
'language': cls.language_ru,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()),
'who_added': cls.the_user2,
'blocked_book': True
},
{
'name': 'Fifth Book',
'author': cls.author1,
'category': cls.category2,
'language': cls.language_ru,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'who_added': cls.the_user1,
'private': True
},
{
'name': 'Sixth Book',
'author': cls.author2,
'category': cls.category2,
'language': cls.language_en,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()),
'who_added': cls.the_user2
},
{
'name': 'Seventh Book<>&"',
'author': cls.author4,
'category': cls.category2,
'language': cls.language_en,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()),
'who_added': cls.the_user2
}
]
for book in books_setup:
Book.objects.create(
book_name=book['name'],
id_author=book['author'],
id_category=book['category'],
description='TEST description',
language=book['language'],
book_file=book['file'],
photo=book.get('photo', False),
who_added=book['who_added'],
private_book=book.get('private', False),
blocked_book=book.get('blocked_book', False)
)
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_added_books(cls):
AddedBook.objects.create(id_user=cls.the_user1, id_book=Book.objects.get(book_name='Third Book'))
AddedBook.objects.create(id_user=cls.the_user1, id_book=Book.objects.get(book_name='Sixth Book'))
AddedBook.objects.create(id_user=cls.the_user1, id_book=Book.objects.get(book_name='Fourth Book'))
AddedBook.objects.create(id_user=cls.the_user2, id_book=Book.objects.get(book_name='Third Book'))
AddedBook.objects.create(id_user=cls.the_user2, id_book=Book.objects.get(book_name='Sixth Book'))
AddedBook.objects.create(id_user=cls.the_user2, id_book=Book.objects.get(book_name='Second Book'))
AddedBook.objects.create(id_user=cls.the_user5, id_book=Book.objects.get(book_name='Sixth Book'))
AddedBook.objects.create(id_user=cls.the_user6, id_book=Book.objects.get(book_name='Sixth Book'))
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_book_rating(cls):
BookRating.objects.create(id_book=Book.objects.get(book_name='Third Book'), id_user=cls.the_user1, rating=10)
BookRating.objects.create(id_book=Book.objects.get(book_name='Third Book'), id_user=cls.the_user2, rating=5)
BookRating.objects.create(id_book=Book.objects.get(book_name='Third Book'), id_user=cls.the_user5, rating=3)
BookRating.objects.create(id_book=Book.objects.get(book_name='Fourth Book'), id_user=cls.the_user1, rating=7)
BookRating.objects.create(id_book=Book.objects.get(book_name='Sixth Book'), id_user=cls.the_user1, rating=4)
BookRating.objects.create(id_book=Book.objects.get(book_name='Second Book'), id_user=cls.the_user2, rating=7)
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_book_comment(cls):
second_book = Book.objects.get(book_name='Second Book')
third_book = Book.objects.get(book_name='Third Book')
fourth_book = Book.objects.get(book_name='Fourth Book')
BookComment.objects.create(id_book=second_book, id_user=cls.the_user1, text='Test book 2 user 1')
BookComment.objects.create(id_book=second_book, id_user=cls.the_user2, text='Test book 2 user 2')
BookComment.objects.create(id_book=third_book, id_user=cls.the_user1, text='Test book 3 user 1')
BookComment.objects.create(id_book=fourth_book, id_user=cls.the_user1, text='Test book 4 user 1')
BookComment.objects.create(id_book=fourth_book, id_user=cls.the_user5, text='Test book 4 user 5')
# ------------------------------------------------------------------------------------------------------------------
@classmethod
@mock.patch('app.signals.email_dispatch.apply_async', new=mock.Mock())
def setup_post_messages(cls):
Post.objects.create(user=cls.the_user1, heading='post 1', text='Posted test text 1')
Post.objects.create(user=cls.the_user1, heading='post 2', text='Posted test text 2')
Post.objects.create(user=cls.the_user2, heading='post 3', text='Posted test text 3')
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_support_messages(cls):
SupportMessage.objects.create(email='[email protected]', text='Test text1')
SupportMessage.objects.create(email='[email protected]', text='Test text2')
SupportMessage.objects.create(email='[email protected]', text='Test text3')
SupportMessage.objects.create(email='[email protected]', text='Test text4')
# ------------------------------------------------------------------------------------------------------------------
def test_the_user_str(self):
self.assertEqual(str(self.the_user1), 'user1')
self.assertEqual(str(self.the_user2), 'user2')
# ------------------------------------------------------------------------------------------------------------------
def test_creating_the_user_objects(self):
"""
        Must create an 'app.models.TheUser' instance after a django User instance is created.
"""
self.assertEqual(User.objects.all().count(), 6)
self.assertEqual(User.objects.all().count(), TheUser.objects.all().count())
self.assertNotEqual(self.the_user1.auth_token, '')
self.assertNotEqual(self.the_user1.auth_token, self.the_user2.auth_token)
# ------------------------------------------------------------------------------------------------------------------
def test_the_user_get_api_reminders(self):
reminders = self.the_user1.get_api_reminders()
reminders_keys_correct = ['vk', 'fb_group', 'fb_page', 'twitter', 'disabled_all', 'app_rate']
self.assertTrue(isinstance(reminders, dict))
self.assertEqual(sorted(list(reminders.keys())), sorted(reminders_keys_correct))
# ------------------------------------------------------------------------------------------------------------------
def test_the_user_get_web_reminders(self):
reminders = self.the_user1.get_web_reminders()
reminders_keys_correct = ['vk', 'fb_group', 'fb_page', 'twitter', 'disabled_all', 'app_download']
self.assertTrue(isinstance(reminders, dict))
self.assertEqual(sorted(list(reminders.keys())), sorted(reminders_keys_correct))
# ------------------------------------------------------------------------------------------------------------------
def test_the_user_update_reminder(self):
reminders = self.the_user1.get_web_reminders()
self.assertTrue(isinstance(reminders, dict))
self.assertEqual(reminders['vk'], True)
self.assertEqual(reminders['app_download'], True)
self.the_user1.update_reminder('vk', False)
self.the_user1.update_reminder('app_download', False)
updated_reminders = self.the_user1.get_web_reminders()
self.assertTrue(isinstance(updated_reminders, dict))
self.assertEqual(updated_reminders['vk'], False)
self.assertEqual(updated_reminders['app_download'], False)
# ------------------------------------------------------------------------------------------------------------------
def test_removing_user_objects(self):
"""
        Must remove the django User instance after the 'app.models.TheUser' object is deleted.
"""
the_user3 = TheUser.objects.get(id_user__username='user3')
the_user4 = TheUser.objects.get(id_user__email='[email protected]')
the_user3.delete()
the_user4.delete()
self.assertEqual(User.objects.all().count(), 4)
self.assertEqual(User.objects.all().count(), TheUser.objects.all().count())
# ------------------------------------------------------------------------------------------------------------------
def test_created_categories(self):
self.assertEqual(Category.objects.all().count(), 2)
self.assertNotEqual(self.category1, self.category2)
# ------------------------------------------------------------------------------------------------------------------
def test_categories_str(self):
self.assertEqual(str(self.category1), 'category1')
self.assertEqual(str(self.category2), 'category2')
# ------------------------------------------------------------------------------------------------------------------
def test_created_authors(self):
self.assertEqual(Author.objects.all().count(), 5)
self.assertNotEqual(self.author1, self.author2)
# ------------------------------------------------------------------------------------------------------------------
def test_get_authors_list(self):
"""
Must return authors list depending on different letters/letter case/words/symbols.
"""
self.assertEqual(Author.get_authors_list('bEst'), ['Best Author 1'])
self.assertEqual(Author.get_authors_list('1'), ['Best Author 1'])
self.assertEqual(Author.get_authors_list(' '), ['Best Author 1', 'zlast author'])
self.assertEqual(Author.get_authors_list('new'), ['trueAuthorNew'])
self.assertEqual(Author.get_authors_list('TRUE'), ['trueAuthorNew'])
self.assertEqual(Author.get_authors_list('Best Author 1'), ['Best Author 1'])
self.assertEqual(Author.get_authors_list('trueAuthorNew'), ['trueAuthorNew'])
# ------------------------------------------------------------------------------------------------------------------
def test_get_authors_list_with_escaping(self):
        self.assertEqual(Author.get_authors_list("'", True), ['O&#39;Connor'])
        self.assertEqual(Author.get_authors_list("Connor", True), ['O&#39;Connor'])
        self.assertEqual(
            Author.get_authors_list('b', True),
            ['Best Author 1', '&lt;AuthorSpecialSymbols&gt;&amp;&quot;']
        )
        self.assertEqual(
            Author.get_authors_list('e', True),
            ['Best Author 1', 'trueAuthorNew', '&lt;AuthorSpecialSymbols&gt;&amp;&quot;']
        )
        self.assertEqual(
            Author.get_authors_list('author', True),
            ['Best Author 1', 'trueAuthorNew', 'zlast author', '&lt;AuthorSpecialSymbols&gt;&amp;&quot;']
        )
# ------------------------------------------------------------------------------------------------------------------
def test_get_authors_list_without_escaping(self):
self.assertEqual(Author.get_authors_list("'"), ["O'Connor"])
self.assertEqual(Author.get_authors_list("Connor", False), ["O'Connor"])
self.assertEqual(Author.get_authors_list('b'), ['Best Author 1', '<AuthorSpecialSymbols>&"'])
self.assertEqual(
Author.get_authors_list('e'),
['Best Author 1', 'trueAuthorNew', '<AuthorSpecialSymbols>&"']
)
self.assertEqual(
Author.get_authors_list('author', False),
['Best Author 1', 'trueAuthorNew', 'zlast author', '<AuthorSpecialSymbols>&"']
)
# ------------------------------------------------------------------------------------------------------------------
def test_created_language(self):
self.assertEqual(Language.objects.all().count(), 2)
self.assertNotEqual(self.author1, self.author2)
# ------------------------------------------------------------------------------------------------------------------
def test_created_books(self):
books = Book.objects.all()
self.assertEqual(books.count(), 7)
self.assertEqual(books.filter(private_book=True).count(), 2)
self.assertEqual(books.filter(id_category=self.category1).count(), 4)
self.assertEqual(books.filter(id_author=self.author1).count(), 3)
self.assertEqual(books.filter(language=self.language_en).count(), 4)
self.assertEqual(books.filter(photo=False).count(), 2)
self.assertEqual(books.filter(who_added=self.the_user1).count(), 3)
self.assertEqual(books.filter(id_category=self.category2, id_author=self.author2).count(), 1)
self.assertEqual(books.filter(id_category=self.category1,
id_author=self.author2,
language=self.language_ru,
who_added=self.the_user1).count(), 1)
self.assertEqual(books.filter(id_category=self.category1,
id_author=self.author2,
language=self.language_ru,
who_added=self.the_user2).count(), 0)
self.assertEqual(books.filter(blocked_book=True).count(), 3)
# ------------------------------------------------------------------------------------------------------------------
def test_get_related_objects_for_create(self):
test_book_path = os.path.join(TEST_DATA_DIR, 'test_book.pdf')
form_data = {
'bookname': 'The new book',
'author': 'trueAuthorNew',
'category': 'category1',
'language': 'English',
'about': 'about book',
'bookfile': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
}
form_data_new_author = copy.deepcopy(form_data)
form_data_new_author['author'] = 'super new author'
self.assertEqual(Author.objects.all().count(), 5)
form = AddBookForm(data=form_data)
form.is_valid()
form_with_new_author = AddBookForm(data=form_data_new_author)
form_with_new_author.is_valid()
related_data = Book.get_related_objects_for_create(self.user1.id, form)
self.assertTrue(isinstance(related_data, BookRelatedData))
self.assertEqual(len(related_data), 4)
self.assertEqual(related_data.author, Author.objects.get(author_name='trueAuthorNew'))
self.assertEqual(Author.objects.all().count(), 5)
related_data_new_author = Book.get_related_objects_for_create(self.user1.id, form_with_new_author)
self.assertTrue(isinstance(related_data, BookRelatedData))
self.assertEqual(len(related_data_new_author), 4)
self.assertEqual(related_data_new_author.author, Author.objects.get(author_name='super new author'))
self.assertEqual(Author.objects.all().count(), 6)
# ------------------------------------------------------------------------------------------------------------------
def test_get_related_objects_create_api(self):
"""
        Must generate Book related data when creating a Book object for API calls.
        A new author must be returned if its name is not present in the Author model.
"""
test_data = {'author': 'trueAuthorNew', 'category': 'category2', 'language': 'Russian'}
test_data_new_author = {'author': 'NEW AUTHOR', 'category': 'category1', 'language': 'English'}
self.assertEqual(
Book.get_related_objects_create_api(self.the_user1, test_data),
BookRelatedData(self.author2, self.category2, self.language_ru, None)
)
self.assertEqual(Author.objects.all().count(), 5)
self.assertEqual(
Book.get_related_objects_create_api(self.the_user1, test_data_new_author),
BookRelatedData(Author.objects.get(author_name='NEW AUTHOR'), self.category1, self.language_en, None)
)
self.assertEqual(Author.objects.all().count(), 6)
# ------------------------------------------------------------------------------------------------------------------
def test_get_related_objects_selected_book_unknown_user(self):
"""
Must generate selected book related data for unknown (anonymous) users.
"""
third_book = Book.objects.get(book_name='Third Book')
sixth_book = Book.objects.get(book_name='Sixth Book')
self.assertTrue(isinstance(Book.get_related_objects_selected_book(self.anonymous_user, third_book.id), dict))
related_third_book = Book.get_related_objects_selected_book(self.anonymous_user, third_book.id)
related_sixth_book = Book.get_related_objects_selected_book(self.anonymous_user, sixth_book.id)
self.assertEqual(related_third_book['book'], third_book)
self.assertEqual(related_third_book['avg_book_rating'], {'rating__avg': 6.0})
self.assertEqual(related_third_book['book_rating_count'], 3)
self.assertEqual(related_third_book['added_book'], None)
self.assertEqual(related_third_book['comments'].count(), 1)
self.assertEqual(related_third_book['comments'][0],
BookComment.objects.filter(id_book=third_book).order_by('-id')[0])
self.assertEqual(related_sixth_book['book'], sixth_book)
self.assertEqual(related_sixth_book['avg_book_rating'], {'rating__avg': 4.0})
self.assertEqual(related_sixth_book['book_rating_count'], 1)
self.assertEqual(related_sixth_book['added_book'], None)
self.assertEqual(related_sixth_book['comments'].count(), 0)
AddedBook.objects.create(id_user=self.the_user5, id_book=third_book)
BookRating.objects.create(id_user=self.the_user6, id_book=third_book, rating=10)
BookComment.objects.create(id_user=self.the_user6, id_book=third_book, text='TEST TEXT 2')
related_third_book = Book.get_related_objects_selected_book(self.anonymous_user, third_book.id)
self.assertEqual(related_third_book['book'], third_book)
self.assertEqual(related_third_book['avg_book_rating'], {'rating__avg': 7.0})
self.assertEqual(related_third_book['book_rating_count'], 4)
self.assertEqual(related_third_book['added_book'], None)
self.assertEqual(related_third_book['comments'].count(), 2)
# ------------------------------------------------------------------------------------------------------------------
def test_get_related_objects_selected_book_added_user(self):
"""
        This case tests only the 'added_book' param, because for a user who is
        reading the book this is the only attribute that changes relative to the test above.
"""
third_book = Book.objects.get(book_name='Third Book')
sixth_book = Book.objects.get(book_name='Sixth Book')
self.assertTrue(isinstance(Book.get_related_objects_selected_book(self.the_user1.id_user, third_book.id), dict))
related_third_book = Book.get_related_objects_selected_book(self.the_user1.id_user, third_book.id)
related_sixth_book = Book.get_related_objects_selected_book(self.the_user1.id_user, sixth_book.id)
self.assertEqual(related_third_book['added_book'],
AddedBook.objects.get(id_book=third_book, id_user=self.the_user1))
self.assertEqual(related_sixth_book['added_book'],
AddedBook.objects.get(id_book=sixth_book, id_user=self.the_user1))
# ------------------------------------------------------------------------------------------------------------------
def test_get_related_objects_selected_book_with_user_key(self):
"""
        Tests returning related-objects data for the selected book with a 'user_key'
        attribute, meaning the user is anonymous (i.e. not logged in) but identified
        by a user key. Done for API request access.
"""
third_book = Book.objects.get(book_name='Third Book')
related_third_book = Book.get_related_objects_selected_book(
self.anonymous_user, third_book.id, self.the_user1.auth_token
)
self.assertEqual(related_third_book['book'], third_book)
self.assertEqual(related_third_book['avg_book_rating'], {'rating__avg': 6.0})
self.assertEqual(related_third_book['book_rating_count'], 3)
self.assertEqual(related_third_book['added_book'],
AddedBook.objects.get(id_book=third_book, id_user=self.the_user1))
self.assertEqual(related_third_book['comments'].count(), 1)
self.assertEqual(related_third_book['comments'][0],
BookComment.objects.filter(id_book=third_book).order_by('-id')[0])
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_book_name_category1(self):
"""
Must generate correct dictionaries for anonymous users, users with private books and without.
Testing first category.
"""
first_book = Book.objects.get(book_name='First Book')
third_book = Book.objects.get(book_name='Third Book')
fourth_book = Book.objects.get(book_name='Fourth Book')
first_book_dict = Utils.generate_sort_dict(first_book)
third_book_dict = Utils.generate_sort_dict(third_book)
fourth_book_dict = Utils.generate_sort_dict(fourth_book)
self.assertTrue(isinstance(Book.sort_by_book_name(self.anonymous_user, self.category1), list))
self.assertEqual(len(Book.sort_by_book_name(self.anonymous_user, self.category1)), 3)
self.assertEqual(Book.sort_by_book_name(self.anonymous_user, self.category1)[0], fourth_book_dict)
self.assertEqual(Book.sort_by_book_name(self.anonymous_user, self.category1)[2], third_book_dict)
self.assertEqual(len(Book.sort_by_book_name(self.the_user2.id_user, self.category1)), 3)
self.assertEqual(Book.sort_by_book_name(self.the_user2.id_user, self.category1)[0], fourth_book_dict)
self.assertEqual(Book.sort_by_book_name(self.the_user2.id_user, self.category1)[2], third_book_dict)
self.assertEqual(len(Book.sort_by_book_name(self.the_user1.id_user, self.category1)), 4)
self.assertEqual(Book.sort_by_book_name(self.the_user1.id_user, self.category1)[0], first_book_dict)
self.assertEqual(Book.sort_by_book_name(self.the_user1.id_user, self.category1)[3], third_book_dict)
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_book_name_category2(self):
"""
Must generate correct dictionaries for anonymous users, users with private books and without.
        Testing second category.
"""
fifth_book = Book.objects.get(book_name='Fifth Book')
seventh_book = Book.objects.get(book_name='Seventh Book<>&"')
fifth_book_dict = Utils.generate_sort_dict(fifth_book)
seventh_book_dict = Utils.generate_sort_dict(seventh_book)
self.assertEqual(len(Book.sort_by_book_name(self.anonymous_user, self.category2)), 2)
self.assertEqual(Book.sort_by_book_name(self.anonymous_user, self.category2)[0], seventh_book_dict)
self.assertEqual(len(Book.sort_by_book_name(self.the_user2.id_user, self.category2)), 2)
self.assertEqual(Book.sort_by_book_name(self.the_user2.id_user, self.category2)[0], seventh_book_dict)
self.assertEqual(len(Book.sort_by_book_name(self.the_user1.id_user, self.category2)), 3)
self.assertEqual(Book.sort_by_book_name(self.the_user1.id_user, self.category2)[0], fifth_book_dict)
self.assertEqual(Book.sort_by_book_name(self.the_user1.id_user, self.category2)[1], seventh_book_dict)
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_author_category1(self):
"""
Must generate correct dictionaries for anonymous users, users with private books and without.
Testing returned book authors at first category.
"""
self.assertTrue(isinstance(Book.sort_by_author(self.anonymous_user, self.category1), list))
self.assertEqual(len(Book.sort_by_author(self.anonymous_user, self.category1)), 3)
self.assertEqual(Book.sort_by_author(self.anonymous_user, self.category1)[0]['author'],
self.author1.author_name)
self.assertEqual(Book.sort_by_author(self.anonymous_user, self.category1)[2]['author'],
self.author2.author_name)
self.assertEqual(len(Book.sort_by_author(self.the_user2.id_user, self.category1)), 3)
self.assertEqual(Book.sort_by_author(self.the_user2.id_user, self.category1)[0]['author'],
self.author1.author_name)
self.assertEqual(Book.sort_by_author(self.the_user2.id_user, self.category1)[2]['author'],
self.author2.author_name)
self.assertEqual(len(Book.sort_by_author(self.the_user1.id_user, self.category1)), 4)
self.assertEqual(Book.sort_by_author(self.the_user1.id_user, self.category1)[0]['author'],
self.author1.author_name)
self.assertEqual(Book.sort_by_author(self.the_user1.id_user, self.category1)[3]['author'],
self.author2.author_name)
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_author_category2(self):
"""
Must generate correct dictionaries for anonymous users, users with private books and without.
Testing returned book authors at second category.
"""
        escaped_author_name = '&lt;AuthorSpecialSymbols&gt;&amp;&quot;'
self.assertEqual(len(Book.sort_by_author(self.anonymous_user, self.category2)), 2)
self.assertEqual(Book.sort_by_author(self.anonymous_user, self.category2)[0]['author'], escaped_author_name)
self.assertEqual(len(Book.sort_by_author(self.the_user2.id_user, self.category2)), 2)
self.assertEqual(Book.sort_by_author(self.the_user2.id_user, self.category2)[0]['author'], escaped_author_name)
self.assertEqual(len(Book.sort_by_author(self.the_user1.id_user, self.category2)), 3)
self.assertEqual(Book.sort_by_author(self.the_user1.id_user, self.category2)[0]['author'], escaped_author_name)
self.assertEqual(Book.sort_by_author(self.the_user1.id_user, self.category2)[1]['author'],
self.author1.author_name)
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_estimation_category1(self):
"""
Must generate correct dictionaries for anonymous users, users with private books and without.
Testing returned book rating at first category.
"""
self.assertTrue(isinstance(Book.sort_by_estimation(self.anonymous_user, self.category1), list))
self.assertEqual(len(Book.sort_by_estimation(self.anonymous_user, self.category1)), 3)
self.assertEqual(Book.sort_by_estimation(self.anonymous_user, self.category1)[0]['rating'], 7)
self.assertEqual(Book.sort_by_estimation(self.anonymous_user, self.category1)[2]['rating'], 6)
self.assertEqual(len(Book.sort_by_estimation(self.the_user2.id_user, self.category1)), 3)
self.assertEqual(Book.sort_by_estimation(self.the_user2.id_user, self.category1)[0]['rating'], 7)
self.assertEqual(Book.sort_by_estimation(self.the_user2.id_user, self.category1)[2]['rating'], 6)
self.assertEqual(len(Book.sort_by_estimation(self.the_user1.id_user, self.category1)), 4)
self.assertEqual(Book.sort_by_estimation(self.the_user1.id_user, self.category1)[0]['rating'], 7)
self.assertEqual(Book.sort_by_estimation(self.the_user1.id_user, self.category1)[2]['rating'], 6)
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_estimation_category2(self):
"""
Must generate correct dictionaries for anonymous users, users with private books and without.
Testing returned book rating at second category.
"""
self.assertEqual(len(Book.sort_by_estimation(self.anonymous_user, self.category2)), 2)
self.assertEqual(Book.sort_by_estimation(self.anonymous_user, self.category2)[0]['rating'], 4)
self.assertEqual(len(Book.sort_by_estimation(self.the_user2.id_user, self.category2)), 2)
self.assertEqual(Book.sort_by_estimation(self.the_user2.id_user, self.category2)[0]['rating'], 4)
self.assertEqual(len(Book.sort_by_estimation(self.the_user1.id_user, self.category2)), 3)
self.assertEqual(Book.sort_by_estimation(self.the_user1.id_user, self.category2)[0]['rating'], 4)
self.assertEqual(Book.sort_by_estimation(self.the_user1.id_user, self.category2)[1]['rating'], None)
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_readable(self):
"""
        Must generate correct data for the most readable books for anonymous users and users with private books.
Testing count of sorted books with and without selected categories.
"""
sorted_structure = Book.sort_by_readable(self.anonymous_user, self.category1)
self.assertTrue(isinstance(sorted_structure, list))
self.assertTrue(isinstance(sorted_structure[0], dict))
self.assertEqual(set(sorted_structure[0].keys()), {'id', 'name', 'author', 'url'})
self.assertEqual(len(Book.sort_by_readable(user=self.anonymous_user, category=self.category1)), 3)
self.assertEqual(len(Book.sort_by_readable(user=self.anonymous_user, category=self.category1, count=2)), 2)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user1.id_user, category=self.category1)), 3)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user1.id_user, category=self.category1, count=2)), 2)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user2.id_user, category=self.category1)), 3)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user2.id_user, category=self.category1, count=2)), 2)
self.assertEqual(len(Book.sort_by_readable(user=self.anonymous_user)), 4)
self.assertEqual(len(Book.sort_by_readable(user=self.anonymous_user, count=2)), 2)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user1.id_user)), 4)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user1.id_user, count=3)), 3)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user2.id_user)), 4)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user2.id_user, count=2)), 2)
# ------------------------------------------------------------------------------------------------------------------
def test_generate_books(self):
"""
Must generate correct dictionaries for Book data.
"""
books = Book.objects.all()
self.assertTrue(isinstance(Book.generate_books(books), list))
self.assertEqual(len(Book.generate_books(books)), 7)
self.assertEqual(len(Book.generate_books(books)[0].keys()), 5)
self.assertEqual(Book.generate_books(books)[0], Utils.generate_sort_dict(books[0]))
self.assertEqual(Book.generate_books(books)[6], Utils.generate_sort_dict(books[6]))
# ------------------------------------------------------------------------------------------------------------------
def test_fetch_books(self):
"""
Must generate list of dicts with Books data depending on different criteria.
"""
self.assertTrue(isinstance(Book.fetch_books('book'), list))
self.assertEqual(len(Book.fetch_books('Second Book')), 1)
self.assertEqual(len(Book.fetch_books('book')), 7)
self.assertEqual(len(Book.fetch_books('ook')), 7)
self.assertEqual(len(Book.fetch_books('trueAuthorNew')), 3)
self.assertEqual(len(Book.fetch_books('author')), 7)
self.assertEqual(len(Book.fetch_books('new')), 3)
self.assertEqual(len(Book.fetch_books('True')), 3)
# ------------------------------------------------------------------------------------------------------------------
def test_generate_existing_books(self):
"""
Must generate list of dicts with Books data depending on different criteria and excluding private books.
"""
self.assertTrue(isinstance(Book.generate_existing_books('book'), list))
self.assertEqual(len(Book.generate_existing_books('book')), 5)
self.assertEqual(len(Book.generate_existing_books('Book')), 5)
self.assertEqual(len(Book.generate_existing_books('bOoK')), 5)
fourth_book = Book.objects.get(book_name='Fourth Book')
test_book = Book.generate_existing_books('fourth')
self.assertEqual(len(test_book), 1)
self.assertTrue(isinstance(test_book[0], dict))
self.assertEqual(test_book[0], {'url': reverse('book', args=[fourth_book.id]),
'name': fourth_book.book_name})
test_private_book = Book.generate_existing_books('fifth')
self.assertEqual(len(test_private_book), 0)
# ------------------------------------------------------------------------------------------------------------------
def test_exclude_private_books(self):
"""
Must generate query sets or lists with Books depending on user type.
"""
all_books = Book.objects.all()
list_all_books = list(all_books)
self.assertEqual(Book.exclude_private_books(self.the_user1.id_user, all_books).count(), 7)
self.assertEqual(Book.exclude_private_books(self.the_user2.id_user, all_books).count(), 5)
self.assertTrue(isinstance(Book.exclude_private_books(self.the_user1.id_user, all_books), QuerySet))
self.assertTrue(isinstance(Book.exclude_private_books(self.the_user2.id_user, all_books), QuerySet))
self.assertEqual(len(Book.exclude_private_books(self.the_user1.id_user, list_all_books)), 7)
self.assertEqual(len(Book.exclude_private_books(self.the_user2.id_user, list_all_books)), 5)
self.assertTrue(isinstance(Book.exclude_private_books(self.the_user1.id_user, list_all_books), list))
self.assertTrue(isinstance(Book.exclude_private_books(self.the_user2.id_user, list_all_books), list))
self.assertTrue(self.anonymous_user.is_anonymous)
self.assertEqual(Book.exclude_private_books(self.anonymous_user, all_books).count(), 5)
self.assertEqual(len(Book.exclude_private_books(self.anonymous_user, list_all_books)), 5)
self.assertTrue(isinstance(Book.exclude_private_books(self.anonymous_user, all_books), QuerySet))
self.assertTrue(isinstance(Book.exclude_private_books(self.anonymous_user, list_all_books), list))
# ------------------------------------------------------------------------------------------------------------------
def test_added_books(self):
self.assertEqual(AddedBook.objects.all().count(), 8)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1).count(), 3)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2).count(), 3)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user5).count(), 1)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user6).count(), 1)
self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Sixth Book')).count(), 4)
self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Third Book')).count(), 2)
self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Fifth Book')).count(), 0)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1,
id_book=Book.objects.get(book_name='Third Book')).count(), 1)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1,
id_book=Book.objects.get(book_name='Sixth Book')).count(), 1)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2,
id_book=Book.objects.get(book_name='Sixth Book')).count(), 1)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2,
id_book=Book.objects.get(book_name='Fourth Book')).count(), 0)
# ------------------------------------------------------------------------------------------------------------------
def test_added_books_change(self):
"""
        Must save the book's page after changing it.
"""
added_book3 = AddedBook.objects.get(id_user=self.the_user1, id_book=Book.objects.get(book_name='Third Book'))
added_book6 = AddedBook.objects.get(id_user=self.the_user2, id_book=Book.objects.get(book_name='Sixth Book'))
self.assertEqual(added_book3.last_page, 1)
self.assertEqual(added_book6.last_page, 1)
added_book3.last_page = 500
added_book3.save()
self.assertEqual(added_book3.last_page, 500)
self.assertEqual(added_book6.last_page, 1)
added_book6.last_page = 256
added_book6.save()
self.assertEqual(added_book3.last_page, 500)
self.assertEqual(added_book6.last_page, 256)
# ------------------------------------------------------------------------------------------------------------------
def test_added_books_delete(self):
added_book_third = AddedBook.objects.get(id_user=self.the_user1,
id_book=Book.objects.get(book_name='Third Book'))
added_book_sixth = AddedBook.objects.get(id_user=self.the_user2,
id_book=Book.objects.get(book_name='Sixth Book'))
added_book_third.delete()
added_book_sixth.delete()
self.assertEqual(AddedBook.objects.all().count(), 6)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1).count(), 2)
        self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2).count(), 2)
self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Sixth Book')).count(), 3)
self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Third Book')).count(), 1)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1,
id_book=Book.objects.get(book_name='Third Book')).count(), 0)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2,
id_book=Book.objects.get(book_name='Sixth Book')).count(), 0)
# ------------------------------------------------------------------------------------------------------------------
def test_get_user_added_book(self):
"""
        Must generate the list of books added by the user (the books the user is reading).
"""
self.assertTrue(self.anonymous_user.is_anonymous)
self.assertEqual(len(AddedBook.get_user_added_books(self.anonymous_user)), 0)
self.assertEqual(AddedBook.get_user_added_books(self.anonymous_user), [])
self.assertEqual(AddedBook.get_user_added_books(self.the_user1.id_user).count(), 3)
self.assertEqual(AddedBook.get_user_added_books(self.the_user5.id_user).count(), 1)
self.assertNotEqual(AddedBook.get_user_added_books(self.the_user1.id_user), [])
removed_obj = AddedBook.objects.get(id_book=Book.objects.get(book_name='Sixth Book'),
id_user=self.the_user5)
removed_obj.delete()
self.assertEqual(AddedBook.get_user_added_books(self.the_user5.id_user).count(), 0)
self.assertNotEqual(AddedBook.get_user_added_books(self.the_user5.id_user), [])
# ------------------------------------------------------------------------------------------------------------------
def test_get_count_added(self):
"""
        Must return the count of users reading a given book.
"""
third_book = Book.objects.get(book_name='Third Book')
sixth_book = Book.objects.get(book_name='Sixth Book')
not_existing_id = 10000
self.assertEqual(AddedBook.get_count_added(third_book.id), 2)
self.assertEqual(AddedBook.get_count_added(sixth_book.id), 4)
self.assertEqual(AddedBook.get_count_added(not_existing_id), 0)
removed_third = AddedBook.objects.filter(id_user=self.the_user1, id_book=third_book)
removed_third.delete()
removed_sixth = AddedBook.objects.filter(id_user=self.the_user1, id_book=sixth_book)
removed_sixth.delete()
self.assertEqual(AddedBook.get_count_added(third_book.id), 1)
self.assertEqual(AddedBook.get_count_added(sixth_book.id), 3)
self.assertEqual(AddedBook.get_count_added(not_existing_id), 0)
# ------------------------------------------------------------------------------------------------------------------
def test_book_rating(self):
self.assertEqual(BookRating.objects.all().count(), 6)
self.assertEqual(BookRating.objects.filter(id_book=Book.objects.filter(book_name='Third Book')).count(), 3)
self.assertEqual(BookRating.objects.filter(id_user=self.the_user1).count(), 3)
self.assertEqual(BookRating.objects.filter(id_user=self.the_user2).count(), 2)
self.assertEqual(BookRating.objects.filter(rating=7).count(), 2)
self.assertEqual(BookRating.objects.filter(id_book=Book.objects.get(book_name='Third Book'),
id_user=self.the_user1).count(), 1)
self.assertEqual(BookRating.objects.filter(id_book=Book.objects.get(book_name='Third Book'),
id_user=self.the_user6).count(), 0)
self.assertEqual(BookRating.objects.filter(id_book=Book.objects.get(book_name='Fourth Book'),
id_user=self.the_user1,
rating=7).count(), 1)
# ------------------------------------------------------------------------------------------------------------------
def test_changed_book_rating(self):
removed_rating = BookRating.objects.get(id_book=Book.objects.get(book_name='Third Book'),
id_user=self.the_user1)
removed_rating.delete()
self.assertEqual(BookRating.objects.all().count(), 5)
changed_rating1 = BookRating.objects.get(id_book=Book.objects.get(book_name='Second Book'),
id_user=self.the_user2)
changed_rating2 = BookRating.objects.get(id_book=Book.objects.get(book_name='Fourth Book'),
id_user=self.the_user1)
self.assertEqual(BookRating.objects.filter(rating=7).count(), 2)
self.assertEqual(changed_rating1.rating, 7)
self.assertEqual(changed_rating2.rating, 7)
changed_rating1.rating = 4
changed_rating1.save()
changed_rating2.rating = 3
changed_rating2.save()
self.assertEqual(changed_rating1.rating, 4)
self.assertEqual(changed_rating2.rating, 3)
self.assertEqual(BookRating.objects.filter(rating=7).count(), 0)
self.assertEqual(BookRating.objects.filter(rating=4).count(), 2)
self.assertEqual(BookRating.objects.filter(rating=3).count(), 2)
# ------------------------------------------------------------------------------------------------------------------
def test_book_comment(self):
self.assertEqual(BookComment.objects.all().count(), 5)
self.assertEqual(BookComment.objects.filter(id_user=self.the_user1).count(), 3)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Second Book')).count(), 2)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Fourth Book')).count(), 2)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Sixth Book')).count(), 0)
self.assertEqual(BookComment.objects.filter(id_user=self.the_user6).count(), 0)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Second Book'),
id_user=self.the_user1).count(), 1)
BookComment.objects.create(id_book=Book.objects.get(book_name='Second Book'),
id_user=self.the_user1,
text='New comment user1 book 2')
self.assertEqual(BookComment.objects.all().count(), 6)
self.assertEqual(BookComment.objects.filter(id_user=self.the_user1).count(), 4)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Second Book')).count(), 3)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Second Book'),
id_user=self.the_user1).count(), 2)
deleted_comment = BookComment.objects.get(id_book=Book.objects.get(book_name='Fourth Book'),
id_user=self.the_user5)
deleted_comment.delete()
self.assertEqual(BookComment.objects.all().count(), 5)
self.assertEqual(BookComment.objects.filter(id_user=self.the_user5).count(), 0)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Fourth Book')).count(), 1)
# ------------------------------------------------------------------------------------------------------------------
def test_post_messages(self):
self.assertEqual(Post.objects.all().count(), 3)
self.assertEqual(Post.objects.filter(user=self.the_user1).count(), 2)
self.assertEqual(Post.objects.filter(user=self.the_user2).count(), 1)
deleted_post = Post.objects.get(user=self.the_user1, heading='post 2')
deleted_post.delete()
self.assertEqual(Post.objects.all().count(), 2)
self.assertEqual(Post.objects.filter(user=self.the_user1).count(), 1)
self.assertEqual(Post.objects.filter(user=self.the_user2).count(), 1)
# ------------------------------------------------------------------------------------------------------------------
def test_support_messages(self):
self.assertEqual(SupportMessage.objects.all().count(), 4)
self.assertEqual(SupportMessage.objects.filter(email='[email protected]').count(), 2)
self.assertEqual(SupportMessage.objects.filter(email='[email protected]').count(), 1)
self.assertEqual(SupportMessage.objects.filter(is_checked=False).count(), 4)
checked_message = SupportMessage.objects.get(email='[email protected]', text='Test text1')
checked_message.is_checked = True
checked_message.save()
self.assertEqual(SupportMessage.objects.filter(is_checked=False).count(), 3)
# ------------------------------------------------------------------------------------------------------------------
def tearDown(self):
for book in Book.objects.all():
if os.path.exists(book.book_file.path):
os.remove(book.book_file.path)
if book.photo and os.path.exists(book.photo.path):
os.remove(book.photo.path)
| OlegKlimenko/Plamber | app/tests/test_models.py | Python | apache-2.0 | 53,921 |
# coding: utf-8
from flask import Flask, request, render_template
import utils, config
import sys
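# Python 2 hack: reload(sys) restores setdefaultencoding (stripped by site.py)
# so the process-wide default string encoding can be forced to UTF-8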
reload(sys)
sys.setdefaultencoding('utf-8')
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
url = None
if request.method == 'POST':
account = request.form['account']
password = request.form['password']
url = 'Empty!!'
helper = utils.Helper()
try:
token = helper.get_token()
if token != '':
helper.login(account, password, token)
url = helper.get_play_url()
        except Exception:
            # Re-raise, preserving the original traceback
            raise
return render_template('index.html', play_url=url)
if __name__ == '__main__':
app.debug = config.DEBUG
app.run(host=config.HOST, port=config.PORT) | solupro/kancolle | app.py | Python | unlicense | 698 |
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from neutron_lib import constants
from oslo_config import cfg
from oslo_utils import importutils
import testscenarios
from neutron import context
from neutron.db import agentschedulers_db as sched_db
from neutron.db import common_db_mixin
from neutron.db import models_v2
from neutron.extensions import dhcpagentscheduler
from neutron.scheduler import dhcp_agent_scheduler
from neutron.tests.common import helpers
from neutron.tests.unit.plugins.ml2 import test_plugin
from neutron.tests.unit import testlib_api
# Required to generate tests from scenarios. Not compatible with nose.
load_tests = testscenarios.load_tests_apply_scenarios
HOST_C = 'host-c'
HOST_D = 'host-d'
class TestDhcpSchedulerBaseTestCase(testlib_api.SqlTestCase):
def setUp(self):
super(TestDhcpSchedulerBaseTestCase, self).setUp()
self.ctx = context.get_admin_context()
self.network = {'id': 'foo_network_id'}
self.network_id = 'foo_network_id'
self._save_networks([self.network_id])
def _create_and_set_agents_down(self, hosts, down_agent_count=0,
admin_state_up=True,
az=helpers.DEFAULT_AZ):
agents = []
for i, host in enumerate(hosts):
is_alive = i >= down_agent_count
agents.append(helpers.register_dhcp_agent(
host,
admin_state_up=admin_state_up,
alive=is_alive,
az=az))
return agents
def _save_networks(self, networks):
for network_id in networks:
with self.ctx.session.begin(subtransactions=True):
self.ctx.session.add(models_v2.Network(id=network_id))
def _test_schedule_bind_network(self, agents, network_id):
scheduler = dhcp_agent_scheduler.ChanceScheduler()
scheduler.resource_filter.bind(self.ctx, agents, network_id)
results = self.ctx.session.query(
sched_db.NetworkDhcpAgentBinding).filter_by(
network_id=network_id).all()
self.assertEqual(len(agents), len(results))
for result in results:
self.assertEqual(network_id, result.network_id)
class TestDhcpScheduler(TestDhcpSchedulerBaseTestCase):
def test_schedule_bind_network_single_agent(self):
agents = self._create_and_set_agents_down(['host-a'])
self._test_schedule_bind_network(agents, self.network_id)
def test_schedule_bind_network_multi_agents(self):
agents = self._create_and_set_agents_down(['host-a', 'host-b'])
self._test_schedule_bind_network(agents, self.network_id)
def test_schedule_bind_network_multi_agent_fail_one(self):
agents = self._create_and_set_agents_down(['host-a'])
self._test_schedule_bind_network(agents, self.network_id)
with mock.patch.object(dhcp_agent_scheduler.LOG, 'info') as fake_log:
self._test_schedule_bind_network(agents, self.network_id)
self.assertEqual(1, fake_log.call_count)
def _test_get_agents_and_scheduler_for_dead_agent(self):
agents = self._create_and_set_agents_down(['dead_host', 'alive_host'],
1)
dead_agent = [agents[0]]
alive_agent = [agents[1]]
self._test_schedule_bind_network(dead_agent, self.network_id)
scheduler = dhcp_agent_scheduler.ChanceScheduler()
return dead_agent, alive_agent, scheduler
def _test_reschedule_vs_network_on_dead_agent(self,
active_hosts_only):
dead_agent, alive_agent, scheduler = (
self._test_get_agents_and_scheduler_for_dead_agent())
network = {'id': self.network_id}
plugin = mock.Mock()
plugin.get_subnets.return_value = [{"network_id": self.network_id,
"enable_dhcp": True}]
plugin.get_agents_db.return_value = dead_agent + alive_agent
plugin.filter_hosts_with_network_access.side_effect = (
lambda context, network_id, hosts: hosts)
if active_hosts_only:
plugin.get_dhcp_agents_hosting_networks.return_value = []
self.assertTrue(
scheduler.schedule(
plugin, self.ctx, network))
else:
plugin.get_dhcp_agents_hosting_networks.return_value = dead_agent
self.assertFalse(
scheduler.schedule(
plugin, self.ctx, network))
def test_network_rescheduled_when_db_returns_active_hosts(self):
self._test_reschedule_vs_network_on_dead_agent(True)
def test_network_not_rescheduled_when_db_returns_all_hosts(self):
self._test_reschedule_vs_network_on_dead_agent(False)
def _get_agent_binding_from_db(self, agent):
return self.ctx.session.query(
sched_db.NetworkDhcpAgentBinding
).filter_by(dhcp_agent_id=agent[0].id).all()
def _test_auto_reschedule_vs_network_on_dead_agent(self,
active_hosts_only):
dead_agent, alive_agent, scheduler = (
self._test_get_agents_and_scheduler_for_dead_agent())
plugin = mock.Mock()
plugin.get_subnets.return_value = [{"network_id": self.network_id,
"enable_dhcp": True}]
plugin.get_network.return_value = self.network
if active_hosts_only:
plugin.get_dhcp_agents_hosting_networks.return_value = []
else:
plugin.get_dhcp_agents_hosting_networks.return_value = dead_agent
network_assigned_to_dead_agent = (
self._get_agent_binding_from_db(dead_agent))
self.assertEqual(1, len(network_assigned_to_dead_agent))
self.assertTrue(
scheduler.auto_schedule_networks(
plugin, self.ctx, "alive_host"))
network_assigned_to_dead_agent = (
self._get_agent_binding_from_db(dead_agent))
network_assigned_to_alive_agent = (
self._get_agent_binding_from_db(alive_agent))
self.assertEqual(1, len(network_assigned_to_dead_agent))
if active_hosts_only:
self.assertEqual(1, len(network_assigned_to_alive_agent))
else:
self.assertEqual(0, len(network_assigned_to_alive_agent))
def test_network_auto_rescheduled_when_db_returns_active_hosts(self):
self._test_auto_reschedule_vs_network_on_dead_agent(True)
def test_network_not_auto_rescheduled_when_db_returns_all_hosts(self):
self._test_auto_reschedule_vs_network_on_dead_agent(False)
class TestAutoScheduleNetworks(TestDhcpSchedulerBaseTestCase):
"""Unit test scenarios for ChanceScheduler.auto_schedule_networks.
network_present
Network is present or not
enable_dhcp
Dhcp is enabled or disabled in the subnet of the network
scheduled_already
Network is already scheduled to the agent or not
agent_down
Dhcp agent is down or alive
valid_host
        If true, then a valid host is passed to schedule the network,
        else an invalid host is passed.
az_hints
'availability_zone_hints' of the network.
        Note that the default 'availability_zone' of an agent is 'nova'.
"""
scenarios = [
('Network present',
dict(network_present=True,
enable_dhcp=True,
scheduled_already=False,
agent_down=False,
valid_host=True,
az_hints=[])),
('No network',
dict(network_present=False,
enable_dhcp=False,
scheduled_already=False,
agent_down=False,
valid_host=True,
az_hints=[])),
('Network already scheduled',
dict(network_present=True,
enable_dhcp=True,
scheduled_already=True,
agent_down=False,
valid_host=True,
az_hints=[])),
('Agent down',
dict(network_present=True,
enable_dhcp=True,
scheduled_already=False,
agent_down=False,
valid_host=True,
az_hints=[])),
('dhcp disabled',
dict(network_present=True,
enable_dhcp=False,
scheduled_already=False,
agent_down=False,
valid_host=False,
az_hints=[])),
('Invalid host',
dict(network_present=True,
enable_dhcp=True,
scheduled_already=False,
agent_down=False,
valid_host=False,
az_hints=[])),
('Match AZ',
dict(network_present=True,
enable_dhcp=True,
scheduled_already=False,
agent_down=False,
valid_host=True,
az_hints=['nova'])),
('Not match AZ',
dict(network_present=True,
enable_dhcp=True,
scheduled_already=False,
agent_down=False,
valid_host=True,
az_hints=['not-match'])),
]
def test_auto_schedule_network(self):
plugin = mock.MagicMock()
plugin.get_subnets.return_value = (
[{"network_id": self.network_id, "enable_dhcp": self.enable_dhcp}]
if self.network_present else [])
plugin.get_network.return_value = {'availability_zone_hints':
self.az_hints}
scheduler = dhcp_agent_scheduler.ChanceScheduler()
if self.network_present:
down_agent_count = 1 if self.agent_down else 0
agents = self._create_and_set_agents_down(
['host-a'], down_agent_count=down_agent_count)
if self.scheduled_already:
self._test_schedule_bind_network(agents, self.network_id)
expected_result = (self.network_present and self.enable_dhcp)
expected_hosted_agents = (1 if expected_result and
self.valid_host else 0)
if (self.az_hints and
agents[0]['availability_zone'] not in self.az_hints):
expected_hosted_agents = 0
host = "host-a" if self.valid_host else "host-b"
observed_ret_value = scheduler.auto_schedule_networks(
plugin, self.ctx, host)
self.assertEqual(expected_result, observed_ret_value)
hosted_agents = self.ctx.session.query(
sched_db.NetworkDhcpAgentBinding).all()
self.assertEqual(expected_hosted_agents, len(hosted_agents))
class TestNetworksFailover(TestDhcpSchedulerBaseTestCase,
sched_db.DhcpAgentSchedulerDbMixin,
common_db_mixin.CommonDbMixin):
def test_reschedule_network_from_down_agent(self):
agents = self._create_and_set_agents_down(['host-a', 'host-b'], 1)
self._test_schedule_bind_network([agents[0]], self.network_id)
self._save_networks(["foo-network-2"])
self._test_schedule_bind_network([agents[1]], "foo-network-2")
with mock.patch.object(self, 'remove_network_from_dhcp_agent') as rn,\
mock.patch.object(self,
'schedule_network',
return_value=[agents[1]]) as sch,\
mock.patch.object(self,
'get_network',
create=True,
return_value={'id': self.network_id}):
notifier = mock.MagicMock()
self.agent_notifiers[constants.AGENT_TYPE_DHCP] = notifier
self.remove_networks_from_down_agents()
rn.assert_called_with(mock.ANY, agents[0].id, self.network_id,
notify=False)
sch.assert_called_with(mock.ANY, {'id': self.network_id})
notifier.network_added_to_agent.assert_called_with(
mock.ANY, self.network_id, agents[1].host)
def _test_failed_rescheduling(self, rn_side_effect=None):
agents = self._create_and_set_agents_down(['host-a', 'host-b'], 1)
self._test_schedule_bind_network([agents[0]], self.network_id)
with mock.patch.object(self,
'remove_network_from_dhcp_agent',
side_effect=rn_side_effect) as rn,\
mock.patch.object(self,
'schedule_network',
return_value=None) as sch,\
mock.patch.object(self,
'get_network',
create=True,
return_value={'id': self.network_id}):
notifier = mock.MagicMock()
self.agent_notifiers[constants.AGENT_TYPE_DHCP] = notifier
self.remove_networks_from_down_agents()
rn.assert_called_with(mock.ANY, agents[0].id, self.network_id,
notify=False)
sch.assert_called_with(mock.ANY, {'id': self.network_id})
self.assertFalse(notifier.network_added_to_agent.called)
def test_reschedule_network_from_down_agent_failed(self):
self._test_failed_rescheduling()
def test_reschedule_network_from_down_agent_concurrent_removal(self):
self._test_failed_rescheduling(
rn_side_effect=dhcpagentscheduler.NetworkNotHostedByDhcpAgent(
network_id='foo', agent_id='bar'))
def test_filter_bindings(self):
bindings = [
sched_db.NetworkDhcpAgentBinding(network_id='foo1',
dhcp_agent={'id': 'id1'}),
sched_db.NetworkDhcpAgentBinding(network_id='foo2',
dhcp_agent={'id': 'id1'}),
sched_db.NetworkDhcpAgentBinding(network_id='foo3',
dhcp_agent={'id': 'id2'}),
sched_db.NetworkDhcpAgentBinding(network_id='foo4',
dhcp_agent={'id': 'id2'})]
with mock.patch.object(self, 'agent_starting_up',
side_effect=[True, False]):
res = [b for b in self._filter_bindings(None, bindings)]
        # agent_starting_up is called once per agent (id1 and id2)
self.assertEqual(2, len(res))
res_ids = [b.network_id for b in res]
self.assertIn('foo3', res_ids)
self.assertIn('foo4', res_ids)
def test_reschedule_network_from_down_agent_failed_on_unexpected(self):
agents = self._create_and_set_agents_down(['host-a'], 1)
self._test_schedule_bind_network([agents[0]], self.network_id)
with mock.patch.object(
self, '_filter_bindings',
side_effect=Exception()):
# just make sure that no exception is raised
self.remove_networks_from_down_agents()
def test_reschedule_network_catches_exceptions_on_fetching_bindings(self):
with mock.patch('neutron.context.get_admin_context') as get_ctx:
mock_ctx = mock.Mock()
get_ctx.return_value = mock_ctx
mock_ctx.session.query.side_effect = Exception()
# just make sure that no exception is raised
self.remove_networks_from_down_agents()
def test_reschedule_doesnt_occur_if_no_agents(self):
agents = self._create_and_set_agents_down(['host-a', 'host-b'], 2)
self._test_schedule_bind_network([agents[0]], self.network_id)
with mock.patch.object(
self, 'remove_network_from_dhcp_agent') as rn:
self.remove_networks_from_down_agents()
self.assertFalse(rn.called)
class DHCPAgentWeightSchedulerTestCase(test_plugin.Ml2PluginV2TestCase):
"""Unit test scenarios for WeightScheduler.schedule."""
def setUp(self):
super(DHCPAgentWeightSchedulerTestCase, self).setUp()
weight_scheduler = (
'neutron.scheduler.dhcp_agent_scheduler.WeightScheduler')
cfg.CONF.set_override('network_scheduler_driver', weight_scheduler)
self.plugin = importutils.import_object('neutron.plugins.ml2.plugin.'
'Ml2Plugin')
mock.patch.object(
self.plugin, 'filter_hosts_with_network_access',
side_effect=lambda context, network_id, hosts: hosts).start()
self.plugin.network_scheduler = importutils.import_object(
weight_scheduler)
cfg.CONF.set_override("dhcp_load_type", "networks")
self.ctx = context.get_admin_context()
def _create_network(self):
net = self.plugin.create_network(
self.ctx,
{'network': {'name': 'name',
'tenant_id': 'tenant_one',
'admin_state_up': True,
'shared': True}})
return net['id']
    def test_scheduler_one_agent_per_network(self):
net_id = self._create_network()
helpers.register_dhcp_agent(HOST_C)
self.plugin.network_scheduler.schedule(self.plugin, self.ctx,
{'id': net_id})
agents = self.plugin.get_dhcp_agents_hosting_networks(self.ctx,
[net_id])
self.assertEqual(1, len(agents))
def test_scheduler_two_agents_per_network(self):
cfg.CONF.set_override('dhcp_agents_per_network', 2)
net_id = self._create_network()
helpers.register_dhcp_agent(HOST_C)
helpers.register_dhcp_agent(HOST_D)
self.plugin.network_scheduler.schedule(self.plugin, self.ctx,
{'id': net_id})
agents = self.plugin.get_dhcp_agents_hosting_networks(self.ctx,
[net_id])
self.assertEqual(2, len(agents))
def test_scheduler_no_active_agents(self):
net_id = self._create_network()
self.plugin.network_scheduler.schedule(self.plugin, self.ctx,
{'id': net_id})
agents = self.plugin.get_dhcp_agents_hosting_networks(self.ctx,
[net_id])
self.assertEqual(0, len(agents))
def test_scheduler_equal_distribution(self):
net_id_1 = self._create_network()
net_id_2 = self._create_network()
net_id_3 = self._create_network()
helpers.register_dhcp_agent(HOST_C)
helpers.register_dhcp_agent(HOST_D, networks=1)
self.plugin.network_scheduler.schedule(
self.plugin, context.get_admin_context(), {'id': net_id_1})
helpers.register_dhcp_agent(HOST_D, networks=2)
self.plugin.network_scheduler.schedule(
self.plugin, context.get_admin_context(), {'id': net_id_2})
helpers.register_dhcp_agent(HOST_C, networks=4)
self.plugin.network_scheduler.schedule(
self.plugin, context.get_admin_context(), {'id': net_id_3})
agent1 = self.plugin.get_dhcp_agents_hosting_networks(
self.ctx, [net_id_1])
agent2 = self.plugin.get_dhcp_agents_hosting_networks(
self.ctx, [net_id_2])
agent3 = self.plugin.get_dhcp_agents_hosting_networks(
self.ctx, [net_id_3])
self.assertEqual('host-c', agent1[0]['host'])
self.assertEqual('host-c', agent2[0]['host'])
self.assertEqual('host-d', agent3[0]['host'])
class TestDhcpSchedulerFilter(TestDhcpSchedulerBaseTestCase,
sched_db.DhcpAgentSchedulerDbMixin):
def _test_get_dhcp_agents_hosting_networks(self, expected, **kwargs):
agents = self._create_and_set_agents_down(['host-a', 'host-b'], 1)
agents += self._create_and_set_agents_down(['host-c', 'host-d'], 1,
admin_state_up=False)
self._test_schedule_bind_network(agents, self.network_id)
agents = self.get_dhcp_agents_hosting_networks(self.ctx,
[self.network_id],
**kwargs)
host_ids = set(a['host'] for a in agents)
self.assertEqual(expected, host_ids)
def test_get_dhcp_agents_hosting_networks_default(self):
self._test_get_dhcp_agents_hosting_networks({'host-a', 'host-b',
'host-c', 'host-d'})
def test_get_dhcp_agents_hosting_networks_active(self):
self._test_get_dhcp_agents_hosting_networks({'host-b', 'host-d'},
active=True)
def test_get_dhcp_agents_hosting_networks_admin_up(self):
self._test_get_dhcp_agents_hosting_networks({'host-a', 'host-b'},
admin_state_up=True)
def test_get_dhcp_agents_hosting_networks_active_admin_up(self):
self._test_get_dhcp_agents_hosting_networks({'host-b'},
active=True,
admin_state_up=True)
def test_get_dhcp_agents_hosting_networks_admin_down(self):
self._test_get_dhcp_agents_hosting_networks({'host-c', 'host-d'},
admin_state_up=False)
def test_get_dhcp_agents_hosting_networks_active_admin_down(self):
self._test_get_dhcp_agents_hosting_networks({'host-d'},
active=True,
admin_state_up=False)
class DHCPAgentAZAwareWeightSchedulerTestCase(TestDhcpSchedulerBaseTestCase):
def setUp(self):
super(DHCPAgentAZAwareWeightSchedulerTestCase, self).setUp()
DB_PLUGIN_KLASS = 'neutron.plugins.ml2.plugin.Ml2Plugin'
self.setup_coreplugin(DB_PLUGIN_KLASS)
cfg.CONF.set_override("network_scheduler_driver",
'neutron.scheduler.dhcp_agent_scheduler.AZAwareWeightScheduler')
self.plugin = importutils.import_object('neutron.plugins.ml2.plugin.'
'Ml2Plugin')
mock.patch.object(
self.plugin, 'filter_hosts_with_network_access',
side_effect=lambda context, network_id, hosts: hosts).start()
cfg.CONF.set_override('dhcp_agents_per_network', 1)
cfg.CONF.set_override("dhcp_load_type", "networks")
def test_az_scheduler_one_az_hints(self):
self._save_networks(['1111'])
helpers.register_dhcp_agent('az1-host1', networks=1, az='az1')
helpers.register_dhcp_agent('az1-host2', networks=2, az='az1')
helpers.register_dhcp_agent('az2-host1', networks=3, az='az2')
helpers.register_dhcp_agent('az2-host2', networks=4, az='az2')
self.plugin.network_scheduler.schedule(self.plugin, self.ctx,
{'id': '1111', 'availability_zone_hints': ['az2']})
agents = self.plugin.get_dhcp_agents_hosting_networks(self.ctx,
['1111'])
self.assertEqual(1, len(agents))
self.assertEqual('az2-host1', agents[0]['host'])
def test_az_scheduler_default_az_hints(self):
cfg.CONF.set_override('default_availability_zones', ['az1'])
self._save_networks(['1111'])
helpers.register_dhcp_agent('az1-host1', networks=1, az='az1')
helpers.register_dhcp_agent('az1-host2', networks=2, az='az1')
helpers.register_dhcp_agent('az2-host1', networks=3, az='az2')
helpers.register_dhcp_agent('az2-host2', networks=4, az='az2')
self.plugin.network_scheduler.schedule(self.plugin, self.ctx,
{'id': '1111', 'availability_zone_hints': []})
agents = self.plugin.get_dhcp_agents_hosting_networks(self.ctx,
['1111'])
self.assertEqual(1, len(agents))
self.assertEqual('az1-host1', agents[0]['host'])
def test_az_scheduler_two_az_hints(self):
cfg.CONF.set_override('dhcp_agents_per_network', 2)
self._save_networks(['1111'])
helpers.register_dhcp_agent('az1-host1', networks=1, az='az1')
helpers.register_dhcp_agent('az1-host2', networks=2, az='az1')
helpers.register_dhcp_agent('az2-host1', networks=3, az='az2')
helpers.register_dhcp_agent('az2-host2', networks=4, az='az2')
helpers.register_dhcp_agent('az3-host1', networks=5, az='az3')
helpers.register_dhcp_agent('az3-host2', networks=6, az='az3')
self.plugin.network_scheduler.schedule(self.plugin, self.ctx,
{'id': '1111', 'availability_zone_hints': ['az1', 'az3']})
agents = self.plugin.get_dhcp_agents_hosting_networks(self.ctx,
['1111'])
self.assertEqual(2, len(agents))
expected_hosts = set(['az1-host1', 'az3-host1'])
hosts = set([a['host'] for a in agents])
self.assertEqual(expected_hosts, hosts)
def test_az_scheduler_two_az_hints_one_available_az(self):
cfg.CONF.set_override('dhcp_agents_per_network', 2)
self._save_networks(['1111'])
helpers.register_dhcp_agent('az1-host1', networks=1, az='az1')
helpers.register_dhcp_agent('az1-host2', networks=2, az='az1')
helpers.register_dhcp_agent('az2-host1', networks=3, alive=False,
az='az2')
helpers.register_dhcp_agent('az2-host2', networks=4,
admin_state_up=False, az='az2')
self.plugin.network_scheduler.schedule(self.plugin, self.ctx,
{'id': '1111', 'availability_zone_hints': ['az1', 'az2']})
agents = self.plugin.get_dhcp_agents_hosting_networks(self.ctx,
['1111'])
self.assertEqual(2, len(agents))
expected_hosts = set(['az1-host1', 'az1-host2'])
hosts = set([a['host'] for a in agents])
self.assertEqual(expected_hosts, hosts)
def test_az_scheduler_no_az_hints(self):
cfg.CONF.set_override('dhcp_agents_per_network', 2)
self._save_networks(['1111'])
helpers.register_dhcp_agent('az1-host1', networks=2, az='az1')
helpers.register_dhcp_agent('az1-host2', networks=3, az='az1')
helpers.register_dhcp_agent('az2-host1', networks=2, az='az2')
helpers.register_dhcp_agent('az2-host2', networks=1, az='az2')
self.plugin.network_scheduler.schedule(self.plugin, self.ctx,
{'id': '1111', 'availability_zone_hints': []})
agents = self.plugin.get_dhcp_agents_hosting_networks(self.ctx,
['1111'])
self.assertEqual(2, len(agents))
expected_hosts = set(['az1-host1', 'az2-host2'])
hosts = {a['host'] for a in agents}
self.assertEqual(expected_hosts, hosts)
| bigswitch/neutron | neutron/tests/unit/scheduler/test_dhcp_agent_scheduler.py | Python | apache-2.0 | 27,788 |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2014 Dariusz Suchojad <dsuch at zato.io>
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import weakref
from copy import deepcopy
from logging import getLogger
from traceback import format_exc
# Bunch
from bunch import Bunch
# Zato
from zato.common import Inactive, SECRET_SHADOW
logger = getLogger(__name__)
class BaseAPI(object):
""" A base class for connection/query APIs.
"""
def __init__(self, conn_store):
self._conn_store = conn_store
def get(self, name, skip_inactive=False):
item = self._conn_store.get(name)
if not item:
msg = 'No such item `{}` in `{}`'.format(name, sorted(self._conn_store.items))
logger.warn(msg)
raise KeyError(msg)
if not item.config.is_active and not skip_inactive:
msg = '`{}` is inactive'.format(name)
logger.warn(msg)
raise Inactive(msg)
return item
def __getitem__(self, name):
return self.get(name, False)
def create(self, name, msg, *args, **extra):
return self._conn_store.create(name, msg, **extra)
def edit(self, name, msg, **extra):
return self._conn_store.edit(name, msg, **extra)
def delete(self, name):
return self._conn_store.delete(name)
def change_password(self, config):
return self._conn_store.change_password(config)
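# A sketch of what a concrete pair could look like (hypothetical names, for
# illustration only; the real subclasses live elsewhere in Zato):
#
#     class SQLQueryAPI(BaseAPI):
#         pass  # inherits get/create/edit/delete/change_password
#
#     class SQLQueryStore(BaseStore):
#         def create_impl(self, config, config_no_sensitive, **extra):
#             return build_statement(config)  # hypothetical helper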
# ################################################################################################################################
class BaseStore(object):
""" A base class for connection/query stores.
"""
def __init__(self):
self.items = {}
# gevent
from gevent.lock import RLock
self.lock = RLock()
def __getitem__(self, name):
return self.items[name]
def get(self, name):
return self.items.get(name)
def _create(self, name, config, **extra):
""" Actually adds a new definition, must be called with self.lock held.
"""
config_no_sensitive = deepcopy(config)
if 'password' in config:
config_no_sensitive['password'] = SECRET_SHADOW
item = Bunch(config=config, config_no_sensitive=config_no_sensitive, is_created=False, impl=None)
# It's optional
conn = extra.get('def_', {'conn':None})['conn']
try:
logger.debug('Creating `%s`', config_no_sensitive)
impl = self.create_impl(config, config_no_sensitive, **extra)
def execute(session, statement):
def execute_impl(**kwargs):
if not session:
raise Exception('Cannot execute the query without a session')
return session.execute(statement, kwargs)
return execute_impl
item.execute = execute(conn, impl)
logger.debug('Created `%s`', config_no_sensitive)
except Exception, e:
logger.warn('Could not create `%s`, config:`%s`, e:`%s`', name, config_no_sensitive, format_exc(e))
else:
item.impl = impl
item.is_created = True
if conn:
item.extra = weakref.proxy(conn)
else:
item.conn = item.impl
self.items[name] = item
return item
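    # Usage sketch for the execute closure built above (hypothetical names):
    # for a store created with extra def_={'conn': session}, callers can run
    #     store['my-query'].execute(id=123)
    # which invokes session.execute(statement, {'id': 123}).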
def create(self, name, config, **extra):
""" Adds a new connection definition.
"""
with self.lock:
return self._create(name, config, **extra)
def _delete(self, name):
""" Actually deletes a definition. Must be called with self.lock held.
"""
try:
            if name not in self.items:
raise Exception('No such name `{}` among `{}`'.format(name, self.items.keys()))
self.delete_impl()
except Exception, e:
logger.warn('Error while deleting `%s`, e:`%s`', name, format_exc(e))
finally:
del self.items[name]
def delete(self, name):
""" Deletes an existing connection.
"""
with self.lock:
self._delete(name)
def _edit(self, name, config, **extra):
self._delete(name)
return self._create(config.name, config, **extra)
def edit(self, name, config, **extra):
with self.lock:
self._edit(name, config, **extra)
def change_password(self, password_data):
with self.lock:
new_config = deepcopy(self.items[password_data.name].config_no_sensitive)
new_config.password = password_data.password
self.edit(password_data.name, new_config)
def create_impl(self):
raise NotImplementedError('Should be overridden by subclasses')
def delete_impl(self):
pass # It's OK - sometimes deleting a connection doesn't have to mean doing anything unusual
| AmrnotAmr/zato | code/zato-server/src/zato/server/store.py | Python | gpl-3.0 | 4,970 |
# Problem c
diag = lambda x : (x[0]*x[0]+x[1]*x[1])**0.5
def solve(n, p, ip):
t = 2 * sum(map(sum,ip))
_min = 2*min(map(min,ip))
if t + _min > p:
return t
_max = 2*sum(map(diag,ip))
if t + _max == p:
return p
if t + _max < p:
return t + _max
all_min = 2*sum(map(min,ip))
if t + all_min < p:
return p
D = p-t
sums = []
max_d = 0
    # m_m[i]: a greedy estimate of the largest total of minimum cuts (2*min)
    # over cookies 0..i that stays below D
    m_m = [2 * min(ip[0])]
    for i in range(1, n):
        x = 2 * min(ip[i])
        y = 0
        for j in range(0, i):
            y = max(y, m_m[j])
            if m_m[j] + x < D:
                y = m_m[j] + x
        m_m.append(y)
for rect in ip:
ps = []
x = 2*min(rect)
y = 2*diag(rect)
if x < D:
if y > D:
return p
max_d = max(max_d, y)
        for i, j in sums:
            _i, _j = i + x, j + y
if _i < D:
ps.append([_i, _j])
max_d = max(max_d,_j)
if _j > D:
return p
sums.extend(ps)
sums.append([x,y])
return max_d + t
if __name__ == "__main__":
t = int(input())
for ti in range(1,t+1):
n,p = map(int,input().split(" "))
ip =[]
for _ in range(n):
ip.append(tuple(map(int, input().split(" "))))
print("Case #{}: {:.6f}".format(ti, solve(n,p,ip)))
| subhrm/google-code-jam-solutions | solutions/2018/1A/C/c.py | Python | mit | 1,448 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on Jul 14, 2014
@author: anroco
How does the format method of a Python str work?
'''
#
#review https://docs.python.org/3/library/string.html#formatstrings
#
#Using the comma as a thousands separator (numeric types)
s = '{:,}'.format(1049378473)
print(s)
#converting the value to different bases (only for integer)
s = 'int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}'.format(62)
print(s)
#adds the respective prefix '0b', '0o', or '0x' to the output value.
s = 'hex: {0:#x}; oct: {0:#o}; bin: {0:#b}'.format(32)
print(s)
#Displays the number as a fixed-point number, with 4 decimal places
s = '{:.4f}'.format(24 / 53)
print(s)
#Displays the number as a percentage, with 2 decimal places
s = '{:.2%}'.format(24 / 53)
print(s)
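#Aligning values: '<' left, '>' right, '^' center, within the given width
s = '[{:<10}][{:>10}][{:^10}]'.format('left', 'right', 'center')
print(s)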
#Displays the number in exponent notation, with 2 decimal places
s = '{:e}'.format(24 / 53)
print(s)
| OxPython/Python_str_format | src/format3_str.py | Python | epl-1.0 | 955 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import platform
def format_args(args):
"""
Format the args to pass to the subprocess
Linux requires a string with spaces (if an argument contains spaces it must be surrounded with quotes), whereas Windows requires a list
:param args: A list of arguments
:return: The arguments as required by the operating system
"""
if platform.system() == 'Linux' and isinstance(args, list):
formatted_string = ''
for arg in args:
formatted_string += '%s ' % arg if ' ' not in arg else '"%s" ' % arg
return formatted_string
else:
return args
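# Usage sketch: on Linux the list form is flattened into a single string,
# quoting arguments that contain spaces, e.g.
# format_args(['ls', '-l', 'my dir']) -> 'ls -l "my dir" ' (trailing space
# included); on Windows, or for non-list input, the value is returned as-is.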
| ValyrianTech/BitcoinSpellbook-v0.3 | helpers/platformhelpers.py | Python | gpl-3.0 | 652 |
# -----------------------------------------------------------------------
# OpenXenManager
#
# Copyright (C) 2009 Alberto Gonzalez Rodriguez [email protected]
# Copyright (C) 2014 Daniel Lintott <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -----------------------------------------------------------------------
import os
import sys
import shutil
import pygtk
import pango
from configobj import ConfigObj
from tunnel import Tunnel
if os.path.dirname(sys.argv[0]):
os.chdir(os.path.dirname(sys.argv[0]))
# In future releases we will use gettext for translations. TODO: Investigate translations
APP = 'oxc'
DIR = 'locale'
if sys.platform != "win32" and sys.platform != "darwin":
# If sys.platform is Linux or Unix
import gtkvnc
# Only needed for translations
import gtk.glade
gtk.glade.bindtextdomain(APP, DIR)
elif sys.platform == "darwin":
    # On MacOSX with macports sys.platform is "darwin"; we need Popen to run tightvnc
from subprocess import Popen
else:
    # On Windows we need the right tightvnc and the win32 libraries to move the window
from subprocess import Popen
import win32gui
import win32con
from oxcSERVER import *
import signal
import atexit
# For a TreeView Cell with image+text
from PixbufTextCellRenderer import PixbufTextCellRenderer
import gettext
gettext.install('oxc', localedir="./locale")
gobject.threads_init()
# Import the split classes for oxcWindow
from window_vm import *
from window_host import *
from window_properties import *
from window_storage import *
from window_alerts import *
from window_addserver import *
from window_newvm import *
from window_menuitem import *
from window_tools import *
from xdot import DotWindow
class MyDotWindow(DotWindow):
def __init__(self, window, liststore, treestore):
self.liststore = liststore
self.treestore = treestore
DotWindow.__init__(self, window)
self.widget.connect('button_press_event', self.on_double_clicked)
def on_double_clicked(self, widget, event):
# On double click go to element
if event.type == gtk.gdk._2BUTTON_PRESS:
x, y = int(event.x), int(event.y)
if widget.get_url(x, y):
url = widget.get_url(x, y).url
                # Search for the ref and jump to it
self.liststore.foreach(self.search_ref, url)
return True
def search_ref(self, model, path, iter_ref, user_data):
if self.liststore.get_value(iter_ref, 6) == user_data:
self.treestore.get_selection().select_path(path)
event = gtk.gdk.Event(gtk.gdk.BUTTON_RELEASE)
event.x = float(-10)
event.y = float(-10)
self.treestore.emit("button_press_event", event)
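        # Note: emitting a synthetic button_press_event with off-widget
        # coordinates makes the main window react as if the row had been
        # clicked, refreshing the selection-dependent UI.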
class oxcWindow(oxcWindowVM, oxcWindowHost, oxcWindowProperties,
oxcWindowStorage, oxcWindowAlerts, oxcWindowNewVm,
oxcWindowMenuItem, oxcWindowTools, AddServer):
"""Main class to oxc window"""
xc_servers = {}
    # When you select an element of the left tree these variables are filled
selected_actions = None
selected_ref = None
selected_iter = None
selected_tab = None
selected_host = None
selected_type = None
selected_widget = None
selected_state = None
noclosevnc = False
# If "Use master password" is enabled, password typed is set on it
password = None
reattach_storage = False
# For New VM
newvmdata = {}
    # On Host -> General tab: the "VMs" label doesn't have a fixed width.
    # If a host is included in the "moved" variable then its "VMs" label was already moved
moved = []
# Flag variable to avoid select signals
set_active = False
# Flag variable to export snapshot
export_snap = False
export_snap_vm = False
    # For Windows only
hWnd = 0
# Used only for plugins..
delete_pages = []
# Used for pool join force
last_host_pool = None
# For XTEA only (needs a string with 8 characters)
iv = "OXCENTER"
# Tunnel VNC
tunnel = None
    # To know if the performance images were set
performance_updated = False
def __init__(self):
atexit.register(self.signal_handler)
signal.signal(15, self.signal_handler)
# Read the configuration from oxc.conf file
if sys.platform != "win32":
if not os.path.exists(os.path.join(os.path.expanduser("~"), ".config")):
os.mkdir(os.path.join(os.path.expanduser("~"), ".config"))
if not os.path.exists(os.path.join(os.path.expanduser("~"), ".config", "openxenmanager")):
os.mkdir(os.path.join(os.path.expanduser("~"), ".config", "openxenmanager"))
dirconfig = os.path.join(os.path.expanduser("~"), ".config", "openxenmanager")
pathconfig = os.path.join(os.path.expanduser("~"), ".config", "openxenmanager", "oxc.conf")
else:
if not os.path.exists(os.path.join(os.path.expanduser("~"), "openxenmanager")):
os.mkdir(os.path.join(os.path.expanduser("~"), "openxenmanager"))
dirconfig = os.path.join(os.path.expanduser("~"), "openxenmanager")
pathconfig = os.path.join(os.path.expanduser("~"), "openxenmanager", "oxc.conf")
if not os.path.exists(pathconfig):
shutil.copy(os.path.join(utils.module_path(), "oxc.conf"), pathconfig)
self.config = ConfigObj(pathconfig)
self.pathconfig = dirconfig
        # Read the saved servers from the configuration
if self.config['servers']['hosts']:
self.config_hosts = self.config['servers']['hosts']
else:
self.config_hosts = {}
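        # Sketch of the oxc.conf sections this code relies on (keys read here
        # and below; values are illustrative, not the shipped defaults):
        #   [servers] -> 'hosts': the saved servers
        #   [gui]     -> 'show_xs_templates', 'show_custom_templates',
        #                'show_local_storage', 'show_toolbar',
        #                'show_hidden_vms', 'save_password', 'pane_position'
        #   [maps]    -> per-checkbox booleans for the map view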
# Define the glade file
glade_dir = os.path.join(utils.module_path(), 'ui')
glade_files = []
for g_file in os.listdir(glade_dir):
if g_file.endswith('.glade'):
glade_files.append(os.path.join(glade_dir, g_file))
self.builder = gtk.Builder()
self.builder.set_translation_domain("oxc")
# Add the glade files to gtk.Builder object
for g_file in glade_files:
try:
self.builder.add_from_file(g_file)
except:
print "While loading Glade GUI Builder file \"" + g_file + "\" a duplicate entry was found:"
raise
        # Connect Windows and Dialogs to delete-event (we don't want to destroy the dialog/window)
        # delete-event is called when you close the window with the "x" button
# TODO: csun: eventually it should be possible not to do this: http://stackoverflow.com/questions/4657344/
for widget in self.builder.get_objects():
if isinstance(widget, gtk.Dialog) or \
isinstance(widget, gtk.Window) and gtk.Buildable.get_name(widget) != "window1":
widget.connect("delete-event", self.on_delete_event)
# Frequent objects
self.txttreefilter = self.builder.get_object("txttreefilter")
self.listphydvd = self.builder.get_object("listphydvd")
self.listisoimage = self.builder.get_object("listisoimage")
self.listnetworks = self.builder.get_object("listnewvmnetworks")
self.listnetworkcolumn = self.builder.get_object("listnewvmnetworkcolumn")
self.window = self.builder.get_object("window1")
self.listalerts = self.builder.get_object("listalerts")
self.treealerts = self.builder.get_object("treealerts")
self.filesave = self.builder.get_object("filesave")
self.fileopen = self.builder.get_object("fileopen")
self.newvm = self.builder.get_object("window_newvm")
self.treeview = self.builder.get_object("treevm")
self.treeprop = self.builder.get_object("treeprop")
self.listprop = self.builder.get_object("listprop")
self.statusbar = self.builder.get_object("statusbar1")
self.treesearch = self.builder.get_object("treesearch")
self.treestg = self.builder.get_object("treestg")
#Tunnel and VNC pid dicts
self.tunnel = {}
self.vnc_process = {} #used in osx
self.vnc = {}
self.vnc_builders = {} #used to store vnc pygtk builders for the different windows in Linux
"""
for i in range(0,7):
if self.newvm.get_nth_page(i):
self.newvm.set_page_complete(self.newvm.get_nth_page(i), True)
"""
# Combo's style
style = gtk.rc_parse_string('''
style "my-style" { GtkComboBox::appears-as-list = 1 }
widget "*" style "my-style"
''')
self.builder.connect_signals(self)
self.treestg.get_selection().connect('changed', self.on_treestg_selection_changed)
# Create a new TreeStore
self.treestore = gtk.TreeStore(gtk.gdk.Pixbuf, str, str, str, str, str, str, object, str)
# Image, Name, uuid, type, state, host, ref, actions, ip
# Append default logo on created TreeStore
self.treeroot = self.treestore.append(None, ([gtk.gdk.pixbuf_new_from_file(
os.path.join(utils.module_path(), "images/xen.gif")), "OpenXenManager", None, "home", "home", None,
None, ["addserver", "connectall", "disconnectall"], None]))
        # Model Filter is used to show/hide templates/custom templates/local storage
self.modelfilter = self.treestore.filter_new()
        # Define the function that decides whether an element is shown or hidden
self.modelfilter.set_visible_func(self.visible_func)
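        # visible_func receives (model, iter) and must return True to show a
        # row; a minimal sketch of the idea (the real self.visible_func is
        # defined elsewhere in this class and also honours the filter text):
        #   def visible_func(self, model, iter_ref):
        #       seltype = model.get_value(iter_ref, 3)  # column 3 = type
        #       if seltype == "template":
        #           return self.config["gui"]["show_xs_templates"] == "True"
        #       return True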
self.treeview.set_model(self.modelfilter)
self.modelfiltertpl = self.builder.get_object("listtemplates").filter_new()
self.builder.get_object("treetemplates").set_model(self.modelfiltertpl)
self.modelfiltertpl.set_visible_func(self.visible_func_templates)
self.builder.get_object("networkcolumn1").set_property("model",
self.builder.get_object("listimportnetworkcolumn"))
self.builder.get_object("cellrenderercombo1").set_property("model",
self.builder.get_object("listnewvmnetworkcolumn"))
# Same for properties treestore
self.propmodelfilter = self.listprop.filter_new()
self.propmodelfilter.set_visible_func(self.prop_visible_func)
self.treeprop.set_model(self.propmodelfilter)
        # Fill default selection variables
self.selected_name = "OpenXenManager"
self.selected_type = "home"
self.selected_uuid = ""
self.headimage = self.builder.get_object("headimage")
self.headlabel = self.builder.get_object("headlabel")
self.headlabel.set_label(self.selected_name)
self.headimage.set_from_pixbuf(gtk.gdk.pixbuf_new_from_file(os.path.join(utils.module_path(),
"images/xen.gif")))
if 'pane_position' in self.config['gui']:
pane = self.builder.get_object('main_pane')
pane.set_position(int(self.config['gui']['pane_position']))
if "show_hidden_vms" not in self.config["gui"]:
self.config["gui"]["show_hidden_vms"] = "False"
self.config.write()
        # Set menuitem checks to the values from configuration
self.builder.get_object("checkshowxstpls").set_active(self.config["gui"]["show_xs_templates"] == "True")
self.builder.get_object("checkshowcustomtpls").set_active(self.config["gui"]["show_custom_templates"] == "True")
self.builder.get_object("checkshowlocalstorage").set_active(self.config["gui"]["show_local_storage"] == "True")
self.builder.get_object("checkshowtoolbar").set_active(self.config["gui"]["show_toolbar"] == "True")
self.builder.get_object("checkshowhiddenvms").set_active(self.config["gui"]["show_hidden_vms"] == "True")
if "maps" in self.config:
for check in self.config["maps"]:
self.builder.get_object(check).set_active(self.config["maps"][check] == "True")
# If "Show toolbar" is checked then show, else hide
if self.config["gui"]["show_toolbar"] != "False":
self.builder.get_object("toolbar").show()
else:
self.builder.get_object("toolbar").hide()
# Add to left tree the saved servers from configuration
for host in self.config_hosts.keys():
self.builder.get_object("listaddserverhosts").append([host])
self.treestore.append(self.treeroot, ([gtk.gdk.pixbuf_new_from_file(
os.path.join(utils.module_path(), "images/tree_disconnected_16.png")), host, None, "server",
"Disconnected", None, None, ["connect", "forgetpw", "remove"], None]))
# Expand left tree and update menubar, tabs and toolbar
self.treeview.expand_all()
self.update_menubar()
self.update_tabs()
self.update_toolbar()
# Create a TreeStore for SERVER->Search tab
# (image, name, loadimg, loadtext,
# memimg, memtext, disks, network, address, uptime
# color)
self.listsearch = gtk.TreeStore(gtk.gdk.Pixbuf, str, object, str,
object, str, str, str, str, str,
gtk.gdk.Color)
self.treesearch.set_model(self.listsearch)
#self.treesearch.get_column(0).set_cell_data_func(self.func_cell_data_treesearch, self.treesearch.get_cell(0))
# Add two columns with image/text from PixBufTextCellRenderer class
pbtcell = PixbufTextCellRenderer()
pbtcell.set_property('xpad', 15)
pbtcell.set_property('ypad', 13)
tvc = gtk.TreeViewColumn('CPU Usage', pbtcell, text=3, pixbuf=2, background=10)
tvc.set_widget(self.builder.get_object("lbltreesearch6"))
self.builder.get_object("lbltreesearch6").show()
tvc.set_reorderable(True)
tvc.set_sort_column_id(3)
self.treesearch.insert_column(tvc, 1)
pbtcell = PixbufTextCellRenderer()
pbtcell.set_property('xpad', 15)
pbtcell.set_property('ypad', 13)
tvc = gtk.TreeViewColumn('Used memory', pbtcell, text=5, pixbuf=4, background=10)
tvc.set_widget(self.builder.get_object("lbltreesearch7"))
tvc.set_reorderable(True)
tvc.set_sort_column_id(5)
self.treesearch.insert_column(tvc, 2)
# ComboBox created from GLADE needs a cellrenderertext
# and an attribute defining the column to show
combobox = self.builder.get_object("radiobutton2_data")
cell = gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 0)
combobox.set_model(self.listphydvd)
combobox = self.builder.get_object("radiobutton3_data")
cell = gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 0)
combobox.add_attribute(cell, 'rise', 2)
combobox.add_attribute(cell, 'sensitive', 3)
combobox.set_model(self.listisoimage)
combobox = self.builder.get_object("treeeditnetwork")
cell = gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 1)
combobox.set_model(self.builder.get_object("listeditnetwork"))
combobox = self.builder.get_object("treeaddnetwork")
cell = gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 1)
combobox.set_model(self.builder.get_object("listaddnetwork"))
combobox.set_active(0)
combobox = self.builder.get_object("combostgmode")
cell = gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 1)
combobox.set_model(self.builder.get_object("liststgmode"))
combobox.set_active(0)
combobox = self.builder.get_object("combostgposition")
cell = gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 0)
combobox.set_model(self.builder.get_object("liststgposition"))
combobox.set_active(0)
combobox.set_style(style)
combobox = self.builder.get_object("combomgmtnetworks")
cell = gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 1)
combobox.set_model(self.builder.get_object("listmgmtnetworks"))
combobox.set_active(0)
combobox.set_style(style)
combobox = self.builder.get_object("combopoolmaster")
cell = gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 1)
combobox.set_model(self.builder.get_object("listpoolmaster"))
combobox.set_active(0)
combobox.set_style(style)
combobox = self.builder.get_object("combotargetiqn")
cell = gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 1)
combobox.set_model(self.builder.get_object("listtargetiqn"))
combobox.set_active(0)
combobox.set_style(style)
combobox = self.builder.get_object("combotargetlun")
cell = gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 1)
combobox.set_model(self.builder.get_object("listtargetlun"))
combobox.set_active(0)
combobox.set_style(style)
combobox = self.builder.get_object("combonetworknic")
cell = gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 1)
combobox.set_model(self.builder.get_object("listnetworknic"))
combobox.set_active(0)
combobox.set_style(style)
combobox = self.builder.get_object("combocustomfields")
cell = gtk.CellRendererText()
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 0)
combobox.set_model(self.builder.get_object("listcombocustomfields"))
combobox.set_active(0)
combobox.set_style(style)
#print combobox.get_internal_child()
# If gtk version is 2.18.0 or higher then add "marks" to scale
if hasattr(self.builder.get_object("scalepropvmprio"), "add_mark"):
self.builder.get_object("scalepropvmprio").add_mark(0, gtk.POS_BOTTOM, "\nLowest")
self.builder.get_object("scalepropvmprio").add_mark(1, gtk.POS_BOTTOM, "")
self.builder.get_object("scalepropvmprio").add_mark(2, gtk.POS_BOTTOM, "")
self.builder.get_object("scalepropvmprio").add_mark(3, gtk.POS_BOTTOM, "")
self.builder.get_object("scalepropvmprio").add_mark(4, gtk.POS_BOTTOM, "\nNormal")
self.builder.get_object("scalepropvmprio").add_mark(5, gtk.POS_BOTTOM, "")
self.builder.get_object("scalepropvmprio").add_mark(6, gtk.POS_BOTTOM, "")
self.builder.get_object("scalepropvmprio").add_mark(7, gtk.POS_BOTTOM, "")
self.builder.get_object("scalepropvmprio").add_mark(8, gtk.POS_BOTTOM, "\nHighest")
        # Manual function to set the default buttons on dialogs/windows
        # Default buttons can be pressed with Enter without needing to click
self.set_window_defaults()
# Make the background of the tab box, and its container children white
tabbox = self.builder.get_object("tabbox")
tabbox.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color('#FFFFFF'))
#for tab_box_child in tabbox.get_children():
self.recursive_set_bg_color(tabbox)
# To easily modify and provide a consistent section header look in the
# main_window: I've named all EventBoxes main_section_header#. Iterate through
# them until we get a NoneType
section_header_string = "main_section_header"
section_header_index = 1
while 1:
done = self.prettify_section_header(section_header_string + str(section_header_index))
if(done is None):
break
section_header_index = section_header_index + 1
        # If we need a master password to connect to servers without a password:
        # show the dialog asking for the master password
if str(self.config["gui"]["save_password"]) == "True":
self.builder.get_object("masterpassword").show()
if sys.platform == 'win32' or sys.platform == 'darwin':
self.builder.get_object('consolescale').hide()
self.windowmap = MyDotWindow(self.builder.get_object("viewportmap"), self.treestore, self.treeview)
# Recursive function to set the background colour on certain objects
def recursive_set_bg_color(self, widget):
for child in widget.get_children():
# Is a storage container, dive into it
if isinstance(child, gtk.Container):
self.recursive_set_bg_color(child)
# Is a specific type of widget
child.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color('#FFFFFF'))
# Add a common theme to the section header areas
def prettify_section_header(self, widget_name):
if type(widget_name) is not str:
return None
section_header = self.builder.get_object(widget_name)
if section_header is None:
return None
# Make the event boxes window visible and set the background color
section_header.set_visible_window(True)
section_header.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color('#3498db'))
child_list = section_header.get_children()
if child_list is not None:
for child in child_list:
if child is not None:
if type(child) == gtk.Label:
child.modify_fg(gtk.STATE_NORMAL, gtk.gdk.Color('#FFFFFF'))
# Preserve attributes set within Glade.
child_attributes = child.get_attributes()
if child_attributes is None:
child_attributes = pango.AttrList()
# Add/modify a few attributes
child_attributes.change(pango.AttrScale(pango.SCALE_XX_LARGE, 0, -1))
child.set_attributes(child_attributes)
return True
# todo: James - When we're done redoing the performance tab let's do this on any new scrollbars created
#def adjust_scrollbar_performance(self):
# for widget in ["scrwin_cpuusage", "scrwin_memusage", "scrwin_netusage", "scrwin_diskusage"]:
# self.builder.get_object(widget).grab_focus()
# adj = self.builder.get_object(widget).get_hadjustment()
# adj.set_value(adj.upper - adj.page_size)
def func_cell_data_treesearch(self, column, cell, model, iter_ref, user_data):
# Test function, currently unused. TODO: Can this be removed?
print column, cell, model, iter_ref, user_data
def set_window_defaults(self):
"""
Define which button is the default for each window/dialog.
The default button can be triggered with the Enter key, without clicking.
"""
widgets = ["addserverpassword", "addserverusername", "snaptplname", "snapshotname", "vmaddnewdisk_name",
"txtcopyvmname", "txtpropvmname", "txtnetworkname", "txtmasterpassword", "txtaskmasterpassword"
]
dialogs = {
"addserver": "connect_addserver",
"newvmdisk": "acceptnewvmdisk",
"vmaddnewdisk": "acceptvmaddnewdisk",
"dialogsnapshotname": "btacceptsnapshotname",
"dialogsnaptplname": "btacceptsnaptplname",
"dialogsnapshotdelete": "btacceptsnapshotdelete",
"vmattachdisk": "btacceptattachdisk",
"dialogdeletevm": "dialogdelete_accept",
"dialogdeletevdi": "dialogdeletevdi_accept",
"windowcopyvm": "windowcopyvm_copy",
"dialogvmprop": "btvmpropaccept",
"dialogdeletehostnetwork": "acceptdialogdeletehostnetwork",
"dialogdeletehostnic": "acceptdialogdeletehostnic",
"addbond": "btacceptaddbond",
"newnetwork": "acceptnewnetwork",
"dialogoptions": "acceptdialogoptions",
"masterpassword": "acceptmasterpassword",
"dialogeditnetwork": "accepteditnetwork",
"dialognetworkrestart": "acceptdialognetworkrestart",
"vmimport": "nextvmimport",
"mgmtinterface": "acceptmgmtinterface",
"newpool": "acceptnewpool"
}
# For each dialog
for wid in dialogs:
# Set the flag indicating the widget could be a default button
self.builder.get_object(dialogs[wid]).set_flags(gtk.CAN_DEFAULT)
# If widget is a dialog
if type(self.builder.get_object(wid)) == type(gtk.Dialog()):
# Set the button with "id response = 1" as default
self.builder.get_object(wid).set_default_response(1)
else:
# If is a Gtk.Window set the indicated button as default
self.builder.get_object(wid).set_default(self.builder.get_object(dialogs[wid]))
for wid in widgets:
# For each button indicate that it may be the default button
self.builder.get_object(wid).set_activates_default(True)
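# Illustrative sketch (not part of the original code): the three PyGTK calls
# used above are all that is needed to make Enter trigger a dialog's button.
# The names here are hypothetical placeholders:
#   button.set_flags(gtk.CAN_DEFAULT)       # button may become the default
#   window.set_default(button)              # make it the default for a gtk.Window
#   entry.set_activates_default(True)       # Enter in the entry fires the default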
def visible_func_templates(self, model, iter_ref, user_data=None):
name = self.builder.get_object("listtemplates").get_value(iter_ref, 1)
txttemplatesearch = self.builder.get_object("txttemplatesearch")
if txttemplatesearch.get_text().strip() == "":
return True
else:
return name.lower().count(txttemplatesearch.get_text().lower()) > 0
def visible_func(self, model, iter_ref, user_data=None):
"""
Decide whether an element should be shown in the left tree.
Checks configuration values to show or hide elements:
returning False hides the element, returning True shows it.
"""
host = self.treestore.get_value(iter_ref, 5)
ref = self.treestore.get_value(iter_ref, 6)
seltype = self.treestore.get_value(iter_ref, 3)
if len(self.txttreefilter.get_text()) > 0 and \
((seltype == "vm" or seltype == "template" or seltype == "storage" or seltype == "custom_template") and
self.treestore.get_value(iter_ref, 1).lower().count(self.txttreefilter.get_text().lower()) == 0):
return False
if seltype == "vm" and str(self.config["gui"]["show_hidden_vms"]) == "False" and host and ref and \
self.xc_servers[host].all['vms'][ref].get("other_config") and \
str(self.xc_servers[host].all['vms'][ref]["other_config"].get("HideFromXenCenter")).lower() == "true":
return False
if seltype == "template":
if self.config["gui"]["show_xs_templates"] == "False" or not self.config["gui"]["show_xs_templates"]:
return False
elif seltype == "custom_template":
if self.config["gui"]["show_custom_templates"] == "False" or \
not self.config["gui"]["show_custom_templates"]:
return False
elif seltype == "storage":
if self.config["gui"]["show_local_storage"] == "False" or not self.config["gui"]["show_local_storage"]:
if host and ref:
if not self.xc_servers[host].all['SR'][ref]['shared']:
return False
return True
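# A minimal sketch of how a visible_func is typically wired to a filtered
# model in PyGTK (assumed setup; the actual wiring lives elsewhere in this class):
#   modelfilter = treestore.filter_new()
#   modelfilter.set_visible_func(self.visible_func)
#   treeview.set_model(modelfilter)
#   modelfilter.refilter()  # re-evaluate visibility after config changes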
def foreach_connect(self, model, path, iter_ref, user_data):
"""
Connect or disconnect servers depending on the user_data value:
if user_data is True, connect all disconnected servers;
if user_data is False, disconnect all connected servers.
Note: this does not work very well yet.
"""
if self.treestore.get_value(iter_ref, 3) == "server":
if self.treestore.get_value(iter_ref, 4) == "Disconnected":
if user_data:
name = self.treestore.get_value(iter_ref, 1)
if self.config_hosts[name][1]:
path = self.modelfilter.convert_path_to_child_path(path)
self.treeview.get_selection().select_path(path)
iter_ref = self.treestore.get_iter(path)
self.selected_iter = iter_ref
self.selected_name = self.treestore.get_value(iter_ref, 1)
self.selected_uuid = self.treestore.get_value(iter_ref, 2)
self.selected_type = self.treestore.get_value(iter_ref, 3)
self.selected_state = self.treestore.get_value(iter_ref, 4)
self.selected_host = self.treestore.get_value(iter_ref, 5)
self.selected_ip = self.treestore.get_value(iter_ref, 8)
self.on_m_connect_activate(self.treestore, None)
self.treesearch.expand_all()
if self.treestore.get_value(iter_ref, 3) == "host" or self.treestore.get_value(iter_ref, 3) == "pool":
if self.treestore.get_value(iter_ref, 4) == "Running":
if not user_data:
path = self.modelfilter.convert_path_to_child_path(path)
self.treeview.get_selection().select_path(path)
iter_ref = self.treestore.get_iter(path)
self.selected_iter = iter_ref
self.selected_name = self.treestore.get_value(iter_ref, 1)
self.selected_uuid = self.treestore.get_value(iter_ref, 2)
self.selected_type = self.treestore.get_value(iter_ref, 3)
self.selected_state = self.treestore.get_value(iter_ref, 4)
self.selected_host = self.treestore.get_value(iter_ref, 5)
self.selected_ip = self.treestore.get_value(iter_ref, 8)
self.on_m_disconnect_activate(self.treestore, None)
self.treesearch.expand_all()
else:
print "**", self.treestore.get_value(iter_ref, 4)
def on_window1_configure_event(self, widget, data=None):
self.on_window1_size_request(widget, data)
def on_window1_size_request(self, widget, data=None):
if self.hWnd != 0:
console_area = self.builder.get_object("frameconsole")
console_area.realize()
console_alloc = console_area.get_allocation()
window_alloc = self.window.get_position()
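# Position the external viewer window inside the console frame; the 10/47
# pixel offsets appear to compensate for the window frame and title bar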
x = console_alloc.x + window_alloc[0] + 10
y = console_alloc.y + window_alloc[1] + 47
win32gui.MoveWindow(self.hWnd, x, y, console_alloc.width-10, console_alloc.height-5, 1)
def on_console_area_key_press_event(self, widget, event):
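# X11 hardware keycodes are offset by 8 from kernel/evdev keycodes,
# hence the "- 8" below when forwarding the key to the tunnel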
self.tunnel[self.selected_ref].key = hex(event.hardware_keycode - 8)
def on_aboutdialog_close(self, widget, data=None):
"""
Function to hide about dialog when you close it
"""
self.builder.get_object("aboutdialog").hide()
def on_acceptmasterpassword_clicked(self, widget, data=None):
"""
Check whether the typed master password is correct
"""
# Create a md5 object
m = hashlib.md5()
password = self.builder.get_object("txtaskmasterpassword").get_text()
# Add password typed to md5 object
m.update(password)
# m.hexdigest() is the md5 hex digest of the password (as saved in the configuration)
if self.config["gui"]["master_password"] != m.hexdigest():
# If it is wrong, show the label indicating the password is incorrect
self.builder.get_object("lblwrongpassword").show()
else:
# If it is correct, store it in the global variable "password" and hide the dialog
self.password = password
self.builder.get_object("masterpassword").hide()
def on_cancelmasterpassword_clicked(self, widget, data=None):
"""
Function called when you cancel the master password dialog.
"""
#If you cancel the dialog, then set global variable "password" to None
self.password = None
self.builder.get_object("masterpassword").hide()
def on_txtaskmasterpassword_changed(self, widget, data=None):
"""
Function called when you write or remove characters on master password entry
"""
# If you check "save server passwords" then you need specify a master password
# If len of master password is 0, then disable "Accept" button in options dialog
self.builder.get_object("acceptmasterpassword").set_sensitive(len(widget.get_text()))
def update_tabs(self):
"""
Function called when you select an element from left tree
Depending on selected type show or hide different tabs
"""
frames = ("framestggeneral", "framememory", "framestgdisks", "framevmgeneral", "framevmstorage",
"framevmnetwork", "framehostgeneral", "framehostnetwork", "framehoststorage", "frameconsole",
"framehostnics", "framesnapshots", "frameperformance", "frametplgeneral", "framehome", "frameconsole",
"framepoolgeneral", "framelogs", "framesearch", "frameusers", "framemaps", "framehosthw")
showframes = {
"pool": ["framepoolgeneral", "framelogs", "framesearch", "framemaps"],
"home": ["framehome"],
"vm": ["framevmgeneral", "framememory", "framevmstorage", "framevmnetwork", "framelogs", "framesnapshots",
"frameperformance"],
"host": ["framesearch", "framehostgeneral", "framehostnetwork", "framehoststorage", "framelogs",
"frameconsole", "framehostnics", "frameperformance", "frameusers", "framemaps"],
"template": ["frametplgeneral", "framevmnetwork", "framehostgeneral"],
"custom_template": ["frametplgeneral", "framevmnetwork", "framevmstorage", "framelogs"],
"storage": ["framestggeneral", "framestgdisks", "framelogs"],
}
if self.selected_type in showframes:
[self.builder.get_object(frame).show() for frame in showframes[self.selected_type]]
[self.builder.get_object(frame).hide() for frame in frames if frame not in showframes[self.selected_type]]
if self.selected_type == "pool":
self.xc_servers[self.selected_host].update_tab_pool_general(self.selected_ref, self.builder)
elif self.selected_type == "vm":
# If "VM" is running, show console tab, else hide
if self.selected_state == "Running":
self.builder.get_object("frameconsole").show()
else:
self.builder.get_object("frameconsole").hide()
self.xc_servers[self.selected_host].update_tab_vm_general(self.selected_ref, self.builder)
elif self.selected_type == "host":
self.xc_servers[self.selected_host].update_tab_host_general(self.selected_ref, self.builder)
if self.xc_servers[self.selected_host].has_hardware_script(self.selected_ref):
self.builder.get_object("framehosthw").show()
else:
self.builder.get_object("framehosthw").hide()
elif self.selected_type == "template":
self.xc_servers[self.selected_host].update_tab_template(self.selected_ref, self.builder)
elif self.selected_type == "custom_template":
self.xc_servers[self.selected_host].update_tab_template(self.selected_ref, self.builder)
elif self.selected_type == "storage":
operations = self.xc_servers[self.selected_host].all['SR'][self.selected_ref]['allowed_operations']
if operations.count("vdi_create"):
self.builder.get_object("btstgnewdisk").show()
else:
self.builder.get_object("btstgnewdisk").hide()
self.xc_servers[self.selected_host].update_tab_storage(self.selected_ref, self.builder)
# Experimental only
try:
import webkit
import glob
for deletepage in self.delete_pages:
# FIXME: remove doesn't work
self.builder.get_object("tabbox").get_nth_page(deletepage).hide_all()
self.delete_pages = []
for infile in glob.glob("plugins/*.xml"):
data = open(infile).read()
"""
dom = xml.dom.minidom.parseString(data)
nodes = dom.getElementsByTagName("XenCenterPlugin")
applicable = False
if len(nodes[0].getElementsByTagName("TabPage")):
for tabpage in nodes[0].getElementsByTagName("TabPage"):
if tabpage.attributes.getNamedItem("search"):
search_uuid = tabpage.attributes.getNamedItem("search").value
tabname = tabpage.attributes.getNamedItem("name").value # REVISE
url = tabpage.attributes.getNamedItem("url").value # REVISE
if len(nodes[0].getElementsByTagName("Search")):
host = self.selected_host
[applicable, ip] = self.plugin_get_search(nodes, search_uuid, host, ref)
"""
host = self.selected_host
ref = self.selected_ref
[applicable, ip, url, tabname] = self.process_xml(data, host, ref)
if applicable:
view = webkit.WebView()
browser = gtk.ScrolledWindow()
url = url.replace("{$ip_address}", ip)
view.open(url)
browser.add_with_viewport(view)
tablabel = gtk.Label(tabname)
self.delete_pages.append(self.builder.get_object("tabbox").append_page(browser, tablabel))
browser.show_all()
except (ImportError, RuntimeError):
pass
def process_xml(self, data, host, ref):
dom = xml.dom.minidom.parseString(data)
if dom.documentElement.nodeName != u'XenCenterPlugin':
print "no XenCenterPlugin"
return
node = dom.documentElement
ip = None
applicable = False
for tabpage in node.getElementsByTagName("TabPage"):
search_uuid = tabpage.getAttribute('search')
tabname = tabpage.getAttribute("name") # REVISE
url = tabpage.getAttribute("url") # REVISE
if search_uuid and tabname and url:
for search in [e for e in node.getElementsByTagName("Search") if e.getAttribute("uuid") == search_uuid]:
for query in search.getElementsByTagName("Query"):
for queryscope in [e for e in query.getElementsByTagName("QueryScope")[0].childNodes
if e.nodeType != dom.TEXT_NODE]:
if queryscope.nodeName == "LocalSR":
if self.selected_type == "storage":
shared = \
self.xc_servers[self.selected_host].all['SR'][self.selected_ref]['shared']
if not shared:
applicable = True
elif queryscope.nodeName == "RemoteSR":
if self.selected_type == "storage":
shared = \
self.xc_servers[self.selected_host].all['SR'][self.selected_ref]['shared']
if shared:
applicable = True
elif queryscope.nodeName == "Pool": # REVISE
if self.selected_type == "pool":
applicable = True
elif queryscope.nodeName == "Vm": # REVISE
if self.selected_type == "vm":
applicable = True
elif queryscope.nodeName == "Host": # REVISE
if self.selected_type == "host":
applicable = True
if applicable:
for enumpropertyquery in query.getElementsByTagName("EnumPropertyQuery"):
data = None
if self.selected_type == "storage":
data = self.xc_servers[host].all['SR'][ref]
pbds = data['PBDs']
ip = ""
if "target" in self.xc_servers[host].all['PBD'][pbds[0]]["device_config"]:
ip = self.xc_servers[host].all['PBD'][pbds[0]]["device_config"]['target']
#ip = data["name_description"].split(" ")[2][1:]
elif self.selected_type == "vm":
data = self.xc_servers[host].all['vms'][ref]
ip = self.selected_ip
if self.selected_type == "host":
data = self.xc_servers[host].all['host'][ref]
ip = self.selected_ip
if self.selected_type == "pool":
data = self.xc_servers[host].all['pool'][ref]
ip = self.selected_ip
if data:
prop = enumpropertyquery.attributes.getNamedItem("property").value
equals = enumpropertyquery.attributes.getNamedItem("equals").value
value = enumpropertyquery.attributes.getNamedItem("query").value
if prop in data:
if equals == "no":
if isinstance(data[prop], str):
applicable = data[prop].count(value) > 0
else: # REVISE
applicable = False
else:
applicable = (data[prop] == value)
else:
if "XenCenter.CustomFields." + prop in data["other_config"]:
applicable = True
url = url.replace("{$%s}" % prop, data["other_config"]["XenCenter.CustomFields." + prop])
else:
applicable = False
return [applicable, ip, url, tabname]
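# For reference, a minimal sketch of the plugin XML shape that process_xml()
# expects (attribute values are hypothetical examples, not from a real plugin):
#   <XenCenterPlugin>
#     <TabPage name="Example" url="http://{$ip_address}/" search="uuid-1234"/>
#     <Search uuid="uuid-1234">
#       <Query>
#         <QueryScope><Host/></QueryScope>
#         <EnumPropertyQuery property="name_label" equals="no" query="xen"/>
#       </Query>
#     </Search>
#   </XenCenterPlugin>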
def plugin_get_search(self, nodes, search_uuid, host, ref):
"""
Determine if plugin is applicable
"""
applicable = False
ip = None
for search in nodes[0].getElementsByTagName("Search"):
if search.attributes.getNamedItem("uuid").value == search_uuid:
for query in search.getElementsByTagName("Query"):
queryscopes = query.getElementsByTagName("QueryScope")
for queryscope in queryscopes[0].childNodes:
if queryscope.nodeName != "#text":
if queryscope.nodeName == "LocalSR":
if self.selected_type == "storage":
shared = self.xc_servers[host].all['SR'][ref]['shared']
if not shared:
applicable = True
elif queryscope.nodeName == "RemoteSR":
if self.selected_type == "storage":
shared = self.xc_servers[host].all['SR'][ref]['shared']
if shared:
applicable = True
elif queryscope.nodeName == "Pool": # REVISE
if self.selected_type == "pool":
applicable = True
elif queryscope.nodeName == "Vm": # REVISE
if self.selected_type == "VM":
applicable = True
elif queryscope.nodeName == "Host": # REVISE
if self.selected_type == "host":
applicable = True
if applicable:
for enumpropertyquery in query.getElementsByTagName("EnumPropertyQuery"):
data = None
if self.selected_type == "storage":
data = self.xc_servers[host].all['SR'][ref]
ip = data["name_description"].split(" ")[2][1:]
elif self.selected_type == "vm":
data = self.xc_servers[host].all['vms'][ref]
ip = self.selected_ip
if self.selected_type == "host":
data = self.xc_servers[host].all['host'][ref]
ip = self.selected_ip
if self.selected_type == "pool":
data = self.xc_servers[host].all['pool'][ref]
ip = self.selected_ip
if data:
prop = enumpropertyquery.attributes.getNamedItem("property").value
equals = enumpropertyquery.attributes.getNamedItem("equals").value
value = enumpropertyquery.attributes.getNamedItem("query").value
if prop in data:
if equals == "no":
if isinstance(data[prop], str):
applicable = data[prop].count(value) > 0
else: # REVISE
applicable = False
else:
applicable = (data[prop] == value)
else:
applicable = False
return [applicable, ip]
def on_window_destroy(self, widget, data=None):
"""
Function called when you close the window or press Quit
"""
# For each server
if self.tunnel:
for key in self.tunnel.keys():
self.tunnel[key].close()
for sh in self.xc_servers:
# Stop the threads by setting the halt flags to True
self.xc_servers[sh].halt = True
self.xc_servers[sh].halt_search = True
self.xc_servers[sh].halt_import = True
self.xc_servers[sh].halt_performance = True
# Do a logout; remember that logout disconnects from the server and unregisters events
self.xc_servers[sh].logout()
# For windows only: close the tightvnc console
if self.hWnd != 0:
win32gui.PostMessage(self.hWnd, win32con.WM_QUIT, 0, 0)
self.hWnd = 0
# Get the position of the main window pane
self.save_pane_position()
# Save unsaved changes
self.config.write()
# Exit!
gtk.main_quit()
if self.vnc_process:
for process in self.vnc_process.keys():
# Kill all running sub-processes (started in their own process group via os.setsid)
if self.vnc_process[process].poll() != 0:
os.killpg(os.getpgid(self.vnc_process[process].pid), signal.SIGTERM)
# Force quit
os._exit(0)
return
def save_pane_position(self):
"""
Save the position of the main window HPaned
"""
pane = self.builder.get_object('main_pane')
self.config['gui']['pane_position'] = pane.get_position()
def count_list(self, model, path, iter_ref, user_data):
"""
Function to count elements from list..
"""
#TODO: remove and use __len__()
self.nelements = self.nelements + 1
def on_tabbox_focus_tab(self, widget, data=None, data2=None):
"""
Function called when you click on a tab
Tabbox contains all possible tabs, when you click on a tab first we will check the name
Depending of this name we will do different actions
"""
# Get the selected host
host = self.selected_host
# Check if we've actually selected a host
if host:
# Get the Tab name
#tab_label = widget.get_tab_label(widget.get_nth_page(data2)).name
tab_label = gtk.Buildable.get_name(widget.get_tab_label(widget.get_nth_page(data2)))
# Set as selected
self.selected_tab = tab_label
if tab_label != "VM_Console":
# If vnc console was opened and we change to another, close it
# Disable the send ctrl-alt-del menu item
self.builder.get_object("menuitem_tools_cad").set_sensitive(False)
if hasattr(self, "vnc") and self.vnc and not self.noclosevnc and not eval(self.config["options"]["multiple_vnc"]):
for key in self.vnc:
self.vnc[key].destroy()
self.builder.get_object("windowvncundock").hide()
self.vnc = {}
# Same on Windows
if sys.platform == 'win32' and self.hWnd != 0:
if win32gui.IsWindow(self.hWnd):
win32gui.PostMessage(self.hWnd, win32con.WM_CLOSE, 0, 0)
self.hWnd = 0
if self.tunnel and not self.noclosevnc and not eval(self.config["options"]["multiple_vnc"]):
for key in self.tunnel:
self.tunnel[key].close()
self.tunnel = {}
if self.vnc_builders and not eval(self.config["options"]["multiple_vnc"]):
for key in self.vnc_builders:
self.vnc_builders[key].get_object("console_area3").remove(self.vnc[key])
self.vnc_builders[key].get_object("windowvncundock").destroy()
self.vnc_builders = {}
if tab_label != "HOST_Search" and host:
# If we change tab to another different to HOST Search, then stop the filling thread
self.xc_servers[host].halt_search = True
if tab_label != "VM_Performance" and host:
self.xc_servers[host].halt_performance = True
if tab_label == "VM_Console":
self.builder.get_object("menuitem_tools_cad").set_sensitive(True)
self.treeview = self.builder.get_object("treevm")
if hasattr(self, "vnc") and self.vnc and not eval(self.config["options"]["multiple_vnc"]):
if self.tunnel:
for key in self.tunnel:
self.tunnel[key].close()
self.tunnel = {}
for key in self.vnc:
self.vnc[key].destroy()
self.builder.get_object("windowvncundock").hide()
self.vnc = {}
if self.treeview.get_cursor()[1]:
state = self.selected_state
# First checks if VM is running
self.builder.get_object("btenterfullscreen").grab_focus()
self.builder.get_object("console_area").grab_focus()
if state == "Running":
if self.selected_type == "host":
ref = self.xc_servers[host].host_vm[self.selected_ref][0]
else:
ref = self.selected_ref
location = self.get_console_location(host, ref)
if location is not None and (self.selected_ref not in self.tunnel.keys() or (self.selected_ref in self.vnc_process.keys() and self.vnc_process[self.selected_ref].poll() == 0)):
self.tunnel[self.selected_ref] = Tunnel(self.xc_servers[host].session_uuid, location)
port = self.tunnel[self.selected_ref].get_free_port()
if port is not None:
Thread(target=self.tunnel[self.selected_ref].listen, args=(port,)).start()
time.sleep(1)
else:
# TODO: Break here on error
print 'Could not get a free port'
if sys.platform != "win32" and sys.platform != "darwin":
if self.vnc and self.selected_ref in self.vnc.keys():
self.vnc[self.selected_ref].destroy()
# Create a gtkvnc object
self.vnc[self.selected_ref] = gtkvnc.Display()
# Add to gtkvnc to a console area
console_area = self.builder.get_object("console_area")
if hasattr(self, "current_vnc"):
console_area.remove(self.current_vnc)
# Define current VNC window
self.current_vnc = self.vnc[self.selected_ref]
# Add it to the console area
console_area.add(self.vnc[self.selected_ref])
console_area.show_all()
self.vnc[self.selected_ref].activate()
self.vnc[self.selected_ref].grab_focus()
self.vnc[self.selected_ref].set_pointer_grab(False)
self.vnc[self.selected_ref].set_pointer_local(False)
self.vnc[self.selected_ref].set_keyboard_grab(True)
self.vnc[self.selected_ref].set_shared_flag(True)
self.vnc[self.selected_ref].connect("vnc-disconnected", self.vnc_disconnected)
self.vnc[self.selected_ref].connect("key_press_event", self.on_console_area_key_press_event)
# And open the connection
try:
self.vnc[self.selected_ref].set_depth(1)
except RuntimeError:
pass
self.vnc[self.selected_ref].connect("vnc-server-cut-text", self.vnc_button_release)
self.vnc[self.selected_ref].open_host("localhost", str(port))
elif sys.platform == "darwin":
# Run the VNC viewer with host, vm ref and session ref
viewer = self.config['options']['vnc_viewer']
if viewer and os.path.exists(viewer):
self.vnc_process[self.selected_ref] = Popen([viewer,"localhost::%s" % port],shell=False,preexec_fn=os.setsid)
console_area = self.builder.get_object("console_area")
console_alloc = console_area.get_allocation()
else:
print "No VNC detected or VNC executable path does not exist"
else:
Thread(target=self.tunnel[self.selected_ref].listen, args=(port,)).start()
time.sleep(1)
# And open the connection
# TODO: Add the capability to change this path in the options and save to config
#viewer = os.path.join('C:\\', 'Program Files', 'TightVNC', 'tvnviewer.exe')
viewer = self.config['options']['vnc_viewer']
# Tight VNC Options
# Start the viewer and connect to the specified host:
# tvnviewer hostname::port [OPTIONS]
param = 'localhost::' + str(port)
pid = Popen([viewer, param])
console_area = self.builder.get_object("frameconsole")
console_area.realize()
console_alloc = console_area.get_allocation()
window_alloc = self.window.get_position()
x = console_alloc.x + window_alloc[0] + 10
y = console_alloc.y + window_alloc[1] + 47
# On windows we'll move the window..
while win32gui.FindWindow(None, "HVMXEN-%s" % self.selected_uuid) == 0 \
and win32gui.FindWindow(None, "XenServer Virtual Terminal") == 0 \
and win32gui.FindWindow(
None, "XenServer Virtual Terminal - TightVNC Viewer") == 0:
pass
self.hWnd = win32gui.FindWindow(None, "HVMXEN-%s" % self.selected_uuid)
if self.hWnd == 0:
self.hWnd = win32gui.FindWindow(None, "XenServer Virtual Terminal")
if self.hWnd == 0:
self.hWnd = win32gui.FindWindow(
None, 'XenServer Virtual Terminal - TightVNC Viewer')
if self.hWnd != 0:
win32gui.MoveWindow(self.hWnd, x, y, console_alloc.width-10,
console_alloc.height-5, 1)
else:
print 'Could not retrieve the window ID'
else:
if sys.platform != "win32" and sys.platform != "darwin" and eval(self.config["options"]["multiple_vnc"]):
console_area = self.builder.get_object("console_area")
if hasattr(self, "current_vnc"):
console_area.remove(self.current_vnc)
# Define current VNC window
self.current_vnc = self.vnc[self.selected_ref]
# Add it to the console area
console_area.add(self.vnc[self.selected_ref])
console_area.show_all()
else:
print 'No console available'
else:
print state
if tab_label == "VM_Memory":
self.update_memory_tab()
if tab_label == "VM_Storage":
if self.treeview.get_cursor()[1]:
# liststorage contains the storage on VM
liststorage = self.builder.get_object("listvmstorage")
# liststoragdvd contains the possibles dvd/isos to mount on VM
liststoragedvd = self.builder.get_object("listvmstoragedvd")
#liststoragedvd.set_sort_func(1, self.compare_data)
# Fill liststorage
self.xc_servers[host].fill_vm_storage(self.selected_ref, liststorage)
# Fill liststoragedvd; fill_vm_storage_dvd returns the currently mounted dvd/iso
active = self.xc_servers[host].fill_vm_storage_dvd(self.selected_ref, liststoragedvd)
# Flag variable to avoid emitting the change signal
self.set_active = True
# Set the currently mounted dvd/iso as active
self.builder.get_object("combovmstoragedvd").set_active(active)
self.set_active = False
elif tab_label == "VM_Network":
if self.treeview.get_cursor()[1]:
treenetwork = self.builder.get_object("treevmnetwork")
# listvmnetwork contains the networks of a vm
listnetwork = self.builder.get_object("listvmnetwork")
# Fill the list of networks
self.xc_servers[host].fill_vm_network(self.selected_ref, treenetwork, listnetwork)
elif tab_label == "VM_Snapshots":
if self.treeview.get_cursor()[1]:
treevmsnapshots = self.builder.get_object("treevmsnapshots")
# listvmsnapshots contains the snapshots of a vm
listvmsnapshots = self.builder.get_object("listvmsnapshots")
# Fill the list of snapshots
self.xc_servers[host].fill_vm_snapshots(self.selected_ref, treevmsnapshots, listvmsnapshots)
elif tab_label == "VM_Performance":
if self.treeview.get_cursor()[1]: # Get which VM is selected in the left list
# Thread to update performance images
ref = self.selected_ref
if self.selected_type == "vm":
self.builder.get_object("scrwin_diskusage").show()
self.builder.get_object("labeldiskusage").show()
Thread(target=self.xc_servers[host].update_performance, args=(self.selected_uuid, ref,
self.selected_ip, False)).start()
else:
self.builder.get_object("scrwin_diskusage").hide()
self.builder.get_object("labeldiskusage").hide()
if host and self.selected_ref in self.xc_servers[host].host_vm:
uuid = self.xc_servers[host].host_vm[self.selected_ref][1]
Thread(target=self.xc_servers[host].update_performance,
args=(uuid, ref, self.selected_ip, True)).start()
elif tab_label == "VM_Logs":
if self.treeview.get_cursor()[1]:
treeviewlog = self.builder.get_object("treeviewlog")
# listlog contains the snapshots of a vm/host
listlog = self.builder.get_object("listlog")
# Fill the list of logs
if self.selected_type == "vm":
self.xc_servers[host].fill_vm_log(self.selected_uuid, treeviewlog, listlog)
else:
self.xc_servers[host].fill_vm_log(self.selected_uuid, treeviewlog, listlog)
elif tab_label == "HOST_Users":
if self.selected_type == "pool":
name = self.xc_servers[host].all['pool'][self.selected_ref]['name_label']
externalauth = self.xc_servers[host].get_external_auth(
self.xc_servers[host]['master'])
else:
if self.selected_ref in self.xc_servers[host].all['host']:
name = self.xc_servers[host].all['host'][
self.selected_ref]['name_label']
externalauth = self.xc_servers[host].get_external_auth(
self.selected_ref)
listusers = self.builder.get_object("listusers")
self.xc_servers[host].fill_domain_users(self.selected_ref, listusers)
if externalauth[0] == "":
self.builder.get_object("btjoindomain").set_sensitive(True)
self.builder.get_object("btleavedomain").set_sensitive(False)
self.builder.get_object("lblusersdomain").set_text("AD is not currently configured for '" +
self.selected_name + "'. To enable AD "
"authentication, click "
"Join.")
else:
self.builder.get_object("btleavedomain").set_sensitive(True)
self.builder.get_object("btjoindomain").set_sensitive(False)
self.builder.get_object("lblusersdomain").set_text("Pool/host " + self.selected_name +
" belongs to domain '" + externalauth[1] +
"'. To enable AD authentication, click Join.")
elif tab_label == "HOST_Storage":
if self.treeview.get_cursor()[1]:
# listhoststorage contains the snapshots of a vm/host
liststorage = self.builder.get_object("listhoststorage")
# Fill the list of storage
self.xc_servers[host].fill_host_storage(self.selected_ref, liststorage)
elif tab_label == "HOST_Nics":
if self.treeview.get_cursor()[1]:
# liststorage = self.builder.get_object("listhostnics")
# self.xc_servers[host].fill_host_nics(self.selected_ref, liststorage)
# Call to update_tab_host_nics to fill the host nics
self.update_tab_host_nics()
elif tab_label == "HOST_Search":
if self.treeview.get_cursor()[1]:
self.xc_servers[host].halt_search = False
# Host_Search contains a live monitoring status of VM
# Create a thread to fill "listsearch"
self.xc_servers[host].thread_host_search(self.selected_ref, self.listsearch)
# Expand "treesearch"
self.treesearch.expand_all()
elif tab_label == "HOST_Hardware":
if host:
self.xc_servers[host].fill_host_hardware(self.selected_ref)
elif tab_label == "HOST_Network":
# Call to update_tab_host_network to fill the host networks
self.update_tab_host_network()
elif tab_label == "Local_Storage":
if self.treeview.get_cursor()[1]:
# liststg contains the vdi under storage
liststg = self.builder.get_object("liststg")
liststg.set_sort_func(1, self.compare_data)
liststg.set_sort_column_id(1, gtk.SORT_ASCENDING)
# Fill the list of storage
if host:
self.xc_servers[host].fill_local_storage(self.selected_ref, liststg)
elif tab_label == "Maps":
self.update_maps()
def get_console_location(self, host, ref):
location = None
if self.xc_servers[host].all['vms'][ref]['consoles']:
nb_consoles = len(self.xc_servers[host].all['vms'][ref]['consoles'])
for i in range(nb_consoles):
console_ref = self.xc_servers[host].all['vms'][ref]['consoles'][i]
protocol = self.xc_servers[host].all['console'][console_ref]['protocol']
if protocol == 'rfb':
location = self.xc_servers[host].all['console'][console_ref]['location']
break
if location is None:
print 'No VNC console found'
return location
def compare_data(self, model, iter1, iter2):
data1 = model.get_value(iter1, 1)
data2 = model.get_value(iter2, 1)
return cmp(data1, data2)
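# Note: cmp() exists only in Python 2; under Python 3 an equivalent would be
# (data1 > data2) - (data1 < data2), assuming comparable column values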
def update_maps(self):
dotcode = """
digraph G {
overlap=false;
bgcolor=white;
node [shape=polygon, sides=6, fontname="Verdana", fontsize="8"];
edge [color=deepskyblue3, fontname="Verdana", fontsize="5"];
"""
if self.selected_host:
show_halted_vms = self.builder.get_object("check_show_halted_vms").get_active()
if self.builder.get_object("check_show_network").get_active():
relation = self.xc_servers[self.selected_host].get_network_relation(self.selected_ref,
show_halted_vms)
for network in relation:
uuid, name = network.split("_", 1)
safename = name.replace("&", "&").replace("<", "<").replace("\"", """)
if self.builder.get_object("check_unused_network").get_active() or relation[network]:
dotcode += '"%s"[shape=plaintext, label=<<table border="0" cellpadding="0" ' \
'cellspacing="0"><tr><td><img src="%s"/></td></tr><tr>' \
'<td> </td></tr><tr><td>%s</td></tr></table>> tooltip="%s"];' % \
(uuid,
os.path.join(utils.module_path(), "images_map/network.png"),
safename,
name)
dotcode += "\n"
for vm in relation[network]:
uuid2, name2 = vm.split("_", 1)
dotcode += '"%s"[shape=plaintext, label=<<table border="0" cellpadding="0" ' \
'cellspacing="0"><tr><td><img src="%s"/></td></tr><tr>' \
'<td> </td></tr><tr><td>%s</td></tr></table>>URL="%s" tooltip="%s"];' % \
(uuid2,
os.path.join(utils.module_path(), "images_map/server.png"),
name2,
uuid2,
name2)
dotcode += "\n"
dotcode += '"%s" -> "%s"' % (uuid, uuid2)
dotcode += "\n"
if self.builder.get_object("check_show_storage").get_active():
dotcode += 'edge [color=forestgreen, fontname="Verdana", fontsize="5"];'
relation = self.xc_servers[self.selected_host].get_storage_relation(self.selected_ref,
show_halted_vms)
for storage in relation:
uuid, name = storage.split("_", 1)
safename = name.replace("&", "&").replace("<", "<").replace("\"", """)
if self.builder.get_object("check_unused_storage").get_active() or relation[storage]:
dotcode += '"%s"[shape=plaintext, label=<<table border="0" cellpadding="0" ' \
'cellspacing="0"><tr><td><img src="%s"/></td></tr><tr>' \
'<td> </td></tr><tr><td>%s</td></tr></table>>URL="%s" tooltip="%s"];' % \
(uuid,
os.path.join(utils.module_path(), "images_map/storage.png"),
safename,
uuid,
name)
dotcode += "\n"
for vm in relation[storage]:
uuid2, name2 = vm.split("_", 1)
safename2 = name2.replace("&", "&").replace("<", "<").replace("\"", """)
dotcode += '"%s"[shape=plaintext, label=<<table border="0" cellpadding="0" ' \
'cellspacing="0"><tr><td><img src="%s"/></td></tr><tr>' \
'<td> </td></tr><tr><td>%s</td></tr></table>>URL="%s" tooltip="%s"];' % \
(uuid2,
os.path.join(utils.module_path(), "images_map/server.png"),
safename2,
uuid2,
name2)
dotcode += "\n"
dotcode += '"%s" -> "%s"' % (uuid2, uuid)
dotcode += "\n"
dotcode += "}"
self.windowmap.set_dotcode(dotcode)
self.builder.get_object("viewportmap").show_all()
def on_btopenfile_activate(self, widget, data=None):
"""
Obsolete function
"""
filechooser = self.fileopen.get_children()[0].get_children()[0]
if filechooser.get_filename():
self.xc_servers[self.selected_host].import_vm(self.selected_ref, filechooser.get_filename())
self.fileopen.hide()
else:
self.show_error_dlg("Select a file")
def on_btsavefile_activate(self, widget, data=None):
"""
Function called when you press "Export VM"
"""
filechooser = self.filesave.get_children()[0].get_children()[0]
if filechooser.get_filename():
# Call export_vm with the vm ref and the chosen filename
if self.export_snap:
print "Export snap.."
self.xc_servers[self.selected_host].export_vm(self.selected_snap_ref, filechooser.get_filename(),
self.selected_ref)
self.export_snap = False
elif self.export_snap_vm:
print "Export snap as VM.."
self.xc_servers[self.selected_host].export_vm(self.selected_snap_ref, filechooser.get_filename(),
self.selected_ref, as_vm=True)
self.export_snap_vm = False
else:
self.xc_servers[self.selected_host].export_vm(self.selected_ref, filechooser.get_filename())
self.filesave.hide()
self.builder.get_object("tabbox").set_current_page(17)
else:
self.show_error_dlg("Select a file")
def on_filesave_confirm_overwrite(self, widget, data=None):
"""
Unused function
"""
print widget
print data
def on_btcancelsavefile_activate(self, widget, data=None):
"""
If you press cancel on "Export VM" dialog, then close the dialog
"""
self.export_snap = False
self.filesave.hide()
def on_btcancelopenfile_activate(self, widget, data=None):
"""
Unused function
"""
self.fileopen.hide()
def on_treevm_button_press_event(self, widget, event):
"""
Called when you click (or double-click) on the left tree
"""
x = int(event.x)
y = int(event.y)
event_time = event.time
if x == -10 and y == -10:
pthinfo = [self.modelfilter.get_path(self.treeview.get_selection().get_selected()[1]), None, 0, 0]
else:
pthinfo = widget.get_path_at_pos(x, y)
if event.type == gtk.gdk._2BUTTON_PRESS:
# On double click, if server is disconnected then connect to it
if self.selected_state == "Disconnected":
self.on_m_connect_activate(widget, None)
elif pthinfo is not None:
# On single click
path, col, cellx, celly = pthinfo
widget.grab_focus()
widget.set_cursor(path, col, 0)
path = self.modelfilter.convert_path_to_child_path(path)
iter_ref = self.treestore.get_iter(path)
# Define selected variables
self.selected_iter = iter_ref
self.selected_name = self.treestore.get_value(iter_ref, 1)
self.selected_uuid = self.treestore.get_value(iter_ref, 2)
self.selected_type = self.treestore.get_value(iter_ref, 3)
self.selected_state = self.treestore.get_value(iter_ref, 4)
self.selected_host = self.treestore.get_value(iter_ref, 5)
self.selected_ip = self.treestore.get_value(iter_ref, 8)
# Remember the previous selection so we only react to actual selection changes
previous_ref = self.selected_ref
self.selected_ref = self.treestore.get_value(iter_ref, 6)
# Define the possible actions for VM/host/storage..
if self.selected_type == "vm":
self.selected_actions = self.xc_servers[self.selected_host].get_actions(self.selected_ref)
else:
self.selected_actions = self.treestore.get_value(iter_ref, 7)
#if type(self.selected_actions) == type(""):
# self.selected_actions = eval(self.selected_actions)
# Update menubar and tabs with new selection
self.update_menubar()
self.update_tabs()
if self.selected_ref != previous_ref:
# If you selected a different element than previous
# then select the correct tab for selected type
if self.selected_type == "vm":
self.builder.get_object("tabbox").set_current_page(5)
else:
self.builder.get_object("tabbox").set_current_page(3)
if self.selected_type == "pool":
self.builder.get_object("tabbox").set_current_page(0)
elif self.selected_type == "host":
self.builder.get_object("tabbox").set_current_page(1)
self.builder.get_object("tabbox").set_current_page(4)
elif self.selected_type == "server":
self.builder.get_object("tabbox").set_current_page(2)
elif self.selected_type == "template":
self.builder.get_object("tabbox").set_current_page(2)
elif self.selected_type == "custom_template":
self.builder.get_object("tabbox").set_current_page(2)
elif self.selected_type == "storage":
self.builder.get_object("tabbox").set_current_page(1)
if event.button == 3:
# On right click..
# Show the menu
menu_vm = self.builder.get_object("context_menu_vm")
collapsed = False
expanded = False
can_expand_or_collapse = False
for child in range(0, self.treestore.iter_n_children(self.selected_iter)):
iter_ref = self.treestore.iter_nth_child(self.selected_iter, child)
if self.treestore.iter_n_children(iter_ref):
can_expand_or_collapse = True
path = self.treestore.get_path(iter_ref)
if self.treeview.row_expanded(path):
expanded = True
else:
collapsed = True
if can_expand_or_collapse:
if collapsed:
self.builder.get_object("expandall").show()
else:
self.builder.get_object("expandall").hide()
if expanded:
self.builder.get_object("collapsechildren").show()
else:
self.builder.get_object("collapsechildren").hide()
else:
self.builder.get_object("expandall").hide()
self.builder.get_object("collapsechildren").hide()
for child in menu_vm.get_children():
# Menuitems are with name "m_action"
# Checks if "action" is on selected_actions"
typestg = None
pbdstg = 1
if self.selected_type == "storage":
typestg = self.xc_servers[self.selected_host].all['SR'][self.selected_ref]["type"]
pbdstg = len(self.xc_servers[self.selected_host].all['SR'][self.selected_ref]["PBDs"])
if gtk.Buildable.get_name(child)[0:2] == "m_":
if not self.selected_actions or \
self.selected_actions.count(gtk.Buildable.get_name(child)[2:]) == 0:
child.hide()
else:
# If selected_type is storage and typestg is not "lvm" or "udev"
if typestg != "lvm" and typestg != "udev":
# If has not pbds.. then enable only "Reattach" and "Forget"
if pbdstg == 0 and (gtk.Buildable.get_name(child) == "m_plug" or
gtk.Buildable.get_name(child) == "m_forget"):
child.show()
else:
# Otherwise hide it
if pbdstg == 0:
child.hide()
else:
# If has pbds.. disable "Reattach"
if gtk.Buildable.get_name(child) != "m_plug":
child.show()
else:
child.hide()
else:
child.hide()
# Properties is always shown, except on home and on disconnected servers
if gtk.Buildable.get_name(child) == "properties":
if self.selected_type == "home":
child.hide()
elif self.selected_type == "server" and not self.selected_ref:
child.hide()
else:
child.show()
# Delete is shown only on pools
elif gtk.Buildable.get_name(child) == "delete":
if self.selected_type == "pool":
child.show()
else:
child.hide()
# Install XenServer Tools only on running VMs
elif gtk.Buildable.get_name(child) == "installxenservertools":
if self.selected_type == "vm" and self.selected_state == "Running":
self.builder.get_object("separator1").show()
self.builder.get_object("separator2").show()
child.show()
else:
self.builder.get_object("separator1").hide()
self.builder.get_object("separator2").hide()
child.hide()
# Repair storage, only on broken storage
elif gtk.Buildable.get_name(child) == "m_repair_storage":
if self.selected_type == "storage":
broken = self.xc_servers[self.selected_host].is_storage_broken(self.selected_ref)
if broken:
child.show()
else:
child.hide()
# Add to pool, only for servers without pools
elif gtk.Buildable.get_name(child) == "m_add_to_pool":
if self.selected_type == "host":
pool_ref = self.xc_servers[self.selected_host].all['pool'].keys()[0]
if self.xc_servers[self.selected_host].all['pool'][pool_ref]["name_label"] == "":
child.show()
else:
child.hide()
else:
child.hide()
# Add server to pool from pool menu
elif gtk.Buildable.get_name(child) == "m_pool_add_server":
if self.selected_type == "pool":
child.show()
else:
child.hide()
menu_vm.popup(None, None, None, event.button, event_time)
# Update toolbar and set label/image on top right pane
self.update_toolbar()
self.headlabel.set_label(self.calc_headlabel_text())
self.headimage.set_from_pixbuf(self.treestore.get_value(iter_ref, 0))
def calc_headlabel_text(self):
"""
Work out the text to display on the headlabel
:return: Headlabel text
:rtype: str
"""
if self.selected_type == 'vm':
txt = '%s on %s' % (self.selected_name, self.selected_host)
else:
txt = self.selected_name
return txt
def vnc_disconnected(self, info):
print "VNC disconnected..", info
# We need to find which of the open VNC windows was disconnected,
# in order to remove it from the stored dictionaries
disconnected_vnc = None
if self.vnc and eval(self.config["options"]["multiple_vnc"]):
for key in self.vnc:
if self.vnc[key] == info:
disconnected_vnc = key
break
if disconnected_vnc:
if disconnected_vnc in self.vnc_builders:
# This hooks into the destroy method, so there is no need to remove the key from the dict
# TODO: handle the reboot in the window itself
self.vnc_builders[disconnected_vnc].get_object("windowvncundock").destroy()
if disconnected_vnc in self.vnc.keys():
del self.vnc[disconnected_vnc]
if disconnected_vnc in self.tunnel.keys():
del self.tunnel[disconnected_vnc]
def on_txttreefilter_changed(self, widget, data=None):
"""
Function called when on left top entry you write text to filter
"""
self.modelfilter.refilter()
self.treeview.expand_all()
def show_error_dlg(self, error_string, error_title="Error"):
"""This Function is used to show an error dialog when
an error occurs.
error_string - The error string that will be displayed
on the dialog.
http://www.pygtk.org/articles/extending-our-pygtk-application/extending-our-pygtk-application.htm
"""
self.builder.get_object("walert").set_title(error_title)
self.builder.get_object("walerttext").set_text(error_string)
self.builder.get_object("walert").show()
def on_closewalert_clicked(self, widget, data=None):
self.builder.get_object("walert").hide()
def push_alert(self, alert):
"""
Set an alert message in the status bar
"""
self.statusbar.get_children()[0].get_children()[0].modify_fg(gtk.STATE_NORMAL, gtk.gdk.color_parse('#000000'))
self.statusbar.push(1, alert)
def push_error_alert(self, alert):
"""
Set an error alert message in the status bar
"""
self.statusbar.get_children()[0].get_children()[0].modify_fg(gtk.STATE_NORMAL, gtk.gdk.color_parse('#FF0000'))
self.statusbar.push(1, alert)
def not_implemented_yet(self, widget, data=None):
"""
Some functions are not implemented yet, show the dialog
"""
self.show_error_dlg("Not implemented yet")
def dump(self, obj):
"""
Internal use only
"""
for attr in dir(obj):
print "obj.%s = %s" % (attr, getattr(obj, attr))
def signal_handler(self):
"""
Function called when oxc gets a signal
"""
print "Exiting..."
for sh in self.xc_servers:
self.xc_servers[sh].halt = True
self.xc_servers[sh].halt_search = True
self.xc_servers[sh].halt_performance = True
self.xc_servers[sh].logout()
self.config.write()
if self.hWnd != 0:
win32gui.PostMessage(self.hWnd, win32con.WM_QUIT, 0, 0)
self.hWnd = 0
def on_delete_event(self, widget, event):
# Returning True, the window will not be destroyed
widget.hide()
return True
def convert_bytes(self, bytes):
# Convert bytes to string
# http://www.5dollarwhitebox.org/drupal/node/84
bytes = float(bytes)
if bytes >= 1099511627776:
terabytes = bytes / 1099511627776
size = '%.1fT' % terabytes
elif bytes >= 1073741824:
gigabytes = bytes / 1073741824
size = '%.1fG' % gigabytes
elif bytes >= 1048576:
megabytes = bytes / 1048576
size = '%.1fM' % megabytes
elif bytes >= 1024:
kilobytes = bytes / 1024
size = '%.1fK' % kilobytes
else:
size = '%.1fb' % bytes
return size
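# Rough usage sketch (values follow from the thresholds above):
#   convert_bytes(2048)       -> '2.0K'
#   convert_bytes(123456789)  -> '117.7M'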
def convert_bytes_mb(self, n):
# Convert bytes to mb string
n = float(n)
K, M = 1 << 10, 1 << 20
if n >= M:
return '%d' % (float(n) / M)
elif n >= K:
return '%d' % (float(n) / K)
else:
return '%d' % n
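# e.g. convert_bytes_mb(5 << 20) -> '5' and convert_bytes_mb(2048) -> '2';
# the magnitude decides the implicit unit (MiB/KiB/bytes), so presumably
# the caller appends the unit label itself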
| OpenXenManager/openxenmanager | src/OXM/window.py | Python | gpl-2.0 | 90,995 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('meta', '0002_auto_20170521_0304'),
]
operations = [
migrations.AddField(
model_name='meta',
name='font',
field=models.CharField(default=b'Impact', max_length=255, null=True, blank=True),
),
migrations.AddField(
model_name='meta',
name='font_color',
field=models.CharField(max_length=255, null=True, blank=True),
),
migrations.AddField(
model_name='meta',
name='font_effect',
field=models.CharField(default=None, max_length=255, null=True, blank=True, choices=[(b'shadow', b'Normal Shadow'), (b'anaglyph', b'Anaglyph'), (b'brick-sign', b'Brick Sign'), (b'canvas-print', b'Canvas Print'), (b'crackle', b'Crackle'), (b'decaying', b'Decaying'), (b'destruction', b'Destruction'), (b'distressed', b'Distressed'), (b'distressed-wood', b'Distressed Wood'), (b'emboss', b'Emboss'), (b'fire', b'Fire'), (b'fire-animation', b'Fire Animation'), (b'fragile', b'Fragile'), (b'grass', b'Grass'), (b'ice', b'Ice'), (b'mitosis', b'Mitosis'), (b'neon', b'Neon'), (b'outline', b'Outline'), (b'putting-green', b'Putting Green'), (b'scuffed-steel', b'Scuffed Steel'), (b'splintered', b'Splintered'), (b'static', b'Static'), (b'stonewash', b'Stonewash'), (b'3d', b'3d'), (b'3d-float', b'3d Float'), (b'vintage', b'Vintage'), (b'wallpaper', b'Wallpaper')]),
),
migrations.AddField(
model_name='meta',
name='font_size',
field=models.CharField(max_length=255, null=True, blank=True),
),
migrations.AddField(
model_name='meta',
name='font_weight',
field=models.CharField(default=b'normal', max_length=255, null=True, blank=True),
),
migrations.AddField(
model_name='meta',
name='outline_color',
field=models.CharField(default=None, max_length=255, null=True, blank=True),
),
]
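# A migration like this is normally applied with Django's migrate command,
# e.g. `python manage.py migrate meta` (assuming the standard manage.py layout)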
| google/mirandum | alerts/meta/migrations/0003_auto_20170521_1611.py | Python | apache-2.0 | 2,155 |
# -*- coding: utf-8 -*-
#
# MyTemplateWebsite documentation build configuration file, created by
# sphinx-quickstart.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MyTemplateWebsite'
copyright = u"2015, Zu Ming Tan"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MyTemplateWebsitedoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index',
'MyTemplateWebsite.tex',
u'MyTemplateWebsite Documentation',
u"Zu Ming Tan", 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'MyTemplateWebsite', u'MyTemplateWebsite Documentation',
[u"Zu Ming Tan"], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'MyTemplateWebsite', u'MyTemplateWebsite Documentation',
u"Zu Ming Tan", 'MyTemplateWebsite',
'A simple Django website', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
| zooming-tan/MyTemplateWebsite | docs/conf.py | Python | bsd-3-clause | 7,873 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Cutting Edge QA Marcin Koperski
import os
import time
import sys
import csv
# import unicodecsv as csv
from robot.libraries import DateTime
from robot.utils import asserts
from TestToolsMK import robot_instances
from TestToolsMK.robot_instances import validate_create_artifacts_dir
class CsvKeywords(object):
OUTPUT_FILE_CSV = "Artifacts/output.csv"
def csv_set_output_file(self, file_name="Artifacts/output.csv"):
self.OUTPUT_FILE_CSV = validate_create_artifacts_dir(file_name)
@staticmethod
def append_to_csv(filename, values_list, encoding='UTF-8'):
"""
Example usage:
        | ${list} | Create List | a | "1" | é,őáá | # example with UTF-8 chars |
        | Append To Csv | example.csv | ${list} |
"""
if sys.version_info[0] < 3:
with open(filename, 'ab') as csv_file:
writer_csv = csv.writer(csv_file, dialect='excel')
writer_csv.writerow([item.encode(encoding) for item in values_list])
else:
with open(filename, 'a', newline='') as csv_file:
writer_csv = csv.writer(csv_file, dialect='excel')
writer_csv.writerow([item for item in values_list])
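    # A plain-Python sketch (hypothetical file name, not part of the library):
    # the staticmethod above can also be called outside Robot Framework, e.g.
    #   CsvKeywords.append_to_csv('example.csv', [u'a', u'1', u'\xe9'])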
def csv_writer(self, *values):
"""
        Store records in the default CSV file
        ${EXECDIR}/Artifacts/output.csv
        Change the file name using the `Csv Set Output File` keyword
"""
log_file = self.OUTPUT_FILE_CSV
self.append_to_csv(log_file, list(values))
    def csv_writer_rows(self, filename, table, **kwargs):
        """
        Append multiple rows to a CSV file
        """
        if sys.version_info[0] < 3:
            with open(filename, 'ab') as csv_file:
                writer_csv = csv.writer(csv_file, dialect='excel', **kwargs)
                writer_csv.writerows(table)
        else:
            with open(filename, 'a', newline='') as csv_file:
                writer_csv = csv.writer(csv_file, dialect='excel', **kwargs)
                writer_csv.writerows(table)
def csv_writer_with_extra(self, *values):
"""
Add extra params at beginning
1. time of execution
        2. suite + test case name
"""
test_case_name = str(robot_instances.bi().get_variable_value("${TEST_NAME}"))
suite_name = str(robot_instances.bi().get_variable_value("${SUITE_NAME}"))
extra_list = list(values)
extra_list.insert(0, suite_name + test_case_name)
self.csv_writer_with_time(*extra_list)
def csv_writer_with_time(self, *values):
current_time = DateTime.get_current_date(result_format="%Y.%m.%d %H:%M:%S")
extra_list = list(values)
extra_list.insert(0, current_time)
self.csv_writer(*extra_list)
@staticmethod
    def file_should_not_change(filename, time_in_sec="1", msg="File was modified during waiting time"):
        """
        Checks the file modification time; the keyword passes only if the
        timestamp does not change during the given waiting period.
        Best used with the keyword Wait Until Keyword Succeeds
        """
before = os.stat(filename).st_mtime
time.sleep(float(time_in_sec))
after = os.stat(filename).st_mtime
asserts.assert_equal(before, after, msg, values=False)
@staticmethod
def append_to_file_at_beginning(path, content, encoding="UTF-8"):
path = os.path.abspath(path)
parent = os.path.dirname(path)
if not os.path.exists(parent):
os.makedirs(parent)
if not os.path.isfile(path):
open(path, 'w').close()
with open(path, 'r') as original:
data = original.read()
final_content = content + "\n" + data
        if sys.version_info[0] < 3:
            with open(path, 'w') as modified:
                modified.write(final_content.encode(encoding))
        else:
            # Python 3: open in text mode with the desired encoding instead of
            # writing encoded bytes to a text-mode handle.
            with open(path, 'w', encoding=encoding) as modified:
                modified.write(final_content)
# noinspection PyProtectedMember
robot_instances.osl()._link("Appended to file begin of file '%s'.", path)
@staticmethod
    def get_file_lines_count(path):
        count = 0
        with open(path) as f:
            for count, _ in enumerate(f, start=1):
                pass
        return count
@staticmethod
    def csv_read_file(path, encoding='UTF-8', encoding_errors='strict', delimiter=',', quotechar='"'):
"""
returns file CSV content as 2D table
"""
output_table = []
# encoding = osl()._map_encoding(encoding)
with open(path) as csv_file:
            csv_reader = csv.reader(csv_file, quotechar=quotechar, delimiter=delimiter)
for row in csv_reader:
output_table.append(row)
return output_table
@staticmethod
    def csv_read_file_return_dictionary(path, encoding='UTF-8', encoding_errors='strict', delimiter=',', quotechar='"'):
"""
returns file CSV content as 1D table of dictionaries
"""
output_table = []
# encoding = osl()._map_encoding(encoding)
with open(path) as csv_file:
            csv_reader = csv.DictReader(csv_file, quotechar=quotechar, delimiter=delimiter)
for row in csv_reader:
output_table.append(row)
return output_table | IlfirinPL/robotframework-MarcinKoperski | src/TestToolsMK/csv_keywords.py | Python | mit | 5,221 |
from django.template import loader, RequestContext
from django.http import Http404, HttpResponse
from django.core.xheaders import populate_xheaders
from django.core.paginator import QuerySetPaginator, InvalidPage
from django.core.exceptions import ObjectDoesNotExist
def object_list(request, queryset, paginate_by=None, page=None,
allow_empty=True, template_name=None, template_loader=loader,
extra_context=None, context_processors=None, template_object_name='object',
mimetype=None):
"""
Generic list of objects.
Templates: ``<app_label>/<model_name>_list.html``
Context:
object_list
list of objects
is_paginated
are the results paginated?
results_per_page
number of objects per page (if paginated)
has_next
is there a next page?
has_previous
is there a prev page?
page
the current page
next
the next page
previous
the previous page
pages
number of pages, total
hits
number of objects, total
last_on_page
the result number of the last of object in the
object_list (1-indexed)
first_on_page
the result number of the first object in the
object_list (1-indexed)
page_range:
A list of the page numbers (1-indexed).
"""
if extra_context is None: extra_context = {}
queryset = queryset._clone()
if paginate_by:
paginator = QuerySetPaginator(queryset, paginate_by, allow_empty_first_page=allow_empty)
if not page:
page = request.GET.get('page', 1)
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
# Page is not 'last', nor can it be converted to an int.
raise Http404
try:
page_obj = paginator.page(page_number)
except InvalidPage:
raise Http404
c = RequestContext(request, {
'%s_list' % template_object_name: page_obj.object_list,
'paginator': paginator,
'page_obj': page_obj,
# Legacy template context stuff. New templates should use page_obj
# to access this instead.
'is_paginated': page_obj.has_other_pages(),
'results_per_page': paginator.per_page,
'has_next': page_obj.has_next(),
'has_previous': page_obj.has_previous(),
'page': page_obj.number,
'next': page_obj.next_page_number(),
'previous': page_obj.previous_page_number(),
'first_on_page': page_obj.start_index(),
'last_on_page': page_obj.end_index(),
'pages': paginator.num_pages,
'hits': paginator.count,
'page_range': paginator.page_range,
}, context_processors)
else:
c = RequestContext(request, {
'%s_list' % template_object_name: queryset,
'paginator': None,
'page_obj': None,
'is_paginated': False,
}, context_processors)
if not allow_empty and len(queryset) == 0:
raise Http404
for key, value in extra_context.items():
if callable(value):
c[key] = value()
else:
c[key] = value
if not template_name:
model = queryset.model
template_name = "%s/%s_list.html" % (model._meta.app_label, model._meta.object_name.lower())
t = template_loader.get_template(template_name)
return HttpResponse(t.render(c), mimetype=mimetype)
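# A minimal URLconf sketch (hypothetical app and model, using the
# contemporaneous Django URL API) showing how object_list was typically wired:
#
# from django.conf.urls.defaults import patterns
# from myapp.models import Article
#
# urlpatterns = patterns('',
#     (r'^articles/$', object_list,
#      {'queryset': Article.objects.all(), 'paginate_by': 10}),
# )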
def object_detail(request, queryset, object_id=None, slug=None,
slug_field='slug', template_name=None, template_name_field=None,
template_loader=loader, extra_context=None,
context_processors=None, template_object_name='object',
mimetype=None):
"""
Generic detail of an object.
Templates: ``<app_label>/<model_name>_detail.html``
Context:
object
the object
"""
if extra_context is None: extra_context = {}
model = queryset.model
if object_id:
queryset = queryset.filter(pk=object_id)
elif slug and slug_field:
queryset = queryset.filter(**{slug_field: slug})
else:
raise AttributeError, "Generic detail view must be called with either an object_id or a slug/slug_field."
try:
obj = queryset.get()
except ObjectDoesNotExist:
raise Http404, "No %s found matching the query" % (model._meta.verbose_name)
if not template_name:
template_name = "%s/%s_detail.html" % (model._meta.app_label, model._meta.object_name.lower())
if template_name_field:
template_name_list = [getattr(obj, template_name_field), template_name]
t = template_loader.select_template(template_name_list)
else:
t = template_loader.get_template(template_name)
c = RequestContext(request, {
template_object_name: obj,
}, context_processors)
for key, value in extra_context.items():
if callable(value):
c[key] = value()
else:
c[key] = value
response = HttpResponse(t.render(c), mimetype=mimetype)
populate_xheaders(request, response, model, getattr(obj, obj._meta.pk.name))
return response
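# A matching URLconf entry for object_detail (same hypothetical app and model
# as the sketch above):
#
# (r'^articles/(?P<object_id>\d+)/$', object_detail,
#  {'queryset': Article.objects.all()}),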
| rawwell/django | django/views/generic/list_detail.py | Python | bsd-3-clause | 5,439 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
DEBUG = False
SECRET_KEY = "development key"
REDIS_PORT = 6379
REDIS_PREFIX = "osrc"
GITHUB_ID = None
GITHUB_SECRET = None
| errietta/osrc | osrc/default_settings.py | Python | mit | 172 |
""" Cornice services."""
import logging
from cornice import Service
import json
import re
from schema import create_table, Zopper
from exceptions import (NoFilterPassed,
InvadilDataException,
InvalidFilterFoundException,
NoDataPassedException)
from sqlalchemy import update
from sqlalchemy.sql import and_
from zopper.ws.dbkit import CreateSession, SessionCommit
from cornice.resource import resource
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigParser
logger = logging.getLogger('zopper.ws')
config = ConfigParser()
config.read('zopper.ws/development.ini')
should_commit = json.loads(config.get('zopper_config', 'commit'))
@resource(collection_path='/dataload/', path='/searchdata/')
class DataLoad(object):
def __init__(self, request):
self.request = request
self.response_dict = {}
self.session = CreateSession.createsession()
logger.info("Initializing resources")
logger.info("Requested URl: %s,"\
" HTTP Method Used: %s" % (self.request.url,
self.request.method))
logger.info("Module Name:'Views'")
def collection_post(self):
"""
Add data into database
"""
try:
dataload = self.request.body
records = json.loads(dataload)
data = records['data']
fields = records['fields']
columns = data.pop(0)
if not data:
msg = (400, "No data passed in payload: %s" % (dataload))
return NoDataPassedException(msg)
for index, each_data in enumerate(data):
values = Zopper(device_name=each_data[0],
mgnification=each_data[1],
field_of_view=each_data[2],
range=each_data[3])
self.session.add(values)
self.session.flush()
logger.info('Loading record %s' % str(index+1))
commit_obj = SessionCommit(self.session)
commit_obj.commit(flag=should_commit)
self.session.close()
response_msg = {"response_msg": 'Data loaded successfully.'}
self.response_dict['location'] = '/dataload/'
self.response_dict['message'] = str(response_msg)
self.response_dict['status_code'] = 201
return self.response_dict
except Exception:
msg = (400, "Invalid data passed in payload: %s" % (dataload))
return InvadilDataException(msg)
def get(self):
"""
Search data from database
"""
if self.request.GET:
if not self.request.GET.has_key('filter'):
msg = (400, "Filters must be passed with 'filter' in the URL")
logger.exception("Filters must be passed with 'filter' in the URL."\
"Error Status code:'%s' and Error Message:'%s'" % (msg[0], msg[1]))
return NoFilterPassed(msg)
self.filter_str = self.request.GET.get('filter', '')
search_results = self.filter_validation()
if not search_results[1]:
msg = (400, "'{0}' is an invalid filter."\
" These are the valid filters and their corresponding operators to be used: "\
"(1)Filter:'mgnification', Operators:'=' "\
"(2)Filter:'field_of_view' Operators:'=' "\
"(3)Filter:'range' Operators:'=' "\
"like '?filter=gmnification=7' or '?filter=range=800&field_of_view=8'."\
"Kindly pass the valid filter with their corresponding "\
"operator and try again.".format(search_results[0].strip('"')))
return InvalidFilterFoundException(msg)
logger.info("Fetching the query string passed.")
common_query = self.session.query(Zopper.device_name,
Zopper.mgnification,
Zopper.field_of_view,
Zopper.range)
if search_results[0]:
logger.info("Passed query string:%s." % str(self.filter_str))
results = common_query.filter(and_(*search_results[0])).all()
self.session.close()
res = []
if results:
for index, value in enumerate(results):
row_data = {}
row_data = {"DEVICE_NAME": value[0],
"MGNIFICATION": value[1],
"FIELD_OF_VIEW": value[2],
"RANGE": value[3]}
res.append(row_data)
self.response_dict['location'] = '/searchdata/'
self.response_dict['message'] = str({"SearchData": res})
self.response_dict['status_code'] = 200
if not res:
logger.info("No recond found !!!")
self.response_dict['status_code'] = 404
self.session.close()
return self.response_dict
def filter_validation(self):
"""
        Validation of all the filter parameters
"""
filter_attributes = self.filter_str.split(',')
terms = []
for attribute in filter_attributes:
filter_group = re.search(r'([a-zA-Z0-9_-]+)(=)([a-zA-Z0-9_\-\.0-9]+)', attribute)
if not filter_group:
return (attribute, False)
key,operator,value = filter_group.groups()
# Conversion from unicode to string
value = str(value)
if key == 'mgnification':
expression = Zopper.mgnification == value
elif key == 'field_of_view':
expression = Zopper.field_of_view == value
elif key == 'range':
expression = Zopper.range == value
else:
return (attribute, False)
terms.append(expression)
return (terms, True)
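    # Illustrative note (not part of the service): a request such as
    #   GET /searchdata/?filter=range=800,field_of_view=8
    # is split on commas above and yields two SQLAlchemy expressions that
    # get() combines with and_(), roughly:
    #   and_(Zopper.range == '800', Zopper.field_of_view == '8')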
| itssafi/zopper.ws | zopper/ws/views.py | Python | gpl-3.0 | 6,300 |
"""HomeKit session fixtures."""
from unittest.mock import patch
from pyhap.accessory_driver import AccessoryDriver
import pytest
from homeassistant.components.homekit.const import EVENT_HOMEKIT_CHANGED
from homeassistant.core import callback as ha_callback
@pytest.fixture
def hk_driver(loop):
"""Return a custom AccessoryDriver instance for HomeKit accessory init."""
with patch("pyhap.accessory_driver.Zeroconf"), patch(
"pyhap.accessory_driver.AccessoryEncoder"
), patch("pyhap.accessory_driver.HAPServer"), patch(
"pyhap.accessory_driver.AccessoryDriver.publish"
), patch(
"pyhap.accessory_driver.AccessoryDriver.persist"
):
yield AccessoryDriver(pincode=b"123-45-678", address="127.0.0.1", loop=loop)
@pytest.fixture
def events(hass):
"""Yield caught homekit_changed events."""
events = []
hass.bus.async_listen(
EVENT_HOMEKIT_CHANGED, ha_callback(lambda e: events.append(e))
)
yield events
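# A minimal usage sketch (hypothetical test, not part of this module): pytest
# injects the fixtures above by parameter name.
#
# async def test_homekit_event_captured(hass, events):
#     hass.bus.async_fire(EVENT_HOMEKIT_CHANGED, {"service": "switch"})
#     await hass.async_block_till_done()
#     assert len(events) == 1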
| turbokongen/home-assistant | tests/components/homekit/conftest.py | Python | apache-2.0 | 982 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Utilities for generating the version string for Astropy (or an affiliated
package) and the version.py module, which contains version info for the
package.
Within the generated astropy.version module, the `major`, `minor`, and `bugfix`
variables hold the respective parts of the version number (bugfix is '0' if
absent). The `release` variable is True if this is a release, and False if this
is a development version of astropy. For the actual version string, use::
from astropy.version import version
or::
from astropy import __version__
"""
import datetime
import os
import pkgutil
import sys
import time
import warnings
from distutils import log
from configparser import ConfigParser
import pkg_resources
from . import git_helpers
from .distutils_helpers import is_distutils_display_option
from .git_helpers import get_git_devstr
from .utils import AstropyDeprecationWarning, import_file
__all__ = ['generate_version_py']
def _version_split(version):
"""
Split a version string into major, minor, and bugfix numbers. If any of
those numbers are missing the default is zero. Any pre/post release
modifiers are ignored.
Examples
========
>>> _version_split('1.2.3')
(1, 2, 3)
>>> _version_split('1.2')
(1, 2, 0)
>>> _version_split('1.2rc1')
(1, 2, 0)
>>> _version_split('1')
(1, 0, 0)
>>> _version_split('')
(0, 0, 0)
"""
parsed_version = pkg_resources.parse_version(version)
if hasattr(parsed_version, 'base_version'):
# New version parsing for setuptools >= 8.0
if parsed_version.base_version:
parts = [int(part)
for part in parsed_version.base_version.split('.')]
else:
parts = []
else:
parts = []
for part in parsed_version:
if part.startswith('*'):
# Ignore any .dev, a, b, rc, etc.
break
parts.append(int(part))
if len(parts) < 3:
parts += [0] * (3 - len(parts))
# In principle a version could have more parts (like 1.2.3.4) but we only
# support <major>.<minor>.<micro>
return tuple(parts[:3])
# This is used by setup.py to create a new version.py - see that file for
# details. Note that the imports have to be absolute, since this is also used
# by affiliated packages.
_FROZEN_VERSION_PY_TEMPLATE = """
# Autogenerated by {packagetitle}'s setup.py on {timestamp!s} UTC
import datetime
{header}
major = {major}
minor = {minor}
bugfix = {bugfix}
version_info = (major, minor, bugfix)
release = {rel}
timestamp = {timestamp!r}
debug = {debug}
astropy_helpers_version = "{ahver}"
"""[1:]
_FROZEN_VERSION_PY_WITH_GIT_HEADER = """
{git_helpers}
_packagename = "{packagename}"
_last_generated_version = "{verstr}"
_last_githash = "{githash}"
# Determine where the source code for this module
# lives. If __file__ is not a filesystem path then
# it is assumed not to live in a git repo at all.
if _get_repo_path(__file__, levels=len(_packagename.split('.'))):
version = update_git_devstr(_last_generated_version, path=__file__)
githash = get_git_devstr(sha=True, show_warning=False,
path=__file__) or _last_githash
else:
# The file does not appear to live in a git repo so don't bother
# invoking git
version = _last_generated_version
githash = _last_githash
"""[1:]
_FROZEN_VERSION_PY_STATIC_HEADER = """
version = "{verstr}"
githash = "{githash}"
"""[1:]
def _get_version_py_str(packagename, version, githash, release, debug,
uses_git=True):
try:
from astropy_helpers import __version__ as ahver
except ImportError:
ahver = "unknown"
epoch = int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))
timestamp = datetime.datetime.utcfromtimestamp(epoch)
major, minor, bugfix = _version_split(version)
if packagename.lower() == 'astropy':
packagetitle = 'Astropy'
else:
packagetitle = 'Astropy-affiliated package ' + packagename
header = ''
if uses_git:
header = _generate_git_header(packagename, version, githash)
elif not githash:
        # _generate_git_header will already generate a new git hash for us, but
# for creating a new version.py for a release (even if uses_git=False)
# we still need to get the githash to include in the version.py
# See https://github.com/astropy/astropy-helpers/issues/141
githash = git_helpers.get_git_devstr(sha=True, show_warning=True)
if not header: # If _generate_git_header fails it returns an empty string
header = _FROZEN_VERSION_PY_STATIC_HEADER.format(verstr=version,
githash=githash)
return _FROZEN_VERSION_PY_TEMPLATE.format(packagetitle=packagetitle,
timestamp=timestamp,
header=header,
major=major,
minor=minor,
bugfix=bugfix,
ahver=ahver,
rel=release, debug=debug)
def _generate_git_header(packagename, version, githash):
"""
Generates a header to the version.py module that includes utilities for
probing the git repository for updates (to the current git hash, etc.)
These utilities should only be available in development versions, and not
in release builds.
If this fails for any reason an empty string is returned.
"""
loader = pkgutil.get_loader(git_helpers)
source = loader.get_source(git_helpers.__name__) or ''
source_lines = source.splitlines()
if not source_lines:
log.warn('Cannot get source code for astropy_helpers.git_helpers; '
'git support disabled.')
return ''
idx = 0
for idx, line in enumerate(source_lines):
if line.startswith('# BEGIN'):
break
git_helpers_py = '\n'.join(source_lines[idx + 1:])
verstr = version
new_githash = git_helpers.get_git_devstr(sha=True, show_warning=False)
if new_githash:
githash = new_githash
return _FROZEN_VERSION_PY_WITH_GIT_HEADER.format(
git_helpers=git_helpers_py, packagename=packagename,
verstr=verstr, githash=githash)
def generate_version_py(packagename=None, version=None, release=None, debug=None,
uses_git=None, srcdir='.'):
"""
Generate a version.py file in the package with version information, and
update developer version strings.
This function should normally be called without any arguments. In this case
the package name and version is read in from the ``setup.cfg`` file (from
the ``name`` or ``package_name`` entry and the ``version`` entry in the
``[metadata]`` section).
If the version is a developer version (of the form ``3.2.dev``), the
version string will automatically be expanded to include a sequential
number as a suffix (e.g. ``3.2.dev13312``), and the updated version string
will be returned by this function.
Based on this updated version string, a ``version.py`` file will be
generated inside the package, containing the version string as well as more
detailed information (for example the major, minor, and bugfix version
numbers, a ``release`` flag indicating whether the current version is a
    stable or developer version, and so on).
"""
if packagename is not None:
warnings.warn('The packagename argument to generate_version_py has '
'been deprecated and will be removed in future. Specify '
'the package name in setup.cfg instead', AstropyDeprecationWarning)
if version is not None:
warnings.warn('The version argument to generate_version_py has '
'been deprecated and will be removed in future. Specify '
'the version number in setup.cfg instead', AstropyDeprecationWarning)
if release is not None:
warnings.warn('The release argument to generate_version_py has '
'been deprecated and will be removed in future. We now '
'use the presence of the "dev" string in the version to '
'determine whether this is a release', AstropyDeprecationWarning)
# We use ConfigParser instead of read_configuration here because the latter
# only reads in keys recognized by setuptools, but we need to access
# package_name below.
conf = ConfigParser()
conf.read('setup.cfg')
if conf.has_option('metadata', 'name'):
packagename = conf.get('metadata', 'name')
elif conf.has_option('metadata', 'package_name'):
# The package-template used package_name instead of name for a while
warnings.warn('Specifying the package name using the "package_name" '
'option in setup.cfg is deprecated - use the "name" '
'option instead.', AstropyDeprecationWarning)
packagename = conf.get('metadata', 'package_name')
elif packagename is not None: # deprecated
pass
else:
sys.stderr.write('ERROR: Could not read package name from setup.cfg\n')
sys.exit(1)
if conf.has_option('metadata', 'version'):
version = conf.get('metadata', 'version')
add_git_devstr = True
elif version is not None: # deprecated
add_git_devstr = False
else:
sys.stderr.write('ERROR: Could not read package version from setup.cfg\n')
sys.exit(1)
if release is None:
release = 'dev' not in version
if not release and add_git_devstr:
version += get_git_devstr(False)
if uses_git is None:
uses_git = not release
    # A package name may contain a '-', which becomes '_' in the module name.
    # Since we are only interested in the module here, we replace '-' by '_'.
packagename = packagename.replace('-', '_')
try:
version_module = get_pkg_version_module(packagename)
try:
last_generated_version = version_module._last_generated_version
except AttributeError:
last_generated_version = version_module.version
try:
last_githash = version_module._last_githash
except AttributeError:
last_githash = version_module.githash
current_release = version_module.release
current_debug = version_module.debug
except ImportError:
version_module = None
last_generated_version = None
last_githash = None
current_release = None
current_debug = None
if release is None:
# Keep whatever the current value is, if it exists
release = bool(current_release)
if debug is None:
# Likewise, keep whatever the current value is, if it exists
debug = bool(current_debug)
package_srcdir = os.path.join(srcdir, *packagename.split('.'))
version_py = os.path.join(package_srcdir, 'version.py')
if (last_generated_version != version or current_release != release or
current_debug != debug):
if '-q' not in sys.argv and '--quiet' not in sys.argv:
log.set_threshold(log.INFO)
if is_distutils_display_option():
# Always silence unnecessary log messages when display options are
# being used
log.set_threshold(log.WARN)
log.info('Freezing version number to {0}'.format(version_py))
with open(version_py, 'w') as f:
# This overwrites the actual version.py
f.write(_get_version_py_str(packagename, version, last_githash,
release, debug, uses_git=uses_git))
return version
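# Illustrative call (assumed project layout, not part of this module): run
# from a setup.py that sits next to a setup.cfg supplying the package name
# and version; the possibly-expanded version string is returned.
#
# from astropy_helpers.version_helpers import generate_version_py
# version = generate_version_py()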
def get_pkg_version_module(packagename, fromlist=None):
"""Returns the package's .version module generated by
`astropy_helpers.version_helpers.generate_version_py`. Raises an
ImportError if the version module is not found.
If ``fromlist`` is an iterable, return a tuple of the members of the
version module corresponding to the member names given in ``fromlist``.
Raises an `AttributeError` if any of these module members are not found.
"""
version = import_file(os.path.join(packagename, 'version.py'), name='version')
if fromlist:
return tuple(getattr(version, member) for member in fromlist)
else:
return version
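# Illustrative usage (hypothetical package name, not part of this module):
# version, githash = get_pkg_version_module('mypackage',
#                                           fromlist=['version', 'githash'])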
| astropy/astropy-helpers | astropy_helpers/version_helpers.py | Python | bsd-3-clause | 12,694 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Comunitea Servicios Tecnológicos All Rights Reserved
# $Omar Castiñeira Saavedra <[email protected]>$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, exceptions, _, SUPERUSER_ID
class ResUsers(models.Model):
_inherit = "res.users"
@api.one
@api.depends('password')
def _get_test_password(self):
if self.password:
self.test_password = self.password + u"#pruebas"
else:
self.test_password = False
test_password = fields.Char(compute=_get_test_password, readonly=True,
string="Test Password", store=True)
def check_credentials(self, cr, uid, password):
res = self.search(cr, SUPERUSER_ID, [('id', '=', uid),
('test_password', '=', password)])
if not res:
super(ResUsers, self).check_credentials(cr, uid, password)
res = self.search(cr, SUPERUSER_ID, [('id', '=', uid),
('password', '=', password)])
user = self.browse(cr, SUPERUSER_ID, res[0])
is_test = self.pool["res.company"].\
search(cr, SUPERUSER_ID, [('test_company_id', '=',
user.company_id.id)])
if is_test:
user.write({'company_id': is_test[0]})
else:
user = self.browse(cr, SUPERUSER_ID, res[0])
is_test = self.pool["res.company"].\
search(cr, SUPERUSER_ID, [('test_company_id', '=',
user.company_id.id)])
if not is_test:
test_company = user.company_id.test_company_id
if not test_company:
raise exceptions.AccessDenied()
else:
if test_company not in user.company_ids:
raise exceptions.AccessDenied()
user.write({'company_id': test_company.id})
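# Illustrative note (not part of the module): with the compute method above, a
# user whose real password is "secret" gets test_password "secret#pruebas".
# Logging in with the suffixed password switches the session to the linked
# test company, while logging in with the real password switches it back to
# the production company.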
| jgmanzanas/CMNT_004_15 | project-addons/test_management/users.py | Python | agpl-3.0 | 2,882 |
#______________________________________________________________________________
# Turn our site into a pdf printable document using princexml
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
import grok, re
from interfaces import ISiteRoot, IArticle, IArticleSorter
from menu import UtilItem
from resource import style, textLight
from zope.traversing.api import traverse
from urlparse import urlparse
import subprocess
try: # Figure out whether prince (http://www.princexml.com) is installed
has_prince = subprocess.call(['prince', '--version']) >= 0
except:
has_prince = False
class PageSimpleHTML(grok.View):
''' Render this IArticle as a simple page, then do the same
for each of the sub-articles
'''
grok.context(IArticle)
grok.require('zope.Public')
def articleNumber(self):
order = getattr(self.context, "order", None)
if order is None:
self.context.section = ""
else:
order = int(order) + 1
parent = getattr(self.context, "__parent__", None)
if parent and len(parent.section):
section = "{}.{}".format(parent.section, order)
else:
section = "{}".format(order)
self.context.section = section
return section + ": "
return ""
def articleId(self, item):
if self.context.section:
section = "{}.{}".format(self.context.section, item.order+1)
else:
section = "{}".format(item.order+1)
return u'sn_'+section.replace('.', '_')
def articleContent(self):
def host_from(netloc):
if type(netloc) is str and len(netloc):
return netloc.split(":")[0]
baseUrl = self.url(self.context) + "/"
host = host_from(urlparse(baseUrl).netloc)
text = self.context.text
c = re.compile(r'<a\s*title="([^"]*)"\s*href="([^"]*)">([^<]*)</a>')
pos = 0
new_text = u""
while True: # Replace URLs inline
s = c.search(text, pos=pos)
if s is None:
new_text += text[pos:]
break
else:
new_text += text[pos:s.start()] # Add text up to start
pos = s.end()
url = urlparse(s.group(2))
if (len(url.netloc)==0 or host == host_from(url.netloc)):
# local link. replace with section anchor
if len(url.netloc)==0:
try: # Try relative to site
ob = traverse(grok.getSite(), url.path)
except:
try: # is it relative to current?
ob = traverse(self.context, url.path)
except:
ob = None
else:
try: # Path is relative to site
ob = traverse(grok.getSite(), url.path)
except:
ob = None
if ob is None or not IArticle.providedBy(ob):
self.flash('Path not found!')
self.flash(' - current page is: {}'.format(str(self.url(self.context))))
self.flash(' - netloc={}; path={}'.format(url.netloc, url.path))
new_text += s.group()
else:
fmt = '<a title="{}" href="#{}">{}</a>'
new_text += fmt.format(s.group(1),
ob.getArticleId(),
s.group(3))
else: # Replace global links with footnotes
fmt = u"<em>{}</em> <span class='fn'>{}: {}</span>"
new_text += fmt.format(s.group(3), s.group(1), s.group(2))
text = new_text
if self.context.attachments is not None:
for a in self.context.attachments:
st = 'attachments/{}'.format(a)
text = text.replace(st, baseUrl+st)
return text
def sortedItems(self):
sorter = IArticleSorter(self.context)
return sorter.sortedItems()
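# Illustrative note (not part of the module): articleNumber() above builds
# dotted section labels from the nesting order, so the 2nd child of the 3rd
# top-level article renders as "3.2: ", and articleId() turns that section
# into the anchor id "sn_3_2".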
class FullPageHTML(grok.View):
''' Return the site as a single HTML page
'''
grok.context(ISiteRoot)
grok.require('zope.Public')
def update(self):
style.need()
textLight.need()
class IdView(grok.View):
''' Give all articles an ID
'''
grok.context(IArticle)
grok.require('zope.Public')
grok.name("id")
def render(self):
return self.context.getArticleId()
class MkBook(grok.View):
''' Turn the content of this site into a book
'''
grok.context(ISiteRoot)
grok.require('zope.Public')
def render(self):
url = self.url(self.context, name='fullpagehtml')
try:
result = subprocess.check_output(['prince', url, '-o', '-'])
except:
return
response = self.request.response
response.setHeader('content-type', 'application/pdf')
response.addHeader('content-disposition', 'inline;filename="gfn.pdf"')
return result
class MkBookButton(UtilItem):
''' A menu item that turns the site into a book
'''
grok.context(ISiteRoot)
grok.require('zope.Public')
grok.order(-1)
title = u'Create Book'
link = 'mkbook'
mclass = 'nav buttons'
def condition(self): # Depends on the 'prince' app being installed
return has_prince
| prsephton/Grok4Noobs | mkbook.py | Python | lgpl-2.1 | 5,654 |
# Support for "neopixel" leds
#
# Copyright (C) 2019-2020 Kevin O'Connor <[email protected]>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
import logging
BACKGROUND_PRIORITY_CLOCK = 0x7fffffff00000000
BIT_MAX_TIME=.000004
RESET_MIN_TIME=.000050
MAX_MCU_SIZE = 500 # Sanity check on LED chain length
class PrinterNeoPixel:
def __init__(self, config):
self.printer = config.get_printer()
name = config.get_name().split()[1]
self.mutex = self.printer.get_reactor().mutex()
# Configure neopixel
ppins = self.printer.lookup_object('pins')
pin_params = ppins.lookup_pin(config.get('pin'))
self.mcu = pin_params['chip']
self.oid = self.mcu.create_oid()
self.pin = pin_params['pin']
self.mcu.register_config_callback(self.build_config)
formats = {v: v for v in ["RGB", "GRB", "RGBW", "GRBW"]}
self.color_order = config.getchoice("color_order", formats, "GRB")
elem_size = len(self.color_order)
self.chain_count = config.getint('chain_count', 1, minval=1,
maxval=MAX_MCU_SIZE//elem_size)
self.neopixel_update_cmd = self.neopixel_send_cmd = None
# Initial color
self.color_data = bytearray(self.chain_count * elem_size)
red = config.getfloat('initial_RED', 0., minval=0., maxval=1.)
green = config.getfloat('initial_GREEN', 0., minval=0., maxval=1.)
blue = config.getfloat('initial_BLUE', 0., minval=0., maxval=1.)
white = 0
if elem_size == 4:
white = config.getfloat('initial_WHITE', 0., minval=0., maxval=1.)
self.update_color_data(red, green, blue, white)
self.old_color_data = bytearray([d ^ 1 for d in self.color_data])
# Register commands
self.printer.register_event_handler("klippy:connect", self.send_data)
gcode = self.printer.lookup_object('gcode')
gcode.register_mux_command("SET_LED", "LED", name, self.cmd_SET_LED,
desc=self.cmd_SET_LED_help)
def build_config(self):
bmt = self.mcu.seconds_to_clock(BIT_MAX_TIME)
rmt = self.mcu.seconds_to_clock(RESET_MIN_TIME)
self.mcu.add_config_cmd("config_neopixel oid=%d pin=%s data_size=%d"
" bit_max_ticks=%d reset_min_ticks=%d"
% (self.oid, self.pin, len(self.color_data),
bmt, rmt))
cmd_queue = self.mcu.alloc_command_queue()
self.neopixel_update_cmd = self.mcu.lookup_command(
"neopixel_update oid=%c pos=%hu data=%*s", cq=cmd_queue)
self.neopixel_send_cmd = self.mcu.lookup_query_command(
"neopixel_send oid=%c", "neopixel_result oid=%c success=%c",
oid=self.oid, cq=cmd_queue)
def update_color_data(self, red, green, blue, white, index=None):
red = int(red * 255. + .5)
blue = int(blue * 255. + .5)
green = int(green * 255. + .5)
white = int(white * 255. + .5)
if self.color_order == "GRB":
color_data = [green, red, blue]
elif self.color_order == "RGB":
color_data = [red, green, blue]
elif self.color_order == "GRBW":
color_data = [green, red, blue, white]
else:
color_data = [red, green, blue, white]
if index is None:
self.color_data[:] = color_data * self.chain_count
else:
elem_size = len(color_data)
self.color_data[(index-1)*elem_size:index*elem_size] = color_data
def send_data(self, print_time=None):
old_data, new_data = self.old_color_data, self.color_data
if new_data == old_data:
return
# Find the position of all changed bytes in this framebuffer
diffs = [[i, 1] for i, (n, o) in enumerate(zip(new_data, old_data))
if n != o]
# Batch together changes that are close to each other
for i in range(len(diffs)-2, -1, -1):
pos, count = diffs[i]
nextpos, nextcount = diffs[i+1]
if pos + 5 >= nextpos and nextcount < 16:
diffs[i][1] = nextcount + (nextpos - pos)
del diffs[i+1]
# Transmit changes
ucmd = self.neopixel_update_cmd.send
for pos, count in diffs:
ucmd([self.oid, pos, new_data[pos:pos+count]],
reqclock=BACKGROUND_PRIORITY_CLOCK)
old_data[:] = new_data
# Instruct mcu to update the LEDs
minclock = 0
if print_time is not None:
minclock = self.mcu.print_time_to_clock(print_time)
scmd = self.neopixel_send_cmd.send
if self.printer.get_start_args().get('debugoutput') is not None:
return
for i in range(8):
params = scmd([self.oid], minclock=minclock,
reqclock=BACKGROUND_PRIORITY_CLOCK)
if params['success']:
break
else:
logging.info("Neopixel update did not succeed")
cmd_SET_LED_help = "Set the color of an LED"
def cmd_SET_LED(self, gcmd):
# Parse parameters
red = gcmd.get_float('RED', 0., minval=0., maxval=1.)
green = gcmd.get_float('GREEN', 0., minval=0., maxval=1.)
blue = gcmd.get_float('BLUE', 0., minval=0., maxval=1.)
white = gcmd.get_float('WHITE', 0., minval=0., maxval=1.)
index = gcmd.get_int('INDEX', None, minval=1, maxval=self.chain_count)
transmit = gcmd.get_int('TRANSMIT', 1)
sync = gcmd.get_int('SYNC', 1)
# Update and transmit data
def reactor_bgfunc(print_time):
with self.mutex:
self.update_color_data(red, green, blue, white, index)
if transmit:
self.send_data(print_time)
def lookahead_bgfunc(print_time):
reactor = self.printer.get_reactor()
reactor.register_callback(lambda et: reactor_bgfunc(print_time))
if sync:
            # Sync LED update with print time and send
toolhead = self.printer.lookup_object('toolhead')
toolhead.register_lookahead_callback(lookahead_bgfunc)
else:
            # Send update now (so as not to wake toolhead and reset idle_timeout)
lookahead_bgfunc(None)
def get_status(self, eventtime):
cdata = []
elem_size = len(self.color_order)
for i in range(self.chain_count):
idx = i * elem_size
cdata.append(
{k: round(v / 255., 4) for k, v in
zip(self.color_order, self.color_data[idx:idx+elem_size])}
)
return {'color_data': cdata}
def load_config_prefix(config):
return PrinterNeoPixel(config)
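# A standalone sketch (not part of Klipper) of the diff batching performed in
# send_data() above: changed bytes within 5 positions of each other are merged
# into a single update command, capped at 16 bytes per batch.
def _batch_diffs_demo(new_data, old_data):
    # Positions of all changed bytes, each starting as a 1-byte batch
    diffs = [[i, 1] for i, (n, o) in enumerate(zip(new_data, old_data))
             if n != o]
    # Merge batches that are close together, scanning right to left
    for i in range(len(diffs) - 2, -1, -1):
        pos, count = diffs[i]
        nextpos, nextcount = diffs[i + 1]
        if pos + 5 >= nextpos and nextcount < 16:
            diffs[i][1] = nextcount + (nextpos - pos)
            del diffs[i + 1]
    return diffs

# Example: bytes 0, 3 and 20 differ; positions 0 and 3 merge into one batch.
# _batch_diffs_demo(bytearray(b'\x01\x00\x00\x02' + b'\x00' * 16 + b'\x03'),
#                   bytearray(21)) == [[0, 4], [20, 1]]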
| KevinOConnor/klipper | klippy/extras/neopixel.py | Python | gpl-3.0 | 6,836 |
from __future__ import absolute_import
import json
import sys
from os import makedirs
from os.path import join, normpath, isdir, isfile
from .base import *
DEBUG = True
SECRET_KEY = 'make-a-secret-key'
LOCAL_SETUP_DIR = join(PROJECT_DIR, 'test_setup')
if not isdir(LOCAL_SETUP_DIR):
makedirs(LOCAL_SETUP_DIR)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': join(LOCAL_SETUP_DIR, 'plants.db3'),
},
}
SESSION_COOKIE_NAME = 'plants_local'
# ------------------------------------
# where static files are collected
# ------------------------------------
STATIC_ROOT = join(LOCAL_SETUP_DIR, 'staticfiles')
if not isdir(STATIC_ROOT):
makedirs(STATIC_ROOT)
INTERNAL_IPS = ('127.0.0.1',)
#MEDIA_URL = '/media/'
| arnarb/greenhousedb | plants/settings/laptop.py | Python | gpl-3.0 | 774 |
#!/usr/bin/env python
from django import forms
from djtokeninput.widgets import TokenWidget
class TokenField(forms.ModelMultipleChoiceField):
kwargs_for_widget = ("search_url",)
widget = TokenWidget
@staticmethod
def _class_name(value):
return value.replace(" ", "-")
def __init__(self, model, *args, **kwargs):
widget_attrs = { }
for name in self.kwargs_for_widget:
if name in kwargs:
widget_attrs[name] = kwargs.pop(name)
super(TokenField, self).__init__(model.objects.all(), *args, **kwargs)
self.widget.class_name = self._class_name(model._meta.verbose_name_plural)
self.widget.model = model
for name in widget_attrs:
setattr(self.widget, name, widget_attrs[name])
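# A minimal usage sketch (hypothetical model and search URL, not part of this
# module):
#
# from django import forms
# from myapp.models import Tag
#
# class EntryForm(forms.Form):
#     tags = TokenField(Tag, search_url="/tags/search/")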
| jgruhl/djtokeninput | lib/djtokeninput/fields.py | Python | mit | 735 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-06 20:01
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='CodeRun',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('run_id', models.PositiveIntegerField()),
('run_status', models.CharField(max_length=255)),
('output', models.TextField()),
],
),
]
| RadoRado/EuroPython2017 | run_python_run/repl/migrations/0001_initial.py | Python | mit | 665 |
"""
Discrete Fourier Transforms
Routines in this module:
fft(a, n=None, axis=-1)
ifft(a, n=None, axis=-1)
rfft(a, n=None, axis=-1)
irfft(a, n=None, axis=-1)
hfft(a, n=None, axis=-1)
ihfft(a, n=None, axis=-1)
fftn(a, s=None, axes=None)
ifftn(a, s=None, axes=None)
rfftn(a, s=None, axes=None)
irfftn(a, s=None, axes=None)
fft2(a, s=None, axes=(-2,-1))
ifft2(a, s=None, axes=(-2, -1))
rfft2(a, s=None, axes=(-2,-1))
irfft2(a, s=None, axes=(-2, -1))
i = inverse transform
r = transform of purely real data
h = Hermitian transform
n = n-dimensional transform
2 = 2-dimensional transform
(Note: 2D routines are just nD routines with different default
behavior.)
The underlying code for these functions is an f2c-translated and modified
version of the FFTPACK routines.
"""
from __future__ import division, absolute_import, print_function
__all__ = ['fft', 'ifft', 'rfft', 'irfft', 'hfft', 'ihfft', 'rfftn',
'irfftn', 'rfft2', 'irfft2', 'fft2', 'ifft2', 'fftn', 'ifftn']
from numpy.core import (array, asarray, zeros, swapaxes, shape, conjugate,
take, sqrt)
from . import fftpack_lite as fftpack
_fft_cache = {}
_real_fft_cache = {}
def _raw_fft(a, n=None, axis=-1, init_function=fftpack.cffti,
work_function=fftpack.cfftf, fft_cache=_fft_cache):
a = asarray(a)
if n is None:
n = a.shape[axis]
if n < 1:
raise ValueError("Invalid number of FFT data points (%d) specified."
% n)
try:
# Thread-safety note: We rely on list.pop() here to atomically
# retrieve-and-remove a wsave from the cache. This ensures that no
# other thread can get the same wsave while we're using it.
wsave = fft_cache.setdefault(n, []).pop()
except (IndexError):
wsave = init_function(n)
if a.shape[axis] != n:
s = list(a.shape)
if s[axis] > n:
index = [slice(None)]*len(s)
index[axis] = slice(0, n)
a = a[index]
else:
index = [slice(None)]*len(s)
index[axis] = slice(0, s[axis])
s[axis] = n
z = zeros(s, a.dtype.char)
z[index] = a
a = z
if axis != -1:
a = swapaxes(a, axis, -1)
r = work_function(a, wsave)
if axis != -1:
r = swapaxes(r, axis, -1)
# As soon as we put wsave back into the cache, another thread could pick it
# up and start using it, so we must not do this until after we're
# completely done using it ourselves.
fft_cache[n].append(wsave)
return r
def _unitary(norm):
if norm not in (None, "ortho"):
raise ValueError("Invalid norm value %s, should be None or \"ortho\"."
% norm)
return norm is not None
def fft(a, n=None, axis=-1, norm=None):
"""
Compute the one-dimensional discrete Fourier Transform.
This function computes the one-dimensional *n*-point discrete Fourier
Transform (DFT) with the efficient Fast Fourier Transform (FFT)
algorithm [CT].
Parameters
----------
a : array_like
Input array, can be complex.
n : int, optional
Length of the transformed axis of the output.
If `n` is smaller than the length of the input, the input is cropped.
If it is larger, the input is padded with zeros. If `n` is not given,
the length of the input along the axis specified by `axis` is used.
axis : int, optional
Axis over which to compute the FFT. If not given, the last axis is
used.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : complex ndarray
The truncated or zero-padded input, transformed along the axis
indicated by `axis`, or the last one if `axis` is not specified.
Raises
------
IndexError
if `axes` is larger than the last axis of `a`.
See Also
--------
numpy.fft : for definition of the DFT and conventions used.
ifft : The inverse of `fft`.
fft2 : The two-dimensional FFT.
fftn : The *n*-dimensional FFT.
rfftn : The *n*-dimensional FFT of real input.
fftfreq : Frequency bins for given FFT parameters.
Notes
-----
FFT (Fast Fourier Transform) refers to a way the discrete Fourier
Transform (DFT) can be calculated efficiently, by using symmetries in the
calculated terms. The symmetry is highest when `n` is a power of 2, and
the transform is therefore most efficient for these sizes.
The DFT is defined, with the conventions used in this implementation, in
the documentation for the `numpy.fft` module.
References
----------
.. [CT] Cooley, James W., and John W. Tukey, 1965, "An algorithm for the
machine calculation of complex Fourier series," *Math. Comput.*
19: 297-301.
Examples
--------
>>> np.fft.fft(np.exp(2j * np.pi * np.arange(8) / 8))
array([ -3.44505240e-16 +1.14383329e-17j,
8.00000000e+00 -5.71092652e-15j,
2.33482938e-16 +1.22460635e-16j,
1.64863782e-15 +1.77635684e-15j,
9.95839695e-17 +2.33482938e-16j,
0.00000000e+00 +1.66837030e-15j,
1.14383329e-17 +1.22460635e-16j,
-1.64863782e-15 +1.77635684e-15j])
>>> import matplotlib.pyplot as plt
>>> t = np.arange(256)
>>> sp = np.fft.fft(np.sin(t))
>>> freq = np.fft.fftfreq(t.shape[-1])
>>> plt.plot(freq, sp.real, freq, sp.imag)
[<matplotlib.lines.Line2D object at 0x...>, <matplotlib.lines.Line2D object at 0x...>]
>>> plt.show()
In this example, real input has an FFT which is Hermitian, i.e., symmetric
in the real part and anti-symmetric in the imaginary part, as described in
the `numpy.fft` documentation.
"""
a = asarray(a).astype(complex)
if n is None:
n = a.shape[axis]
output = _raw_fft(a, n, axis, fftpack.cffti, fftpack.cfftf, _fft_cache)
if _unitary(norm):
output *= 1 / sqrt(n)
return output
def ifft(a, n=None, axis=-1, norm=None):
"""
Compute the one-dimensional inverse discrete Fourier Transform.
This function computes the inverse of the one-dimensional *n*-point
discrete Fourier transform computed by `fft`. In other words,
``ifft(fft(a)) == a`` to within numerical accuracy.
For a general description of the algorithm and definitions,
see `numpy.fft`.
The input should be ordered in the same way as is returned by `fft`,
i.e., ``a[0]`` should contain the zero frequency term,
``a[1:n/2+1]`` should contain the positive-frequency terms, and
``a[n/2+1:]`` should contain the negative-frequency terms, in order of
decreasingly negative frequency. See `numpy.fft` for details.
Parameters
----------
a : array_like
Input array, can be complex.
n : int, optional
Length of the transformed axis of the output.
If `n` is smaller than the length of the input, the input is cropped.
If it is larger, the input is padded with zeros. If `n` is not given,
the length of the input along the axis specified by `axis` is used.
See notes about padding issues.
axis : int, optional
Axis over which to compute the inverse DFT. If not given, the last
axis is used.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : complex ndarray
The truncated or zero-padded input, transformed along the axis
indicated by `axis`, or the last one if `axis` is not specified.
Raises
------
IndexError
If `axes` is larger than the last axis of `a`.
See Also
--------
numpy.fft : An introduction, with definitions and general explanations.
fft : The one-dimensional (forward) FFT, of which `ifft` is the inverse
ifft2 : The two-dimensional inverse FFT.
ifftn : The n-dimensional inverse FFT.
Notes
-----
If the input parameter `n` is larger than the size of the input, the input
is padded by appending zeros at the end. Even though this is the common
approach, it might lead to surprising results. If a different padding is
desired, it must be performed before calling `ifft`.
Examples
--------
>>> np.fft.ifft([0, 4, 0, 0])
array([ 1.+0.j, 0.+1.j, -1.+0.j, 0.-1.j])
Create and plot a band-limited signal with random phases:
>>> import matplotlib.pyplot as plt
>>> t = np.arange(400)
>>> n = np.zeros((400,), dtype=complex)
>>> n[40:60] = np.exp(1j*np.random.uniform(0, 2*np.pi, (20,)))
>>> s = np.fft.ifft(n)
>>> plt.plot(t, s.real, 'b-', t, s.imag, 'r--')
[<matplotlib.lines.Line2D object at 0x...>, <matplotlib.lines.Line2D object at 0x...>]
>>> plt.legend(('real', 'imaginary'))
<matplotlib.legend.Legend object at 0x...>
>>> plt.show()
"""
# The copy may be required for multithreading.
a = array(a, copy=True, dtype=complex)
if n is None:
n = a.shape[axis]
unitary = _unitary(norm)
output = _raw_fft(a, n, axis, fftpack.cffti, fftpack.cfftb, _fft_cache)
return output * (1 / (sqrt(n) if unitary else n))
def rfft(a, n=None, axis=-1, norm=None):
"""
Compute the one-dimensional discrete Fourier Transform for real input.
This function computes the one-dimensional *n*-point discrete Fourier
Transform (DFT) of a real-valued array by means of an efficient algorithm
called the Fast Fourier Transform (FFT).
Parameters
----------
a : array_like
Input array
n : int, optional
Number of points along transformation axis in the input to use.
If `n` is smaller than the length of the input, the input is cropped.
If it is larger, the input is padded with zeros. If `n` is not given,
the length of the input along the axis specified by `axis` is used.
axis : int, optional
Axis over which to compute the FFT. If not given, the last axis is
used.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : complex ndarray
The truncated or zero-padded input, transformed along the axis
indicated by `axis`, or the last one if `axis` is not specified.
If `n` is even, the length of the transformed axis is ``(n/2)+1``.
If `n` is odd, the length is ``(n+1)/2``.
Raises
------
IndexError
If `axis` is larger than the last axis of `a`.
See Also
--------
numpy.fft : For definition of the DFT and conventions used.
irfft : The inverse of `rfft`.
fft : The one-dimensional FFT of general (complex) input.
fftn : The *n*-dimensional FFT.
rfftn : The *n*-dimensional FFT of real input.
Notes
-----
When the DFT is computed for purely real input, the output is
Hermitian-symmetric, i.e. the negative frequency terms are just the complex
conjugates of the corresponding positive-frequency terms, and the
negative-frequency terms are therefore redundant. This function does not
compute the negative frequency terms, and the length of the transformed
axis of the output is therefore ``n//2 + 1``.
When ``A = rfft(a)`` and fs is the sampling frequency, ``A[0]`` contains
the zero-frequency term 0*fs, which is real due to Hermitian symmetry.
If `n` is even, ``A[-1]`` contains the term representing both positive
and negative Nyquist frequency (+fs/2 and -fs/2), and must also be purely
real. If `n` is odd, there is no term at fs/2; ``A[-1]`` contains
the largest positive frequency (fs/2*(n-1)/n), and is complex in the
general case.
If the input `a` contains an imaginary part, it is silently discarded.
Examples
--------
>>> np.fft.fft([0, 1, 0, 0])
array([ 1.+0.j, 0.-1.j, -1.+0.j, 0.+1.j])
>>> np.fft.rfft([0, 1, 0, 0])
array([ 1.+0.j, 0.-1.j, -1.+0.j])
Notice how the final element of the `fft` output is the complex conjugate
of the second element, for real input. For `rfft`, this symmetry is
exploited to compute only the non-negative frequency terms.
"""
# The copy may be required for multithreading.
a = array(a, copy=True, dtype=float)
output = _raw_fft(a, n, axis, fftpack.rffti, fftpack.rfftf,
_real_fft_cache)
if _unitary(norm):
output *= 1 / sqrt(a.shape[axis])
return output
def irfft(a, n=None, axis=-1, norm=None):
"""
Compute the inverse of the n-point DFT for real input.
This function computes the inverse of the one-dimensional *n*-point
discrete Fourier Transform of real input computed by `rfft`.
In other words, ``irfft(rfft(a), len(a)) == a`` to within numerical
accuracy. (See Notes below for why ``len(a)`` is necessary here.)
The input is expected to be in the form returned by `rfft`, i.e. the
real zero-frequency term followed by the complex positive frequency terms
in order of increasing frequency. Since the discrete Fourier Transform of
real input is Hermitian-symmetric, the negative frequency terms are taken
to be the complex conjugates of the corresponding positive frequency terms.
Parameters
----------
a : array_like
The input array.
n : int, optional
Length of the transformed axis of the output.
For `n` output points, ``n//2+1`` input points are necessary. If the
input is longer than this, it is cropped. If it is shorter than this,
it is padded with zeros. If `n` is not given, it is determined from
the length of the input along the axis specified by `axis`.
axis : int, optional
Axis over which to compute the inverse FFT. If not given, the last
axis is used.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : ndarray
The truncated or zero-padded input, transformed along the axis
indicated by `axis`, or the last one if `axis` is not specified.
The length of the transformed axis is `n`, or, if `n` is not given,
``2*(m-1)`` where ``m`` is the length of the transformed axis of the
input. To get an odd number of output points, `n` must be specified.
Raises
------
IndexError
If `axis` is larger than the last axis of `a`.
See Also
--------
numpy.fft : For definition of the DFT and conventions used.
rfft : The one-dimensional FFT of real input, of which `irfft` is inverse.
fft : The one-dimensional FFT.
irfft2 : The inverse of the two-dimensional FFT of real input.
irfftn : The inverse of the *n*-dimensional FFT of real input.
Notes
-----
Returns the real valued `n`-point inverse discrete Fourier transform
of `a`, where `a` contains the non-negative frequency terms of a
Hermitian-symmetric sequence. `n` is the length of the result, not the
input.
If you specify an `n` such that `a` must be zero-padded or truncated, the
extra/removed values will be added/removed at high frequencies. One can
thus resample a series to `m` points via Fourier interpolation by:
``a_resamp = irfft(rfft(a), m)``.
Examples
--------
>>> np.fft.ifft([1, -1j, -1, 1j])
array([ 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j])
>>> np.fft.irfft([1, -1j, -1])
array([ 0., 1., 0., 0.])
Notice how the last term in the input to the ordinary `ifft` is the
complex conjugate of the second term, and the output has zero imaginary
part everywhere. When calling `irfft`, the negative frequencies are not
specified, and the output array is purely real.
"""
# The copy may be required for multithreading.
a = array(a, copy=True, dtype=complex)
if n is None:
n = (a.shape[axis] - 1) * 2
unitary = _unitary(norm)
output = _raw_fft(a, n, axis, fftpack.rffti, fftpack.rfftb,
_real_fft_cache)
return output * (1 / (sqrt(n) if unitary else n))
def hfft(a, n=None, axis=-1, norm=None):
"""
Compute the FFT of a signal which has Hermitian symmetry (real spectrum).
Parameters
----------
a : array_like
The input array.
n : int, optional
Length of the transformed axis of the output.
For `n` output points, ``n//2+1`` input points are necessary. If the
input is longer than this, it is cropped. If it is shorter than this,
it is padded with zeros. If `n` is not given, it is determined from
the length of the input along the axis specified by `axis`.
axis : int, optional
Axis over which to compute the FFT. If not given, the last
axis is used.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : ndarray
The truncated or zero-padded input, transformed along the axis
indicated by `axis`, or the last one if `axis` is not specified.
The length of the transformed axis is `n`, or, if `n` is not given,
``2*(m-1)`` where ``m`` is the length of the transformed axis of the
input. To get an odd number of output points, `n` must be specified.
Raises
------
IndexError
If `axis` is larger than the last axis of `a`.
See also
--------
rfft : Compute the one-dimensional FFT for real input.
ihfft : The inverse of `hfft`.
Notes
-----
`hfft`/`ihfft` are a pair analogous to `rfft`/`irfft`, but for the
opposite case: here the signal has Hermitian symmetry in the time domain
and is real in the frequency domain. So here it's `hfft` for which
you must supply the length of the result if it is to be odd:
``ihfft(hfft(a), len(a)) == a``, within numerical accuracy.
Examples
--------
>>> signal = np.array([1, 2, 3, 4, 3, 2])
>>> np.fft.fft(signal)
array([ 15.+0.j, -4.+0.j, 0.+0.j, -1.-0.j, 0.+0.j, -4.+0.j])
>>> np.fft.hfft(signal[:4]) # Input first half of signal
array([ 15., -4., 0., -1., 0., -4.])
>>> np.fft.hfft(signal, 6) # Input entire signal and truncate
array([ 15., -4., 0., -1., 0., -4.])
>>> signal = np.array([[1, 1.j], [-1.j, 2]])
>>> np.conj(signal.T) - signal # check Hermitian symmetry
array([[ 0.-0.j, 0.+0.j],
[ 0.+0.j, 0.-0.j]])
>>> freq_spectrum = np.fft.hfft(signal)
>>> freq_spectrum
array([[ 1., 1.],
[ 2., -2.]])
"""
# The copy may be required for multithreading.
a = array(a, copy=True, dtype=complex)
if n is None:
n = (a.shape[axis] - 1) * 2
unitary = _unitary(norm)
return irfft(conjugate(a), n, axis) * (sqrt(n) if unitary else n)
def ihfft(a, n=None, axis=-1, norm=None):
"""
Compute the inverse FFT of a signal which has Hermitian symmetry.
Parameters
----------
a : array_like
Input array.
n : int, optional
Length of the inverse FFT.
Number of points along transformation axis in the input to use.
If `n` is smaller than the length of the input, the input is cropped.
If it is larger, the input is padded with zeros. If `n` is not given,
the length of the input along the axis specified by `axis` is used.
axis : int, optional
Axis over which to compute the inverse FFT. If not given, the last
axis is used.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : complex ndarray
The truncated or zero-padded input, transformed along the axis
indicated by `axis`, or the last one if `axis` is not specified.
If `n` is even, the length of the transformed axis is ``(n/2)+1``.
If `n` is odd, the length is ``(n+1)/2``.
See also
--------
hfft, irfft
Notes
-----
`hfft`/`ihfft` are a pair analogous to `rfft`/`irfft`, but for the
opposite case: here the signal has Hermitian symmetry in the time domain
and is real in the frequency domain. So here it's `hfft` for which
you must supply the length of the result if it is to be odd:
``ihfft(hfft(a), len(a)) == a``, within numerical accuracy.
Examples
--------
>>> spectrum = np.array([ 15, -4, 0, -1, 0, -4])
>>> np.fft.ifft(spectrum)
array([ 1.+0.j, 2.-0.j, 3.+0.j, 4.+0.j, 3.+0.j, 2.-0.j])
>>> np.fft.ihfft(spectrum)
array([ 1.-0.j, 2.-0.j, 3.-0.j, 4.-0.j])
"""
# The copy may be required for multithreading.
a = array(a, copy=True, dtype=float)
if n is None:
n = a.shape[axis]
unitary = _unitary(norm)
output = conjugate(rfft(a, n, axis))
return output * (1 / (sqrt(n) if unitary else n))
def _cook_nd_args(a, s=None, axes=None, invreal=0):
if s is None:
shapeless = 1
if axes is None:
s = list(a.shape)
else:
s = take(a.shape, axes)
else:
shapeless = 0
s = list(s)
if axes is None:
axes = list(range(-len(s), 0))
if len(s) != len(axes):
raise ValueError("Shape and axes have different lengths.")
if invreal and shapeless:
s[-1] = (a.shape[axes[-1]] - 1) * 2
return s, axes
def _raw_fftnd(a, s=None, axes=None, function=fft, norm=None):
a = asarray(a)
s, axes = _cook_nd_args(a, s, axes)
itl = list(range(len(axes)))
itl.reverse()
for ii in itl:
a = function(a, n=s[ii], axis=axes[ii], norm=norm)
return a
def fftn(a, s=None, axes=None, norm=None):
"""
Compute the N-dimensional discrete Fourier Transform.
This function computes the *N*-dimensional discrete Fourier Transform over
any number of axes in an *M*-dimensional array by means of the Fast Fourier
Transform (FFT).
Parameters
----------
a : array_like
Input array, can be complex.
s : sequence of ints, optional
Shape (length of each transformed axis) of the output
(`s[0]` refers to axis 0, `s[1]` to axis 1, etc.).
This corresponds to `n` for `fft(x, n)`.
Along any axis, if the given shape is smaller than that of the input,
the input is cropped. If it is larger, the input is padded with zeros.
        If `s` is not given, the shape of the input along the axes specified
by `axes` is used.
axes : sequence of ints, optional
Axes over which to compute the FFT. If not given, the last ``len(s)``
axes are used, or all axes if `s` is also not specified.
        Repeated indices in `axes` mean that the transform over that axis is
performed multiple times.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : complex ndarray
The truncated or zero-padded input, transformed along the axes
indicated by `axes`, or by a combination of `s` and `a`,
as explained in the parameters section above.
Raises
------
ValueError
If `s` and `axes` have different length.
IndexError
        If an element of `axes` is larger than the number of axes of `a`.
See Also
--------
numpy.fft : Overall view of discrete Fourier transforms, with definitions
and conventions used.
ifftn : The inverse of `fftn`, the inverse *n*-dimensional FFT.
fft : The one-dimensional FFT, with definitions and conventions used.
rfftn : The *n*-dimensional FFT of real input.
fft2 : The two-dimensional FFT.
    fftshift : Shifts zero-frequency terms to the center of the array.
Notes
-----
The output, analogously to `fft`, contains the term for zero frequency in
the low-order corner of all axes, the positive frequency terms in the
first half of all axes, the term for the Nyquist frequency in the middle
of all axes and the negative frequency terms in the second half of all
axes, in order of decreasingly negative frequency.
See `numpy.fft` for details, definitions and conventions used.
Examples
--------
>>> a = np.mgrid[:3, :3, :3][0]
>>> np.fft.fftn(a, axes=(1, 2))
array([[[ 0.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j]],
[[ 9.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j]],
[[ 18.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j]]])
>>> np.fft.fftn(a, (2, 2), axes=(0, 1))
array([[[ 2.+0.j, 2.+0.j, 2.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j]],
[[-2.+0.j, -2.+0.j, -2.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j]]])
>>> import matplotlib.pyplot as plt
>>> [X, Y] = np.meshgrid(2 * np.pi * np.arange(200) / 12,
... 2 * np.pi * np.arange(200) / 34)
>>> S = np.sin(X) + np.cos(Y) + np.random.uniform(0, 1, X.shape)
>>> FS = np.fft.fftn(S)
>>> plt.imshow(np.log(np.abs(np.fft.fftshift(FS))**2))
<matplotlib.image.AxesImage object at 0x...>
>>> plt.show()
"""
return _raw_fftnd(a, s, axes, fft, norm)
def ifftn(a, s=None, axes=None, norm=None):
"""
Compute the N-dimensional inverse discrete Fourier Transform.
This function computes the inverse of the N-dimensional discrete
Fourier Transform over any number of axes in an M-dimensional array by
means of the Fast Fourier Transform (FFT). In other words,
``ifftn(fftn(a)) == a`` to within numerical accuracy.
For a description of the definitions and conventions used, see `numpy.fft`.
The input, analogously to `ifft`, should be ordered in the same way as is
returned by `fftn`, i.e. it should have the term for zero frequency
in all axes in the low-order corner, the positive frequency terms in the
first half of all axes, the term for the Nyquist frequency in the middle
of all axes and the negative frequency terms in the second half of all
axes, in order of decreasingly negative frequency.
Parameters
----------
a : array_like
Input array, can be complex.
s : sequence of ints, optional
Shape (length of each transformed axis) of the output
(``s[0]`` refers to axis 0, ``s[1]`` to axis 1, etc.).
This corresponds to ``n`` for ``ifft(x, n)``.
Along any axis, if the given shape is smaller than that of the input,
the input is cropped. If it is larger, the input is padded with zeros.
        If `s` is not given, the shape of the input along the axes specified
by `axes` is used. See notes for issue on `ifft` zero padding.
axes : sequence of ints, optional
Axes over which to compute the IFFT. If not given, the last ``len(s)``
axes are used, or all axes if `s` is also not specified.
        Repeated indices in `axes` mean that the inverse transform over that
axis is performed multiple times.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : complex ndarray
The truncated or zero-padded input, transformed along the axes
indicated by `axes`, or by a combination of `s` or `a`,
as explained in the parameters section above.
Raises
------
ValueError
If `s` and `axes` have different length.
IndexError
        If an element of `axes` is larger than the number of axes of `a`.
See Also
--------
numpy.fft : Overall view of discrete Fourier transforms, with definitions
and conventions used.
fftn : The forward *n*-dimensional FFT, of which `ifftn` is the inverse.
ifft : The one-dimensional inverse FFT.
ifft2 : The two-dimensional inverse FFT.
ifftshift : Undoes `fftshift`, shifts zero-frequency terms to beginning
of array.
Notes
-----
See `numpy.fft` for definitions and conventions used.
Zero-padding, analogously with `ifft`, is performed by appending zeros to
the input along the specified dimension. Although this is the common
approach, it might lead to surprising results. If another form of zero
padding is desired, it must be performed before `ifftn` is called.
Examples
--------
>>> a = np.eye(4)
>>> np.fft.ifftn(np.fft.fftn(a, axes=(0,)), axes=(1,))
array([[ 1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j]])
Create and plot an image with band-limited frequency content:
>>> import matplotlib.pyplot as plt
>>> n = np.zeros((200,200), dtype=complex)
>>> n[60:80, 20:40] = np.exp(1j*np.random.uniform(0, 2*np.pi, (20, 20)))
>>> im = np.fft.ifftn(n).real
>>> plt.imshow(im)
<matplotlib.image.AxesImage object at 0x...>
>>> plt.show()
"""
return _raw_fftnd(a, s, axes, ifft, norm)
def fft2(a, s=None, axes=(-2, -1), norm=None):
"""
    Compute the 2-dimensional discrete Fourier Transform.
This function computes the *n*-dimensional discrete Fourier Transform
over any axes in an *M*-dimensional array by means of the
Fast Fourier Transform (FFT). By default, the transform is computed over
the last two axes of the input array, i.e., a 2-dimensional FFT.
Parameters
----------
a : array_like
Input array, can be complex
s : sequence of ints, optional
Shape (length of each transformed axis) of the output
(`s[0]` refers to axis 0, `s[1]` to axis 1, etc.).
This corresponds to `n` for `fft(x, n)`.
Along each axis, if the given shape is smaller than that of the input,
the input is cropped. If it is larger, the input is padded with zeros.
        If `s` is not given, the shape of the input along the axes specified
by `axes` is used.
axes : sequence of ints, optional
Axes over which to compute the FFT. If not given, the last two
axes are used. A repeated index in `axes` means the transform over
that axis is performed multiple times. A one-element sequence means
that a one-dimensional FFT is performed.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : complex ndarray
The truncated or zero-padded input, transformed along the axes
indicated by `axes`, or the last two axes if `axes` is not given.
Raises
------
ValueError
If `s` and `axes` have different length, or `axes` not given and
``len(s) != 2``.
IndexError
        If an element of `axes` is larger than the number of axes of `a`.
See Also
--------
numpy.fft : Overall view of discrete Fourier transforms, with definitions
and conventions used.
ifft2 : The inverse two-dimensional FFT.
fft : The one-dimensional FFT.
fftn : The *n*-dimensional FFT.
fftshift : Shifts zero-frequency terms to the center of the array.
For two-dimensional input, swaps first and third quadrants, and second
and fourth quadrants.
Notes
-----
`fft2` is just `fftn` with a different default for `axes`.
The output, analogously to `fft`, contains the term for zero frequency in
the low-order corner of the transformed axes, the positive frequency terms
in the first half of these axes, the term for the Nyquist frequency in the
middle of the axes and the negative frequency terms in the second half of
the axes, in order of decreasingly negative frequency.
See `fftn` for details and a plotting example, and `numpy.fft` for
definitions and conventions used.
Examples
--------
>>> a = np.mgrid[:5, :5][0]
>>> np.fft.fft2(a)
array([[ 50.0 +0.j , 0.0 +0.j , 0.0 +0.j ,
0.0 +0.j , 0.0 +0.j ],
[-12.5+17.20477401j, 0.0 +0.j , 0.0 +0.j ,
0.0 +0.j , 0.0 +0.j ],
[-12.5 +4.0614962j , 0.0 +0.j , 0.0 +0.j ,
0.0 +0.j , 0.0 +0.j ],
[-12.5 -4.0614962j , 0.0 +0.j , 0.0 +0.j ,
0.0 +0.j , 0.0 +0.j ],
[-12.5-17.20477401j, 0.0 +0.j , 0.0 +0.j ,
0.0 +0.j , 0.0 +0.j ]])
"""
return _raw_fftnd(a, s, axes, fft, norm)
def ifft2(a, s=None, axes=(-2, -1), norm=None):
"""
Compute the 2-dimensional inverse discrete Fourier Transform.
This function computes the inverse of the 2-dimensional discrete Fourier
Transform over any number of axes in an M-dimensional array by means of
the Fast Fourier Transform (FFT). In other words, ``ifft2(fft2(a)) == a``
to within numerical accuracy. By default, the inverse transform is
computed over the last two axes of the input array.
The input, analogously to `ifft`, should be ordered in the same way as is
returned by `fft2`, i.e. it should have the term for zero frequency
in the low-order corner of the two axes, the positive frequency terms in
the first half of these axes, the term for the Nyquist frequency in the
middle of the axes and the negative frequency terms in the second half of
both axes, in order of decreasingly negative frequency.
Parameters
----------
a : array_like
Input array, can be complex.
s : sequence of ints, optional
Shape (length of each axis) of the output (``s[0]`` refers to axis 0,
``s[1]`` to axis 1, etc.). This corresponds to `n` for ``ifft(x, n)``.
Along each axis, if the given shape is smaller than that of the input,
the input is cropped. If it is larger, the input is padded with zeros.
        If `s` is not given, the shape of the input along the axes specified
by `axes` is used. See notes for issue on `ifft` zero padding.
axes : sequence of ints, optional
Axes over which to compute the FFT. If not given, the last two
axes are used. A repeated index in `axes` means the transform over
that axis is performed multiple times. A one-element sequence means
that a one-dimensional FFT is performed.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : complex ndarray
The truncated or zero-padded input, transformed along the axes
indicated by `axes`, or the last two axes if `axes` is not given.
Raises
------
ValueError
If `s` and `axes` have different length, or `axes` not given and
``len(s) != 2``.
IndexError
        If an element of `axes` is larger than the number of axes of `a`.
See Also
--------
numpy.fft : Overall view of discrete Fourier transforms, with definitions
and conventions used.
fft2 : The forward 2-dimensional FFT, of which `ifft2` is the inverse.
ifftn : The inverse of the *n*-dimensional FFT.
fft : The one-dimensional FFT.
ifft : The one-dimensional inverse FFT.
Notes
-----
`ifft2` is just `ifftn` with a different default for `axes`.
See `ifftn` for details and a plotting example, and `numpy.fft` for
definition and conventions used.
Zero-padding, analogously with `ifft`, is performed by appending zeros to
the input along the specified dimension. Although this is the common
approach, it might lead to surprising results. If another form of zero
padding is desired, it must be performed before `ifft2` is called.
Examples
--------
>>> a = 4 * np.eye(4)
>>> np.fft.ifft2(a)
array([[ 1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j],
[ 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],
[ 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j]])
"""
return _raw_fftnd(a, s, axes, ifft, norm)
def rfftn(a, s=None, axes=None, norm=None):
"""
Compute the N-dimensional discrete Fourier Transform for real input.
This function computes the N-dimensional discrete Fourier Transform over
any number of axes in an M-dimensional real array by means of the Fast
Fourier Transform (FFT). By default, all axes are transformed, with the
real transform performed over the last axis, while the remaining
transforms are complex.
Parameters
----------
a : array_like
Input array, taken to be real.
s : sequence of ints, optional
Shape (length along each transformed axis) to use from the input.
(``s[0]`` refers to axis 0, ``s[1]`` to axis 1, etc.).
The final element of `s` corresponds to `n` for ``rfft(x, n)``, while
for the remaining axes, it corresponds to `n` for ``fft(x, n)``.
Along any axis, if the given shape is smaller than that of the input,
the input is cropped. If it is larger, the input is padded with zeros.
        If `s` is not given, the shape of the input along the axes specified
by `axes` is used.
axes : sequence of ints, optional
Axes over which to compute the FFT. If not given, the last ``len(s)``
axes are used, or all axes if `s` is also not specified.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : complex ndarray
The truncated or zero-padded input, transformed along the axes
indicated by `axes`, or by a combination of `s` and `a`,
as explained in the parameters section above.
The length of the last axis transformed will be ``s[-1]//2+1``,
while the remaining transformed axes will have lengths according to
`s`, or unchanged from the input.
Raises
------
ValueError
If `s` and `axes` have different length.
IndexError
        If an element of `axes` is larger than the number of axes of `a`.
See Also
--------
irfftn : The inverse of `rfftn`, i.e. the inverse of the n-dimensional FFT
of real input.
fft : The one-dimensional FFT, with definitions and conventions used.
rfft : The one-dimensional FFT of real input.
fftn : The n-dimensional FFT.
rfft2 : The two-dimensional FFT of real input.
Notes
-----
The transform for real input is performed over the last transformation
axis, as by `rfft`, then the transform over the remaining axes is
performed as by `fftn`. The order of the output is as for `rfft` for the
final transformation axis, and as for `fftn` for the remaining
transformation axes.
See `fft` for details, definitions and conventions used.
Examples
--------
>>> a = np.ones((2, 2, 2))
>>> np.fft.rfftn(a)
array([[[ 8.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j]],
[[ 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j]]])
>>> np.fft.rfftn(a, axes=(2, 0))
array([[[ 4.+0.j, 0.+0.j],
[ 4.+0.j, 0.+0.j]],
[[ 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j]]])
"""
# The copy may be required for multithreading.
a = array(a, copy=True, dtype=float)
s, axes = _cook_nd_args(a, s, axes)
a = rfft(a, s[-1], axes[-1], norm)
for ii in range(len(axes)-1):
a = fft(a, s[ii], axes[ii], norm)
return a
def rfft2(a, s=None, axes=(-2, -1), norm=None):
"""
Compute the 2-dimensional FFT of a real array.
Parameters
----------
a : array
Input array, taken to be real.
s : sequence of ints, optional
Shape of the FFT.
axes : sequence of ints, optional
Axes over which to compute the FFT.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : ndarray
The result of the real 2-D FFT.
See Also
--------
rfftn : Compute the N-dimensional discrete Fourier Transform for real
input.
Notes
-----
This is really just `rfftn` with different default behavior.
For more details see `rfftn`.
"""
return rfftn(a, s, axes, norm)
def irfftn(a, s=None, axes=None, norm=None):
"""
Compute the inverse of the N-dimensional FFT of real input.
This function computes the inverse of the N-dimensional discrete
Fourier Transform for real input over any number of axes in an
M-dimensional array by means of the Fast Fourier Transform (FFT). In
other words, ``irfftn(rfftn(a), a.shape) == a`` to within numerical
accuracy. (The ``a.shape`` is necessary like ``len(a)`` is for `irfft`,
and for the same reason.)
The input should be ordered in the same way as is returned by `rfftn`,
i.e. as for `irfft` for the final transformation axis, and as for `ifftn`
along all the other axes.
Parameters
----------
a : array_like
Input array.
s : sequence of ints, optional
Shape (length of each transformed axis) of the output
(``s[0]`` refers to axis 0, ``s[1]`` to axis 1, etc.). `s` is also the
number of input points used along this axis, except for the last axis,
where ``s[-1]//2+1`` points of the input are used.
Along any axis, if the shape indicated by `s` is smaller than that of
the input, the input is cropped. If it is larger, the input is padded
with zeros. If `s` is not given, the shape of the input along the
axes specified by `axes` is used.
axes : sequence of ints, optional
Axes over which to compute the inverse FFT. If not given, the last
`len(s)` axes are used, or all axes if `s` is also not specified.
        Repeated indices in `axes` mean that the inverse transform over that
axis is performed multiple times.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : ndarray
The truncated or zero-padded input, transformed along the axes
indicated by `axes`, or by a combination of `s` or `a`,
as explained in the parameters section above.
The length of each transformed axis is as given by the corresponding
element of `s`, or the length of the input in every axis except for the
last one if `s` is not given. In the final transformed axis the length
of the output when `s` is not given is ``2*(m-1)`` where ``m`` is the
length of the final transformed axis of the input. To get an odd
number of output points in the final axis, `s` must be specified.
Raises
------
ValueError
If `s` and `axes` have different length.
IndexError
        If an element of `axes` is larger than the number of axes of `a`.
See Also
--------
rfftn : The forward n-dimensional FFT of real input,
        of which `irfftn` is the inverse.
fft : The one-dimensional FFT, with definitions and conventions used.
irfft : The inverse of the one-dimensional FFT of real input.
irfft2 : The inverse of the two-dimensional FFT of real input.
Notes
-----
See `fft` for definitions and conventions used.
See `rfft` for definitions and conventions used for real input.
Examples
--------
>>> a = np.zeros((3, 2, 2))
>>> a[0, 0, 0] = 3 * 2 * 2
>>> np.fft.irfftn(a)
array([[[ 1., 1.],
[ 1., 1.]],
[[ 1., 1.],
[ 1., 1.]],
[[ 1., 1.],
[ 1., 1.]]])
"""
# The copy may be required for multithreading.
a = array(a, copy=True, dtype=complex)
s, axes = _cook_nd_args(a, s, axes, invreal=1)
for ii in range(len(axes)-1):
a = ifft(a, s[ii], axes[ii], norm)
a = irfft(a, s[-1], axes[-1], norm)
return a
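# Editor's note (not in the original source): as the docstring above stresses,
# an odd-length final axis cannot be inferred from the half-size spectrum, so
# the round trip only holds when the original shape is passed back explicitly:
#
#     x = np.random.rand(3, 5)                                   # odd last axis
#     np.allclose(np.fft.irfftn(np.fft.rfftn(x), x.shape), x)    # -> True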
def irfft2(a, s=None, axes=(-2, -1), norm=None):
"""
Compute the 2-dimensional inverse FFT of a real array.
Parameters
----------
a : array_like
The input array
s : sequence of ints, optional
Shape of the inverse FFT.
axes : sequence of ints, optional
The axes over which to compute the inverse fft.
Default is the last two axes.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : ndarray
The result of the inverse real 2-D FFT.
See Also
--------
irfftn : Compute the inverse of the N-dimensional FFT of real input.
Notes
-----
This is really `irfftn` with different defaults.
For more details see `irfftn`.
"""
return irfftn(a, s, axes, norm)
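# --- Editor's sketch (not part of the numpy source) --------------------------
# A minimal, self-contained demonstration of the ``norm="ortho"`` mode that
# the docstrings above document: with orthonormal scaling both directions
# carry a 1/sqrt(n) factor, so the transform is unitary -- energy is preserved
# (Parseval) and fftn/ifftn invert each other when given the same flag.
if __name__ == "__main__":
    import numpy as np

    x = np.random.rand(4, 4)
    X = np.fft.fftn(x, norm="ortho")
    # Unitary scaling preserves the L2 norm: sum |x|**2 == sum |X|**2.
    assert np.allclose(np.linalg.norm(x), np.linalg.norm(X))
    # The inverse with the same norm recovers the input exactly.
    assert np.allclose(np.fft.ifftn(X, norm="ortho"), x)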
| LumPenPacK/NetworkExtractionFromImages | win_build/nefi2_win_amd64_msvc_2015/site-packages/numpy/fft/fftpack.py | Python | bsd-2-clause | 45,497 |
#!/usr/bin/env python
"""PyQt4 port of the tools/regexp example from Qt v4.x"""
from PySide import QtCore, QtGui
class RegExpDialog(QtGui.QDialog):
MaxCaptures = 6
def __init__(self, parent=None):
super(RegExpDialog, self).__init__(parent)
self.patternComboBox = QtGui.QComboBox()
self.patternComboBox.setEditable(True)
self.patternComboBox.setSizePolicy(QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Preferred)
patternLabel = QtGui.QLabel("&Pattern:")
patternLabel.setBuddy(self.patternComboBox)
self.escapedPatternLineEdit = QtGui.QLineEdit()
self.escapedPatternLineEdit.setReadOnly(True)
palette = self.escapedPatternLineEdit.palette()
palette.setBrush(QtGui.QPalette.Base,
palette.brush(QtGui.QPalette.Disabled, QtGui.QPalette.Base))
self.escapedPatternLineEdit.setPalette(palette)
escapedPatternLabel = QtGui.QLabel("&Escaped Pattern:")
escapedPatternLabel.setBuddy(self.escapedPatternLineEdit)
self.syntaxComboBox = QtGui.QComboBox()
self.syntaxComboBox.addItem("Regular expression v1",
QtCore.QRegExp.RegExp)
self.syntaxComboBox.addItem("Regular expression v2",
QtCore.QRegExp.RegExp2)
self.syntaxComboBox.addItem("Wildcard", QtCore.QRegExp.Wildcard)
self.syntaxComboBox.addItem("Fixed string",
QtCore.QRegExp.FixedString)
syntaxLabel = QtGui.QLabel("&Pattern Syntax:")
syntaxLabel.setBuddy(self.syntaxComboBox)
self.textComboBox = QtGui.QComboBox()
self.textComboBox.setEditable(True)
self.textComboBox.setSizePolicy(QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Preferred)
textLabel = QtGui.QLabel("&Text:")
textLabel.setBuddy(self.textComboBox)
self.caseSensitiveCheckBox = QtGui.QCheckBox("Case &Sensitive")
self.caseSensitiveCheckBox.setChecked(True)
self.minimalCheckBox = QtGui.QCheckBox("&Minimal")
indexLabel = QtGui.QLabel("Index of Match:")
self.indexEdit = QtGui.QLineEdit()
self.indexEdit.setReadOnly(True)
matchedLengthLabel = QtGui.QLabel("Matched Length:")
self.matchedLengthEdit = QtGui.QLineEdit()
self.matchedLengthEdit.setReadOnly(True)
self.captureLabels = []
self.captureEdits = []
for i in range(self.MaxCaptures):
self.captureLabels.append(QtGui.QLabel("Capture %d:" % i))
self.captureEdits.append(QtGui.QLineEdit())
self.captureEdits[i].setReadOnly(True)
self.captureLabels[0].setText("Match:")
checkBoxLayout = QtGui.QHBoxLayout()
checkBoxLayout.addWidget(self.caseSensitiveCheckBox)
checkBoxLayout.addWidget(self.minimalCheckBox)
checkBoxLayout.addStretch(1)
mainLayout = QtGui.QGridLayout()
mainLayout.addWidget(patternLabel, 0, 0)
mainLayout.addWidget(self.patternComboBox, 0, 1)
mainLayout.addWidget(escapedPatternLabel, 1, 0)
mainLayout.addWidget(self.escapedPatternLineEdit, 1, 1)
mainLayout.addWidget(syntaxLabel, 2, 0)
mainLayout.addWidget(self.syntaxComboBox, 2, 1)
mainLayout.addLayout(checkBoxLayout, 3, 0, 1, 2)
mainLayout.addWidget(textLabel, 4, 0)
mainLayout.addWidget(self.textComboBox, 4, 1)
mainLayout.addWidget(indexLabel, 5, 0)
mainLayout.addWidget(self.indexEdit, 5, 1)
mainLayout.addWidget(matchedLengthLabel, 6, 0)
mainLayout.addWidget(self.matchedLengthEdit, 6, 1)
for i in range(self.MaxCaptures):
mainLayout.addWidget(self.captureLabels[i], 7 + i, 0)
mainLayout.addWidget(self.captureEdits[i], 7 + i, 1)
self.setLayout(mainLayout)
self.patternComboBox.editTextChanged.connect(self.refresh)
self.textComboBox.editTextChanged.connect(self.refresh)
self.caseSensitiveCheckBox.toggled.connect(self.refresh)
self.minimalCheckBox.toggled.connect(self.refresh)
self.syntaxComboBox.currentIndexChanged.connect(self.refresh)
self.patternComboBox.addItem("[A-Za-z_]+([A-Za-z_0-9]*)")
self.textComboBox.addItem("(10 + delta4)* 32")
self.setWindowTitle("RegExp")
self.setFixedHeight(self.sizeHint().height())
self.refresh()
def refresh(self):
self.setUpdatesEnabled(False)
pattern = self.patternComboBox.currentText()
text = self.textComboBox.currentText()
        escaped = str(pattern)
        escaped = escaped.replace('\\', '\\\\')
        escaped = escaped.replace('"', '\\"')
        self.escapedPatternLineEdit.setText('"' + escaped + '"')
rx = QtCore.QRegExp(pattern)
cs = QtCore.Qt.CaseInsensitive
if self.caseSensitiveCheckBox.isChecked():
cs = QtCore.Qt.CaseSensitive
rx.setCaseSensitivity(cs)
rx.setMinimal(self.minimalCheckBox.isChecked())
syntax = self.syntaxComboBox.itemData(self.syntaxComboBox.currentIndex())
rx.setPatternSyntax(QtCore.QRegExp.PatternSyntax(syntax))
palette = self.patternComboBox.palette()
if rx.isValid():
palette.setColor(QtGui.QPalette.Text,
self.textComboBox.palette().color(QtGui.QPalette.Text))
else:
palette.setColor(QtGui.QPalette.Text, QtCore.Qt.red)
self.patternComboBox.setPalette(palette)
self.indexEdit.setText(str(rx.indexIn(text)))
self.matchedLengthEdit.setText(str(rx.matchedLength()))
for i in range(self.MaxCaptures):
self.captureLabels[i].setEnabled(i <= rx.numCaptures())
self.captureEdits[i].setEnabled(i <= rx.numCaptures())
self.captureEdits[i].setText(rx.cap(i))
self.setUpdatesEnabled(True)
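# Editor's sketch (not in the original example): the same QRegExp calls the
# dialog exercises above -- indexIn(), matchedLength() and cap() -- can be
# used headlessly; the pattern and text here are made up for illustration.
def demo_qregexp():
    rx = QtCore.QRegExp(r'(\d+)-(\d+)')
    if rx.indexIn('pages 10-25') != -1:
        # cap(0) is the full match; cap(1)/cap(2) are the capture groups.
        print('match %s, groups (%s, %s), length %d' % (
            rx.cap(0), rx.cap(1), rx.cap(2), rx.matchedLength()))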
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
dialog = RegExpDialog()
sys.exit(dialog.exec_())
| Southpaw-TACTIC/Team | src/python/Lib/site-packages/PySide/examples/tools/regexp.py | Python | epl-1.0 | 6,001 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from getpass import getpass
from gmusicapi import Webclient
def ask_for_credentials():
"""Make an instance of the api and attempts to login with it.
Return the authenticated api.
"""
# We're not going to upload anything, so the webclient is what we want.
api = Webclient()
logged_in = False
attempts = 0
while not logged_in and attempts < 3:
email = raw_input("Email: ")
password = getpass()
logged_in = api.login(email, password)
attempts += 1
return api
def demonstrate():
"""Demonstrate some api features."""
api = ask_for_credentials()
if not api.is_authenticated():
print "Sorry, those credentials weren't accepted."
return
print "Successfully logged in."
print
# Get all of the users songs.
# library is a big list of dictionaries, each of which contains a single song.
print "Loading library...",
library = api.get_all_songs()
print "done."
print len(library), "tracks detected."
print
# Show some info about a song. There is no guaranteed order;
# this is essentially a random song.
first_song = library[0]
print "The first song I see is '{}' by '{}'.".format(
first_song["name"],
first_song["artist"])
# We're going to create a new playlist and add a song to it.
# Songs are uniquely identified by 'song ids', so let's get the id:
song_id = first_song["id"]
print "I'm going to make a new playlist and add that song to it."
print "I'll delete it when we're finished."
print
playlist_name = raw_input("Enter a name for the playlist: ")
# Like songs, playlists have unique ids.
# Google Music allows more than one playlist of the same name;
# these ids are necessary.
playlist_id = api.create_playlist(playlist_name)
print "Made the playlist."
print
# Now let's add the song to the playlist, using their ids:
api.add_songs_to_playlist(playlist_id, song_id)
print "Added the song to the playlist."
print
# We're all done! The user can now go and see that the playlist is there.
# The web client syncs our changes in real time.
raw_input("You can now check on Google Music that the playlist exists.\n"
"When done, press enter to delete the playlist:")
api.delete_playlist(playlist_id)
print "Deleted the playlist."
# It's good practice to logout when finished.
api.logout()
print "All done!"
if __name__ == '__main__':
demonstrate()
| jimyx17/gmusic | example.py | Python | bsd-3-clause | 2,590 |
import numpy as np
import gdspy
from picwriter import toolkit as tk
import picwriter.components as pc
X_SIZE, Y_SIZE = 15000, 15000
exclusion_region = 2000.0 # region where no devices are to be fabricated
x0, y0 = X_SIZE / 2.0, Y_SIZE / 2.0 # define origin of the die
step = 100.0 # standard spacing between components
top = gdspy.Cell("top")
wgt = pc.WaveguideTemplate(
wg_width=0.45,
clad_width=10.0,
bend_radius=100,
resist="+",
fab="ETCH",
wg_layer=1,
wg_datatype=0,
clad_layer=2,
clad_datatype=0,
)
""" Add a die outline, with exclusion, from gdspy geometries found at
http://gdspy.readthedocs.io/en/latest/"""
top.add(gdspy.Rectangle((0, 0), (X_SIZE, Y_SIZE), layer=6, datatype=0))
top.add(
gdspy.Rectangle(
(0, Y_SIZE - exclusion_region), (X_SIZE, Y_SIZE), layer=7, datatype=0
)
)
top.add(gdspy.Rectangle((0, 0), (X_SIZE, exclusion_region), layer=7, datatype=0))
top.add(gdspy.Rectangle((0, 0), (exclusion_region, Y_SIZE), layer=7, datatype=0))
top.add(
gdspy.Rectangle(
(X_SIZE - exclusion_region, 0), (X_SIZE, Y_SIZE), layer=7, datatype=0
)
)
""" Add some components from the PICwriter library """
spiral_unit = gdspy.Cell("spiral_unit")
sp1 = pc.Spiral(
wgt, 1000.0, 10000, parity=1, port=(500.0 + exclusion_region + 4 * step, y0)
)
tk.add(spiral_unit, sp1)
wg1 = pc.Waveguide(
[sp1.portlist["input"]["port"], (sp1.portlist["input"]["port"][0], 4000.0)], wgt
)
wg2 = pc.Waveguide(
[
sp1.portlist["output"]["port"],
(sp1.portlist["output"]["port"][0], Y_SIZE - 4000.0),
],
wgt,
)
tk.add(spiral_unit, wg1)
tk.add(spiral_unit, wg2)
tp_bot = pc.Taper(wgt, length=100.0, end_width=0.1, **wg1.portlist["output"])
tk.add(spiral_unit, tp_bot)
gc_top = pc.GratingCouplerFocusing(
wgt,
focus_distance=20,
width=20,
length=40,
period=0.7,
dutycycle=0.4,
wavelength=1.55,
sin_theta=np.sin(np.pi * 8 / 180),
**wg2.portlist["output"]
)
tk.add(spiral_unit, gc_top)
for i in range(9):
top.add(gdspy.CellReference(spiral_unit, (i * 1100.0, 0)))
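# Editor's note (hypothetical variation, not in the original script): the same
# unit cell could be tiled on a grid instead of a single row, still using only
# plain gdspy references, e.g.:
#
#     for i in range(3):
#         for j in range(3):
#             top.add(gdspy.CellReference(spiral_unit, (i * 1100.0, j * 5000.0)))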
tk.build_mask(top, wgt, final_layer=3, final_datatype=0)
gdspy.LayoutViewer(cells=top)
gdspy.write_gds("mask_template.gds", unit=1.0e-6, precision=1.0e-9)
| DerekK88/PICwriter | docs/source/tutorial3.py | Python | mit | 2,255 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
# imports
from __future__ import unicode_literals
import os
from collections import deque
# matplotlib
import matplotlib.pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.collections import PolyCollection
# scipy/numpy
import numpy as np
from scipy.signal import savgol_filter
from scipy.interpolate import UnivariateSpline
# pandas
import pandas as pd
from pandas import DataFrame, Series, Index
from pprint import pprint
################################################################################
__author__ = 'edill'
#### DATA FOLDER DIRECTORY #####################################################
# folder = 'C:\\DATA\\New folder\\Data_4_Eric\\SCAN_Xr0'  # alternative dataset (unused)
folder = 'C:\\DATA\\New folder\\Data_4_Eric\\SCAL_alongX'
# todo plot multiple waterfalls on the same figure
#### PLOTTING OPTIONS ##########################################################
x_label = '2-theta (degrees)'
y_label = 'Depth (um)'
z_label = 'Intensity (arb.)'
min_x = None
max_x = None
start_frame = None
end_frame = None
min_z = 0
max_z = None
alpha = 1
smooth = False
smooth_window_length = 91  # must be odd (savgol_filter requires an odd window)
smooth_poly_order = 7
space_between_frames = 1 # y-spacing for each line
# color = cm.datad['winter']
# print(color)
color = 'hot_r'
cstart = 0.3
cstop = 0.7
frame_offset = 0
frame_delta = 0.2
print_color_options = True
if print_color_options:
print(list(cm.datad))
#### RUNTIME BEHAVIOR ##########################################################
# init some parameters
norm = cm.colors.Normalize(vmin=0, vmax=1)
files = os.listdir(folder)
# do in-place sorting of files
files.sort()
pprint(files)
data = DataFrame()
smoothed = DataFrame()
# init the defaults
if start_frame is None:
start_frame = 0
if end_frame is None:
end_frame = len(files)
for fname in files[start_frame:end_frame]:
with open(folder + os.sep + fname, 'r') as f:
# data.append(np.asarray([line.split() for line in f.next()]).T)
x, y = np.asarray([[float(val) for val in line.split()] for line in f]).T
data[fname] = Series(y, index=x)
# init the defaults
if min_x is None:
min_x = float(data.index[0])
if max_x is None:
max_x = float(data.index[-1])
# apply x-axis filtering
data = data[data.index > min_x]
data = data[data.index < max_x]
indices = data.index
if smooth:
# smooth the data into a new data frame
for d in data:
# smooth data
vals = data[d].values
y = savgol_filter(vals, smooth_window_length, smooth_poly_order, mode='nearest').tolist()
smoothed[d] = Series(y, indices)
to_plot = smoothed
else:
# data.ix[0] = 0
# data.ix[-1] = 0
to_plot = data
to_plot.ix[0] = 0
to_plot.ix[-1] = 0
# to_plot.index.insert(0, 0)
# to_plot.index.insert(len(data.index), 0)
# set the min and max values for z after smoothing
if min_z is None:
min_z = np.min([np.min(data[col_name]) for col_name in data])
else:
for col_name in to_plot:
colvals = to_plot[col_name].values
colvals[colvals < min_z] = min_z
if max_z is None:
max_z = np.max([np.max(data[col_name]) for col_name in data])
else:
for col_name in to_plot:
colvals = to_plot[col_name].values
colvals[colvals > max_z] = max_z
poly_pairs = deque()
# create the color map
rgba = cm.ScalarMappable(cmap=color, norm=norm)
colors = rgba.to_rgba(x=np.linspace(cstart, cstop,
len(to_plot.columns)),
alpha=alpha)
for idx, d in enumerate(to_plot):
vals = to_plot[d]
poly_pairs.append([(x, y) for x, y in zip(indices, to_plot[d].values)])
# create the collection of polygons to plot
poly = PolyCollection(list(poly_pairs), facecolors=colors)
# init the matplotlib figures
fig = plt.figure()
ax3 = Axes3D(fig)
# set the offset of each polygon
start = frame_offset
end = start + frame_delta * len(data.columns)
zs = np.arange(start, end, frame_delta)
# add the polygons to the
ax3.add_collection3d(poly, zs=zs, zdir='y')
ax3.set_xlabel(x_label)
ax3.set_ylabel(y_label)
ax3.set_zlabel(z_label)
ax3.set_xlim3d(min_x, max_x)
ax3.set_ylim3d(start, end)
ax3.set_zlim3d(min_z, max_z)
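# Editor's additions (optional, standard matplotlib calls not in the original
# script): pick a camera angle for the 3-D view and/or save the figure to disk.
# ax3.view_init(elev=25, azim=-60)
# fig.savefig('waterfall.png', dpi=200, bbox_inches='tight')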
# show the plot
plt.show()
| ericdill/miniature-hipster | miniature-hipster/plotting/waterfall.py | Python | bsd-3-clause | 4,362 |
from django.forms import ModelForm
from django.forms.widgets import NumberInput
from django import forms
from django.db.models import Q
from .models import *
def qs_to_opt_choice(qs):
empty = [('',''),]
empty.extend((obj,obj) for obj in qs)
return empty
class ZugSearchForm(forms.Form):
def __init__(self, *args, **kwargs):
super(ZugSearchForm, self).__init__(*args, **kwargs)
self.fields['gattung'] = forms.MultipleChoiceField(help_text='Mehrfachauswahl möglich',required=False,choices=((obj,obj) for obj in FahrplanZug.objects.values_list('gattung', flat=True).distinct().order_by('gattung')))
self.fields['eintragort'] = forms.MultipleChoiceField(help_text='Sucht alle Orte im Zugfahrplan',choices=((obj,obj) for obj in FahrplanZugEintrag.objects.exclude(Q(ort__startswith='Sbk'))
.values_list('ort', flat=True).distinct().order_by('ort')), required=False)
self.fields['fahrplan'] = forms.MultipleChoiceField(choices=self.fahrplan_choices(), required=False,help_text='Mehrfachauswahl möglich')
self.fields['baureihe'] = forms.MultipleChoiceField(choices=((obj,obj) for obj in FahrzeugVariante.objects.values_list('br', flat=True).distinct().order_by('br')),required=False,help_text='Sucht alle Fahrzeuge in Zugreihung')
self.fields['steuerfahrzeug'] = forms.ChoiceField(choices=qs_to_opt_choice(FahrzeugVariante.objects.values_list('br', flat=True).exclude(fuehrerstand=None).filter().distinct().order_by('br')),required=False,help_text='Sucht erstes Fahrzeug bei Zugeinfahrt')
def fahrplan_choices(self):
return ((obj,obj.replace("Timetables\\Deutschland\\", '')) for obj in Fahrplan.objects.exclude(path__contains='_Docu').values_list('path', flat=True).order_by('path'))
gattung = forms.MultipleChoiceField()
nummer = forms.CharField(required=False)
fahrplan = forms.MultipleChoiceField()
eintragort = forms.MultipleChoiceField()
zuglauf = forms.CharField(help_text='(Teilweise vergleichen)',required=False,widget=forms.TextInput(attrs={'placeholder':'z.b. Berlin'}))
baureihe = forms.MultipleChoiceField()
steuerfahrzeug = forms.ChoiceField()
zugart = forms.MultipleChoiceField(choices=[(0, 'Güterzug'), (1,'Reisezug')],widget=forms.CheckboxSelectMultiple,required=False, initial=(0,1))
dekozug = forms.MultipleChoiceField(choices=[(0, 'Nein'), (1,'Ja')],widget=forms.CheckboxSelectMultiple,required=False, initial=(0,))
anfang = forms.MultipleChoiceField(choices=[(0, 'Unbeweglich'), (1,'in Bewegung')],widget=forms.CheckboxSelectMultiple,required=False, initial=(0,1))
speed_min = forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'','addon_before':'≥','addon_after':'km/h'}),
label='Maximalgeschwindigkeit',required=False)
speed_max = forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'','addon_before':'≤','addon_after':'km/h'}),
label='',required=False)
#fahrzeit_min = forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'','addon_before':'≥','addon_after':'minuten'}),
# label='Farhzeit',required=False)
#fahrzeit_max = forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'','addon_before':'≤','addon_after':'minuten'}),
# label='',required=False)
antrieb = forms.MultipleChoiceField(required=False, choices=FahrzeugVariante.ANTRIEB_CHOICES, help_text='Mehrfachauswahl möglich')
neigezug = forms.MultipleChoiceField(choices=[(0, 'Nein'), (1,'Ja')],widget=forms.CheckboxSelectMultiple,initial=(0,1),required=False)
search = forms.CharField(widget=forms.HiddenInput(),required=False)
time_window = forms.IntegerField(widget=forms.HiddenInput(), min_value=0, max_value=1440, required=False)
fis = forms.MultipleChoiceField(label='FIS Ansagen',choices=[(0, 'Nein'), (1,'Ja')],widget=forms.CheckboxSelectMultiple,initial=(0,1),required=False)
class FuehrerstandSearchForm(forms.Form):
afb = forms.MultipleChoiceField(choices=[(0, 'ohne AFB'), (1,'mit AFB')],widget=forms.CheckboxSelectMultiple,initial=(0,),required=False)
zugsicherung = forms.MultipleChoiceField(required=False,choices=Fuehrerstand.ZUGSICHERUNG_CHOICES,help_text='Mehrfachauswahl möglich')
sifa = forms.MultipleChoiceField(required=False,choices=Fuehrerstand.SIFA_CHOICES,help_text='Mehrfachauswahl möglich')
tuersystem = forms.MultipleChoiceField(required=False,choices=Fuehrerstand.TUER_CHOICES,help_text='Mehrfachauswahl möglich') #TB5,TB0,SAT.SST,TAV,UICWTB
schleuderschutz = forms.MultipleChoiceField(required=False,choices=Fuehrerstand.SCHLEUDERSCHUTZ_CHOICES,help_text='Mehrfachauswahl möglich')
notbremse_system = forms.MultipleChoiceField(required=False,choices=Fuehrerstand.NOTBREMS_CHOICES,help_text='Mehrfachauswahl möglich')
class FahrzeugSearchForm(forms.Form):
def __init__(self, *args, **kwargs):
super(FahrzeugSearchForm, self).__init__(*args, **kwargs)
self.fields['baureihe'] = forms.ChoiceField(choices=qs_to_opt_choice(FahrzeugVariante.objects.values_list('br', flat=True).distinct().order_by('br')),required=False)
baureihe = forms.ChoiceField()
deko = forms.MultipleChoiceField(choices=[(0, 'Nein'), (1,'Ja')],widget=forms.CheckboxSelectMultiple,initial=(0,),required=False)
beschreibung = forms.CharField(help_text='(Teilweise vergleichen)',required=False)
farbgebung = forms.CharField(help_text='(Teilweise vergleichen)',required=False)
einsatz = forms.DateField(required=False,widget=forms.DateInput(attrs={'placeholder':'dd.mm.yy'}))
masse_min = forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'','addon_before':'≥','addon_after':'kg'}),
label='Masse',required=False)
masse_max = forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'','addon_before':'≤','addon_after':'kg'}),
label='',required=False)
laenge_min = forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'','addon_before':'≥','addon_after':'m'}),
label='Länge',required=False)
laenge_max = forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'','addon_before':'≤','addon_after':'m'}),
label='',required=False)
speed_min = forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'','addon_before':'≥','addon_after':'km/h'}),
label='Maximalgeschwindigkeit',required=False)
speed_max = forms.IntegerField(widget=forms.NumberInput(attrs={'placeholder':'','addon_before':'≤','addon_after':'km/h'}),
label='',required=False)
antrieb = forms.MultipleChoiceField(required=False, choices=FahrzeugVariante.ANTRIEB_CHOICES,help_text='Mehrfachauswahl möglich')
bremse = forms.MultipleChoiceField(required=False, choices=FahrzeugVariante.BREMSE_CHOICES,help_text='Mehrfachauswahl möglich')
neigetechnik = forms.MultipleChoiceField(choices=[(0, 'Nein'), (1,'Ja')],widget=forms.CheckboxSelectMultiple, initial=(0,1), required=False)
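# Editor's sketch (hypothetical view, not part of this module): search forms
# like these are normally bound to request.GET; the model field used in the
# filter below is illustrative only.
#
#     def zug_search(request):
#         form = ZugSearchForm(request.GET or None)
#         results = FahrplanZug.objects.none()
#         if form.is_valid() and form.cleaned_data.get('nummer'):
#             results = FahrplanZug.objects.filter(
#                 nummer__icontains=form.cleaned_data['nummer'])
#         ...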
| jonathanp0/zusidatenbank | zusidatenbank/datenbank/forms.py | Python | agpl-3.0 | 7,729 |
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013, John McNamara, [email protected]
#
import unittest
import os
from ...workbook import Workbook
from ..helperfunctions import _compare_xlsx_files
class TestCompareXLSXFiles(unittest.TestCase):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'image01.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.image_dir = test_dir + 'images/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with image(s)."""
filename = self.got_filename
####################################################
workbook = Workbook(filename)
worksheet = workbook.add_worksheet()
worksheet.insert_image('E9', self.image_dir + 'red.png')
workbook.close()
####################################################
got, exp = _compare_xlsx_files(self.got_filename,
self.exp_filename,
self.ignore_files,
self.ignore_elements)
self.assertEqual(got, exp)
def test_create_file_in_memory(self):
"""Test the creation of a simple XlsxWriter file with image(s)."""
filename = self.got_filename
####################################################
workbook = Workbook(filename, {'in_memory': True})
worksheet = workbook.add_worksheet()
worksheet.insert_image('E9', self.image_dir + 'red.png')
workbook.close()
####################################################
got, exp = _compare_xlsx_files(self.got_filename,
self.exp_filename,
self.ignore_files,
self.ignore_elements)
self.assertEqual(got, exp)
def tearDown(self):
# Cleanup.
if os.path.exists(self.got_filename):
os.remove(self.got_filename)
if __name__ == '__main__':
unittest.main()
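# Editor's note (not part of the test): insert_image also accepts an options
# dict for placement and scaling in XlsxWriter, e.g.
#     worksheet.insert_image('E9', 'red.png',
#                            {'x_offset': 4, 'y_offset': 2, 'x_scale': 0.5})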
| ivmech/iviny-scope | lib/xlsxwriter/test/comparison/test_image01.py | Python | gpl-3.0 | 2,429 |
# coding: utf-8
# Copyright (C) 1994-2016 Altair Engineering, Inc.
# For more information, contact Altair at www.altair.com.
#
# This file is part of the PBS Professional ("PBS Pro") software.
#
# Open Source License Information:
#
# PBS Pro is free software. You can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# PBS Pro is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Commercial License Information:
#
# The PBS Pro software is licensed under the terms of the GNU Affero General
# Public License agreement ("AGPL"), except where a separate commercial license
# agreement for PBS Pro version 14 or later has been executed in writing with Altair.
#
# Altair’s dual-license business model allows companies, individuals, and
# organizations to create proprietary derivative works of PBS Pro and distribute
# them - whether embedded or bundled with other software - under a commercial
# license agreement.
#
# Use of Altair’s trademarks, including but not limited to "PBS™",
# "PBS Professional®", and "PBS Pro™" and Altair’s logos is subject to Altair's
# trademark licensing policies.
from setuptools import setup, find_packages
import os
os.chdir(os.path.dirname(os.path.abspath(__file__)))
def get_reqs():
    with open('requirements.txt') as f:
        return [r.strip() for r in f]
def get_scripts():
return ['bin/%s' % (x) for x in os.listdir('bin')]
setup(
name='PbsTestLab',
version='1.0.0',
author='Vincent Matossian',
author_email='[email protected]',
packages=find_packages(),
scripts=get_scripts(),
url='http://www.pbspro.com',
license='AGPLv3 with exceptions',
description='PBS Pro Testing and Benchmarking Framework',
long_description=open('README.txt').read(),
install_requires=get_reqs(),
keywords='PbsTestLab ptl pbspro',
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'License :: AGPLv3 with exceptions',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
]
)
| subhasisb/test2 | test/fw/setup.py | Python | agpl-3.0 | 2,871 |
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from django import template
from django.conf import settings
from django.template.loader import get_template
from pdc import get_version
def pdc_version():
return get_version()
def login_url(redirect=None):
"""Create login url based on settings.
Optionally, append redirection URL.
"""
url = settings.LOGIN_URL
redirect = redirect or settings.LOGIN_REDIRECT_URL
if redirect:
url += '?next=' + redirect
return url
def do_help_popover(parser, token):
nodelist = parser.parse(('endhelppopover',))
parser.delete_first_token()
return HelpPopoverNode(nodelist)
class HelpPopoverNode(template.Node):
"""Create a Bootstrap popover with help.
Use the ``help_button.html`` template to create a button to display this
dialog.
To use it, just wrap the dialog contents with template tags like ::
{% helppopover %}
Contents of the popover...
{% endhelppopover %}
On each page, there can be only one popover created using this tag.
"""
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
t = get_template('help_popover.html')
output = self.nodelist.render(context)
context['content'] = output
return t.render(context)
register = template.Library()
register.simple_tag(pdc_version)
register.simple_tag(login_url)
register.tag('helppopover', do_help_popover)
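# Usage sketch (editor's addition; hypothetical template code, library name
# taken from this file's path):
#     {% load pdctags %}
#     Version {% pdc_version %} -- <a href="{% login_url %}">log in</a>
#     {% helppopover %}Contents of the popover...{% endhelppopover %}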
| xychu/product-definition-center | pdc/apps/utils/templatetags/pdctags.py | Python | mit | 1,551 |
import os
from setuptools import setup, find_packages
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "sound4python",
version = "0.1.3",
author = "Peter Rennert, [email protected]",
author_email = "[email protected], [email protected]",
description = ("comfortable playback of wav files"),
packages=find_packages(),
license = read('LICENSE.txt'),
keywords = "wav",
url = "https://github.com/groakat/sound4python",
long_description=read('README.md'),
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
],
) | groakat/sound4python | setup.py | Python | mit | 879 |
from tamproxy import SyncedSketch, Timer
from tamproxy.devices import Motor, Encoder
# Tests the intake ("hug") motor: run forward and back off automatically when
# the encoder indicates a stall.
HUGS_MOTOR_CONTROLLER_DIRECTION = 8
HUGS_MOTOR_CONTROLLER_PWM = 9
HUGS_MOTOR_ENCODER_YELLOW = 31
HUGS_MOTOR_ENCODER_WHITE = 32
# The limit point at which the motor is considered stalled.
INTAKE_ENCODER_LIMIT = 150
# The speed of the intake motors.
INTAKE_POWER = 120
class HugTest(SyncedSketch):
def setup(self):
# Motor object representing the intake mechanism motors.
self.intakeMotor = Motor(self.tamp, HUGS_MOTOR_CONTROLLER_DIRECTION, HUGS_MOTOR_CONTROLLER_PWM)
# Encoder object for the intake motor.
self.intakeEncoder = Encoder(self.tamp, HUGS_MOTOR_ENCODER_YELLOW, HUGS_MOTOR_ENCODER_WHITE)
# Timer object to moderate checking for intake errors.
self.intakeTimer = Timer()
# Are the intake motors going forward? True if so, False if reversing.
        self.intakeDirection = True
# Start the intake motor.
self.intakeMotor.write(self.intakeDirection, INTAKE_POWER)
def loop(self):
self.checkForIntakeErrors()
def checkForIntakeErrors(self, checkTime = 1000, reverseTime = 3000):
        if self.intakeDirection: # We are moving forward.
            if self.intakeTimer.millis() > checkTime:
                self.intakeTimer.reset()
                if self.intakeEncoder.val < INTAKE_ENCODER_LIMIT: # if we're stalled
                    # Too few encoder ticks since the last check: reverse to un-jam.
                    self.intakeDirection = not self.intakeDirection
                    self.intakeMotor.write(self.intakeDirection, INTAKE_POWER)
                else: # if we're not stalled
                    self.intakeEncoder.write(0)
        else: # We are reversing the motors.
            if self.intakeTimer.millis() > reverseTime:
                self.intakeTimer.reset()
                # Done backing off; resume the normal intake direction.
                self.intakeDirection = not self.intakeDirection
                self.intakeEncoder.write(0)
                self.intakeMotor.write(self.intakeDirection, INTAKE_POWER)
if __name__ == "__main__":
sketch = HugTest(1, -0.00001, 100)
    sketch.run()
| pravinas/et-maslab-2016 | sandbox/test_hugs.py | Python | mit | 2,236 |
# Copyright (c) 2018 Charles University, Faculty of Arts,
# Institute of the Czech National Corpus
# Copyright (c) 2018 Tomas Machalek <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# dated June, 1991.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import re
from functools import wraps
from plugins.rdbms_corparch.registry import Attribute, PosAttribute, Struct, SimpleAttr, RegistryConf
DEBUG = 0
class RegistrySyntaxError(Exception):
pass
def infer_encoding(file_path):
with open(file_path) as fr:
for line in fr:
if 'ENCODING' in line:
if 'utf8' in line.lower() or 'utf-8' in line.lower():
return 'utf-8'
elif 'iso' in line.lower() and '8859-2' in line:
return 'iso-8859-2'
break
return 'utf-8'
def watchable(f):
@wraps(f)
def fn(slf, token, obj):
if DEBUG:
print(u'fn: {0}, tok: {1}, curr: {2}'.format(f.__name__, token if len(token) > 0 else '',
obj.__class__.__name__ if obj else None))
return f(slf, token, obj)
return fn
class Token(object):
def __init__(self, type, value=None):
self._type = type
self._value = value
class Tokenizer(object):
def __init__(self, infile, encoding):
self._fr = infile
self._encoding = encoding
def __call__(self):
ans = []
for line in self._fr:
line = line.decode(self._encoding)
items = re.split(r'\s+', line)
line_ans = []
is_q = False
for item in items:
if item == '':
continue
if item[0] == '"' and item[-1] == '"' and len(item) > 1:
line_ans.append(item[1:-1])
elif item[0] == '"' and not is_q:
line_ans.append([])
is_q = True
v = item[1:]
line_ans[-1].append(v)
elif item[-1] == '"':
is_q = False
v = item[:-1]
if type(line_ans[-1]) is list and line_ans[-1][0] == '"' and v == '"':
line_ans[-1].append(' ')
else:
line_ans[-1].append(v)
else:
if is_q:
line_ans[-1].append(item)
else:
line_ans.append(item)
tmp = [u' '.join(v) if type(v) is list else v for v in line_ans] + ['$']
if len(tmp) > 0:
ans.append(tmp)
return [v for subl in ans for v in subl]
class Parser(object):
def __init__(self, corpus_id, variant, tokens, backend):
self._tokens = tokens
self._items = RegistryConf(corpus_id, variant, backend)
self._posattr_idx = 0
@staticmethod
def is_key(s):
return re.match(r'[A-Z]+', s)
@staticmethod
def is_value(s):
return s != '{' and s != '}' and s != '#'
@watchable
def state_0(self, token, obj):
if token == '$':
return self.state_0, obj
elif token.startswith('#'):
return self.state_3, obj
elif self.is_key(token):
if obj:
self._items.add_item(obj)
if token == 'ATTRIBUTE':
attr = PosAttribute(position=self._posattr_idx, name=token)
self._posattr_idx += 1
return self.state_1, attr
elif token == 'STRUCTURE':
return self.state_1, Struct(token)
else:
return self.state_1, SimpleAttr(token)
else:
raise RegistrySyntaxError(u'in state 0 cannot process: {0}'.format(token))
@watchable
def state_1(self, token, obj):
if self.is_value(token):
if isinstance(obj, SimpleAttr):
obj.value = token
else:
obj.name = token
return self.state_2, obj
else:
raise RegistrySyntaxError(u'in state 1 cannot process: {0}'.format(token))
@watchable
def state_2(self, token, obj):
if token == '$':
return self.state_0, obj
elif token == '{':
return self.state_4, obj
@watchable
def state_3(self, token, obj):
if token == '$':
return self.state_0, obj
return self.state_3, obj
def state_3b(self, token, obj):
if token == '$':
return self.state_4, obj
return self.state_3b, obj
def state_3c(self, token, obj):
if token == '$':
return self.state_7, obj
return self.state_3c, obj
@watchable
def state_4(self, token, obj):
if token == '}':
return self.state_0, obj
elif self.is_key(token):
if token == 'ATTRIBUTE':
obj.new_item(Attribute())
else:
obj.new_item(SimpleAttr(token))
return self.state_5, obj
elif token == '$':
return self.state_4, obj
elif token.startswith('#'):
return self.state_3b, obj
@watchable
def state_5(self, token, obj):
if self.is_value(token):
if isinstance(obj.last_item, Attribute):
obj.last_item.name = token
else:
obj.last_item.value = token
return self.state_6, obj
@watchable
def state_6(self, token, obj):
if token == '$':
return self.state_4, obj
elif token == '{':
return self.state_7, obj
@watchable
def state_7(self, token, obj):
if self.is_key(token):
obj.last_item.new_item(SimpleAttr(token))
return self.state_8, obj
elif token == '$':
return self.state_7, obj
elif token == '}':
return self.state_4, obj
elif token.startswith('#'):
return self.state_3c, obj
@watchable
def state_8(self, token, obj):
if self.is_value(token):
obj.last_item.last_item.value = token
return self.state_9, obj
@watchable
def state_9(self, token, obj):
if token == '$':
return self.state_7, obj
def __call__(self):
i = 0
fn = self.state_0
obj = None
while fn is not None and i < len(self._tokens):
fn, obj = fn(self._tokens[i], obj)
i += 1
if obj:
self._items.add_item(obj)
return self._items
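# --- Illustrative usage (not part of the original module) ------------------
# A hedged sketch of what the Tokenizer produces; the Parser additionally
# needs a RegistryConf backend object, which is provided elsewhere in this
# package, so only the tokenization step is demonstrated here.
if __name__ == '__main__':
    import io
    sample = io.BytesIO(b'NAME "Susanne corpus"\nATTRIBUTE word\n')
    print(Tokenizer(sample, 'utf-8')())
    # -> [u'NAME', u'Susanne corpus', '$', u'ATTRIBUTE', u'word', '$']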
| tomachalek/kontext | lib/plugins/rdbms_corparch/registry/parser.py | Python | gpl-2.0 | 7,234 |
"""
Django settings for rational_whimsy project.
Generated by 'django-admin startproject' using Django 1.10.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
import dj_database_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('SECRET_KEY', 'potatoes')
# SECURITY WARNING: don't run with debug turned on in production!
# Compare against the literal string so that DEBUG=False in the environment
# actually disables debug mode (bool() of any non-empty string is True).
DEBUG = os.environ.get("DEBUG", "True") == "True"
# split() without an argument yields [] when the variable is unset, rather than ['']
ALLOWED_HOSTS = os.environ.get('ALLOWED_HOSTS', '').split()
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
'my_profile',
'rational_whimsy',
'blog_images',
'projects',
'redactor',
'taggit',
'django_extensions',
'scripts',
'rest_framework',
'storages'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'rational_whimsy.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'rational_whimsy.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# dj-database-url parses the DATABASE_URL environment variable when present;
# otherwise fall back to an explicit local PostgreSQL configuration.
DATABASES = {
    'default': dj_database_url.config(default=os.environ.get('DATABASE_URL', ''))
}
if not DATABASES['default']:
    DATABASES['default'] = {
        "ENGINE": "django.db.backends.postgresql_psycopg2",
        "NAME": os.environ.get("DB_NAME", 'rationalwhimsy'),
        "USER": os.environ.get("DB_USER", ''),
        "PASSWORD": os.environ.get("DB_PASS", ''),
        "HOST": os.environ.get("DB_HOST", "localhost"),
    }
DATABASES['default']['TEST'] = {'NAME': 'rwtest'}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
# USE_TZ = True
# Redirect URL for login
LOGIN_REDIRECT_URL = "/"
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
# STATIC_URL = '/static/'
# STATIC_ROOT = os.path.join(BASE_DIR, "static")
# MEDIA_URL = '/imgs/'
# MEDIA_ROOT = os.path.join(BASE_DIR, "MEDIA_ASSETS")
REDACTOR_OPTIONS = {'lang': 'en'}
REDACTOR_UPLOAD = 'uploads/'
# AWS Configuration
if not DEBUG:
AWS_HEADERS = { # see http://developer.yahoo.com/performance/rules.html#expires
'Expires': 'Thu, 31 Dec 2099 20:00:00 GMT',
'Cache-Control': 'max-age=60',
}
AWS_STORAGE_BUCKET_NAME = 'rationalwhimsy'
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID', '')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY', '')
AWS_S3_CUSTOM_DOMAIN = '{}.s3.amazonaws.com'.format(AWS_STORAGE_BUCKET_NAME)
STATICFILES_LOCATION = 'static'
STATICFILES_STORAGE = 'rational_whimsy.custom_storages.StaticStorage'
STATIC_URL = "https://{}/{}/".format(AWS_S3_CUSTOM_DOMAIN, STATICFILES_LOCATION)
MEDIAFILES_LOCATION = 'media'
DEFAULT_FILE_STORAGE = 'rational_whimsy.custom_storages.MediaStorage'
MEDIA_URL = "https://{}/{}/".format(AWS_S3_CUSTOM_DOMAIN, MEDIAFILES_LOCATION)
else:
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "static")
MEDIA_URL = '/imgs/'
MEDIA_ROOT = os.path.join(BASE_DIR, "MEDIA_ASSETS")
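# --- Illustrative sketch (not part of the original settings file) ----------
# STATICFILES_STORAGE / DEFAULT_FILE_STORAGE above reference a
# rational_whimsy/custom_storages.py module that is not shown here. A
# minimal version, assuming django-storages' boto-based S3 backend (the one
# that honors AWS_HEADERS), could look like:
#
#     from storages.backends.s3boto import S3BotoStorage
#
#     class StaticStorage(S3BotoStorage):
#         location = 'static'  # mirrors STATICFILES_LOCATION
#
#     class MediaStorage(S3BotoStorage):
#         location = 'media'   # mirrors MEDIAFILES_LOCATION
# ----------------------------------------------------------------------------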
| nhuntwalker/rational_whimsy | rational_whimsy/rational_whimsy/settings.py | Python | mit | 5,068 |
"""Execute Ansible sanity tests."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import abc
import glob
import os
import re
import collections
from ... import types as t
from ...io import (
read_json_file,
)
from ...util import (
ApplicationError,
SubprocessError,
display,
import_plugins,
load_plugins,
parse_to_list_of_dict,
ABC,
ANSIBLE_TEST_CONTROLLER_ROOT,
ANSIBLE_TEST_TARGET_ROOT,
is_binary_file,
read_lines_without_comments,
get_available_python_versions,
find_python,
is_subdir,
paths_to_dirs,
get_ansible_version,
str_to_version,
SUPPORTED_PYTHON_VERSIONS,
CONTROLLER_PYTHON_VERSIONS,
REMOTE_ONLY_PYTHON_VERSIONS,
)
from ...util_common import (
run_command,
intercept_command,
handle_layout_messages,
)
from ...ansible_util import (
ansible_environment,
)
from ...target import (
walk_internal_targets,
walk_sanity_targets,
TestTarget,
)
from ...executor import (
get_changes_filter,
AllTargetsSkipped,
Delegate,
install_command_requirements,
)
from ...config import (
SanityConfig,
)
from ...test import (
TestSuccess,
TestFailure,
TestSkipped,
TestMessage,
calculate_best_confidence,
)
from ...data import (
data_context,
)
from ...content_config import (
get_content_config,
)
COMMAND = 'sanity'
SANITY_ROOT = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'sanity')
TARGET_SANITY_ROOT = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'sanity')
def command_sanity(args):
"""
:type args: SanityConfig
"""
handle_layout_messages(data_context().content.sanity_messages)
changes = get_changes_filter(args)
require = args.require + changes
targets = SanityTargets.create(args.include, args.exclude, require)
if not targets.include:
raise AllTargetsSkipped()
if args.delegate:
raise Delegate(require=changes, exclude=args.exclude)
tests = sanity_get_tests()
if args.test:
tests = [target for target in tests if target.name in args.test]
else:
disabled = [target.name for target in tests if not target.enabled and not args.allow_disabled]
tests = [target for target in tests if target.enabled or args.allow_disabled]
if disabled:
display.warning('Skipping tests disabled by default without --allow-disabled: %s' % ', '.join(sorted(disabled)))
if args.skip_test:
tests = [target for target in tests if target.name not in args.skip_test]
total = 0
failed = []
for test in tests:
if args.list_tests:
display.info(test.name)
continue
available_versions = sorted(get_available_python_versions().keys())
if args.python:
# specific version selected
versions = (args.python,)
elif isinstance(test, SanityMultipleVersion):
# try all supported versions for multi-version tests when a specific version has not been selected
versions = test.supported_python_versions
elif not test.supported_python_versions or args.python_version in test.supported_python_versions:
# the test works with any version or the version we're already running
versions = (args.python_version,)
else:
# available versions supported by the test
versions = tuple(sorted(set(available_versions) & set(test.supported_python_versions)))
# use the lowest available version supported by the test or the current version as a fallback (which will be skipped)
versions = versions[:1] or (args.python_version,)
for version in versions:
if isinstance(test, SanityMultipleVersion):
skip_version = version
else:
skip_version = None
options = ''
if test.supported_python_versions and version not in test.supported_python_versions:
# There are two ways this situation can occur:
#
# - A specific Python version was requested with the `--python` option and that version is not supported by the test.
# This means that the test supports only a subset of the controller supported Python versions, and not the one given by the `--python` option.
# Or that a remote-only Python version was specified for a Python based sanity test that is not multi-version.
#
# - No specific Python version was requested and no supported version was found on the system.
# This means that the test supports only a subset of the controller supported Python versions, and not the one used to run ansible-test.
# Or that the Python version used to run ansible-test is not supported by the controller, a condition which will soon not be possible.
#
# Neither of these are affected by the Python versions supported by a collection.
result = SanitySkipped(test.name, skip_version)
result.reason = "Skipping sanity test '%s' on Python %s. Supported Python versions: %s" % (
test.name, version, ', '.join(test.supported_python_versions))
else:
if isinstance(test, SanityCodeSmellTest):
settings = test.load_processor(args)
elif isinstance(test, SanityMultipleVersion):
settings = test.load_processor(args, version)
elif isinstance(test, SanitySingleVersion):
settings = test.load_processor(args)
elif isinstance(test, SanityVersionNeutral):
settings = test.load_processor(args)
else:
raise Exception('Unsupported test type: %s' % type(test))
all_targets = targets.targets
if test.all_targets:
usable_targets = targets.targets
elif test.no_targets:
usable_targets = tuple()
else:
usable_targets = targets.include
all_targets = SanityTargets.filter_and_inject_targets(test, all_targets)
usable_targets = SanityTargets.filter_and_inject_targets(test, usable_targets)
usable_targets = sorted(test.filter_targets_by_version(list(usable_targets), version))
usable_targets = settings.filter_skipped_targets(usable_targets)
sanity_targets = SanityTargets(tuple(all_targets), tuple(usable_targets))
test_needed = bool(usable_targets or test.no_targets)
result = None
if test_needed and not args.python and version not in available_versions:
# Deferred checking of Python availability. Done here since it is now known to be required for running the test.
# Earlier checking could cause a spurious warning to be generated for a collection which does not support the Python version.
# If the `--python` option was used, this warning will be skipped and an error will be reported when running the test instead.
result = SanitySkipped(test.name, skip_version)
result.reason = "Skipping sanity test '%s' on Python %s due to missing interpreter." % (test.name, version)
if not result:
if test.supported_python_versions:
display.info("Running sanity test '%s' with Python %s" % (test.name, version))
else:
display.info("Running sanity test '%s'" % test.name)
if test_needed and not result:
install_command_requirements(args, version, context=test.name, enable_pyyaml_check=True)
if isinstance(test, SanityCodeSmellTest):
result = test.test(args, sanity_targets, version)
elif isinstance(test, SanityMultipleVersion):
result = test.test(args, sanity_targets, version)
options = ' --python %s' % version
elif isinstance(test, SanitySingleVersion):
result = test.test(args, sanity_targets, version)
elif isinstance(test, SanityVersionNeutral):
result = test.test(args, sanity_targets)
else:
raise Exception('Unsupported test type: %s' % type(test))
elif result:
pass
else:
result = SanitySkipped(test.name, skip_version)
result.write(args)
total += 1
if isinstance(result, SanityFailure):
failed.append(result.test + options)
if failed:
message = 'The %d sanity test(s) listed below (out of %d) failed. See error output above for details.\n%s' % (
len(failed), total, '\n'.join(failed))
if args.failure_ok:
display.error(message)
else:
raise ApplicationError(message)
def collect_code_smell_tests(): # type: () -> t.Tuple[SanityFunc, ...]
"""Return a tuple of available code smell sanity tests."""
paths = glob.glob(os.path.join(SANITY_ROOT, 'code-smell', '*.py'))
if data_context().content.is_ansible:
# include Ansible specific code-smell tests which are not configured to be skipped
ansible_code_smell_root = os.path.join(data_context().content.root, 'test', 'sanity', 'code-smell')
skip_tests = read_lines_without_comments(os.path.join(ansible_code_smell_root, 'skip.txt'), remove_blank_lines=True, optional=True)
paths.extend(path for path in glob.glob(os.path.join(ansible_code_smell_root, '*.py')) if os.path.basename(path) not in skip_tests)
paths = sorted(p for p in paths if os.access(p, os.X_OK) and os.path.isfile(p))
tests = tuple(SanityCodeSmellTest(p) for p in paths)
return tests
def sanity_get_tests():
"""
:rtype: tuple[SanityFunc]
"""
return SANITY_TESTS
class SanityIgnoreParser:
"""Parser for the consolidated sanity test ignore file."""
NO_CODE = '_'
def __init__(self, args): # type: (SanityConfig) -> None
if data_context().content.collection:
ansible_version = '%s.%s' % tuple(get_ansible_version().split('.')[:2])
ansible_label = 'Ansible %s' % ansible_version
file_name = 'ignore-%s.txt' % ansible_version
else:
ansible_label = 'Ansible'
file_name = 'ignore.txt'
self.args = args
self.relative_path = os.path.join(data_context().content.sanity_path, file_name)
self.path = os.path.join(data_context().content.root, self.relative_path)
self.ignores = collections.defaultdict(lambda: collections.defaultdict(dict)) # type: t.Dict[str, t.Dict[str, t.Dict[str, int]]]
self.skips = collections.defaultdict(lambda: collections.defaultdict(int)) # type: t.Dict[str, t.Dict[str, int]]
self.parse_errors = [] # type: t.List[t.Tuple[int, int, str]]
self.file_not_found_errors = [] # type: t.List[t.Tuple[int, str]]
lines = read_lines_without_comments(self.path, optional=True)
targets = SanityTargets.get_targets()
paths = set(target.path for target in targets)
tests_by_name = {} # type: t.Dict[str, SanityTest]
versioned_test_names = set() # type: t.Set[str]
unversioned_test_names = {} # type: t.Dict[str, str]
directories = paths_to_dirs(list(paths))
paths_by_test = {} # type: t.Dict[str, t.Set[str]]
display.info('Read %d sanity test ignore line(s) for %s from: %s' % (len(lines), ansible_label, self.relative_path), verbosity=1)
for test in sanity_get_tests():
test_targets = SanityTargets.filter_and_inject_targets(test, targets)
if isinstance(test, SanityMultipleVersion):
versioned_test_names.add(test.name)
for python_version in test.supported_python_versions:
test_name = '%s-%s' % (test.name, python_version)
paths_by_test[test_name] = set(target.path for target in test.filter_targets_by_version(test_targets, python_version))
tests_by_name[test_name] = test
else:
unversioned_test_names.update(dict(('%s-%s' % (test.name, python_version), test.name) for python_version in SUPPORTED_PYTHON_VERSIONS))
paths_by_test[test.name] = set(target.path for target in test.filter_targets_by_version(test_targets, ''))
tests_by_name[test.name] = test
for line_no, line in enumerate(lines, start=1):
if not line:
self.parse_errors.append((line_no, 1, "Line cannot be empty or contain only a comment"))
continue
parts = line.split(' ')
path = parts[0]
codes = parts[1:]
if not path:
self.parse_errors.append((line_no, 1, "Line cannot start with a space"))
continue
if path.endswith(os.path.sep):
if path not in directories:
self.file_not_found_errors.append((line_no, path))
continue
else:
if path not in paths:
self.file_not_found_errors.append((line_no, path))
continue
if not codes:
self.parse_errors.append((line_no, len(path), "Error code required after path"))
continue
code = codes[0]
if not code:
self.parse_errors.append((line_no, len(path) + 1, "Error code after path cannot be empty"))
continue
if len(codes) > 1:
self.parse_errors.append((line_no, len(path) + len(code) + 2, "Error code cannot contain spaces"))
continue
parts = code.split('!')
code = parts[0]
commands = parts[1:]
parts = code.split(':')
test_name = parts[0]
error_codes = parts[1:]
test = tests_by_name.get(test_name)
if not test:
unversioned_name = unversioned_test_names.get(test_name)
if unversioned_name:
self.parse_errors.append((line_no, len(path) + len(unversioned_name) + 2, "Sanity test '%s' cannot use a Python version like '%s'" % (
unversioned_name, test_name)))
elif test_name in versioned_test_names:
self.parse_errors.append((line_no, len(path) + len(test_name) + 1, "Sanity test '%s' requires a Python version like '%s-%s'" % (
test_name, test_name, args.python_version)))
else:
self.parse_errors.append((line_no, len(path) + 2, "Sanity test '%s' does not exist" % test_name))
continue
if path.endswith(os.path.sep) and not test.include_directories:
self.parse_errors.append((line_no, 1, "Sanity test '%s' does not support directory paths" % test_name))
continue
if path not in paths_by_test[test_name] and not test.no_targets:
self.parse_errors.append((line_no, 1, "Sanity test '%s' does not test path '%s'" % (test_name, path)))
continue
if commands and error_codes:
self.parse_errors.append((line_no, len(path) + len(test_name) + 2, "Error code cannot contain both '!' and ':' characters"))
continue
if commands:
command = commands[0]
if len(commands) > 1:
self.parse_errors.append((line_no, len(path) + len(test_name) + len(command) + 3, "Error code cannot contain multiple '!' characters"))
continue
if command == 'skip':
if not test.can_skip:
self.parse_errors.append((line_no, len(path) + len(test_name) + 2, "Sanity test '%s' cannot be skipped" % test_name))
continue
existing_line_no = self.skips.get(test_name, {}).get(path)
if existing_line_no:
self.parse_errors.append((line_no, 1, "Duplicate '%s' skip for path '%s' first found on line %d" % (test_name, path, existing_line_no)))
continue
self.skips[test_name][path] = line_no
continue
self.parse_errors.append((line_no, len(path) + len(test_name) + 2, "Command '!%s' not recognized" % command))
continue
if not test.can_ignore:
self.parse_errors.append((line_no, len(path) + 1, "Sanity test '%s' cannot be ignored" % test_name))
continue
if test.error_code:
if not error_codes:
self.parse_errors.append((line_no, len(path) + len(test_name) + 1, "Sanity test '%s' requires an error code" % test_name))
continue
error_code = error_codes[0]
if len(error_codes) > 1:
self.parse_errors.append((line_no, len(path) + len(test_name) + len(error_code) + 3, "Error code cannot contain multiple ':' characters"))
continue
if error_code in test.optional_error_codes:
self.parse_errors.append((line_no, len(path) + len(test_name) + 3, "Optional error code '%s' cannot be ignored" % (
error_code)))
continue
else:
if error_codes:
self.parse_errors.append((line_no, len(path) + len(test_name) + 2, "Sanity test '%s' does not support error codes" % test_name))
continue
error_code = self.NO_CODE
existing = self.ignores.get(test_name, {}).get(path, {}).get(error_code)
if existing:
if test.error_code:
self.parse_errors.append((line_no, 1, "Duplicate '%s' ignore for error code '%s' for path '%s' first found on line %d" % (
test_name, error_code, path, existing)))
else:
self.parse_errors.append((line_no, 1, "Duplicate '%s' ignore for path '%s' first found on line %d" % (
test_name, path, existing)))
continue
self.ignores[test_name][path][error_code] = line_no
@staticmethod
def load(args): # type: (SanityConfig) -> SanityIgnoreParser
"""Return the current SanityIgnore instance, initializing it if needed."""
try:
return SanityIgnoreParser.instance
except AttributeError:
pass
SanityIgnoreParser.instance = SanityIgnoreParser(args)
return SanityIgnoreParser.instance
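# An illustration of the ignore-file syntax accepted by SanityIgnoreParser
# (the paths and test names below are hypothetical; entries are separated by
# single spaces, one error code or command per line):
#
#   plugins/modules/foo.py validate-modules:missing-gplv3-license
#   plugins/modules/foo.py import-3.8!skip
#   docs/ no-smart-quotes
#
# i.e. "<path> <test-name>[-<python-version>][:<error-code>|!skip]", where a
# trailing path separator marks a directory entry and multi-version tests
# must include a Python version suffix.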
class SanityIgnoreProcessor:
"""Processor for sanity test ignores for a single run of one sanity test."""
def __init__(self,
args, # type: SanityConfig
test, # type: SanityTest
python_version, # type: t.Optional[str]
): # type: (...) -> None
name = test.name
code = test.error_code
if python_version:
full_name = '%s-%s' % (name, python_version)
else:
full_name = name
self.args = args
self.test = test
self.code = code
self.parser = SanityIgnoreParser.load(args)
self.ignore_entries = self.parser.ignores.get(full_name, {})
self.skip_entries = self.parser.skips.get(full_name, {})
self.used_line_numbers = set() # type: t.Set[int]
def filter_skipped_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given targets, with any skipped paths filtered out."""
return sorted(target for target in targets if target.path not in self.skip_entries)
def process_errors(self, errors, paths): # type: (t.List[SanityMessage], t.List[str]) -> t.List[SanityMessage]
"""Return the given errors filtered for ignores and with any settings related errors included."""
errors = self.filter_messages(errors)
errors.extend(self.get_errors(paths))
errors = sorted(set(errors))
return errors
def filter_messages(self, messages): # type: (t.List[SanityMessage]) -> t.List[SanityMessage]
"""Return a filtered list of the given messages using the entries that have been loaded."""
filtered = []
for message in messages:
if message.code in self.test.optional_error_codes and not self.args.enable_optional_errors:
continue
path_entry = self.ignore_entries.get(message.path)
if path_entry:
code = message.code if self.code else SanityIgnoreParser.NO_CODE
line_no = path_entry.get(code)
if line_no:
self.used_line_numbers.add(line_no)
continue
filtered.append(message)
return filtered
def get_errors(self, paths): # type: (t.List[str]) -> t.List[SanityMessage]
"""Return error messages related to issues with the file."""
messages = []
# unused errors
unused = [] # type: t.List[t.Tuple[int, str, str]]
if self.test.no_targets or self.test.all_targets:
# tests which do not accept a target list, or which use all targets, always return all possible errors, so all ignores can be checked
targets = SanityTargets.get_targets()
test_targets = SanityTargets.filter_and_inject_targets(self.test, targets)
paths = [target.path for target in test_targets]
for path in paths:
path_entry = self.ignore_entries.get(path)
if not path_entry:
continue
unused.extend((line_no, path, code) for code, line_no in path_entry.items() if line_no not in self.used_line_numbers)
messages.extend(SanityMessage(
code=self.code,
message="Ignoring '%s' on '%s' is unnecessary" % (code, path) if self.code else "Ignoring '%s' is unnecessary" % path,
path=self.parser.relative_path,
line=line,
column=1,
confidence=calculate_best_confidence(((self.parser.path, line), (path, 0)), self.args.metadata) if self.args.metadata.changes else None,
) for line, path, code in unused)
return messages
class SanitySuccess(TestSuccess):
"""Sanity test success."""
def __init__(self, test, python_version=None):
"""
:type test: str
:type python_version: str
"""
super(SanitySuccess, self).__init__(COMMAND, test, python_version)
class SanitySkipped(TestSkipped):
"""Sanity test skipped."""
def __init__(self, test, python_version=None):
"""
:type test: str
:type python_version: str
"""
super(SanitySkipped, self).__init__(COMMAND, test, python_version)
class SanityFailure(TestFailure):
"""Sanity test failure."""
def __init__(self, test, python_version=None, messages=None, summary=None):
"""
:type test: str
:type python_version: str
:type messages: list[SanityMessage]
:type summary: unicode
"""
super(SanityFailure, self).__init__(COMMAND, test, python_version, messages, summary)
class SanityMessage(TestMessage):
"""Single sanity test message for one file."""
class SanityTargets:
"""Sanity test target information."""
def __init__(self, targets, include): # type: (t.Tuple[TestTarget], t.Tuple[TestTarget]) -> None
self.targets = targets
self.include = include
@staticmethod
def create(include, exclude, require): # type: (t.List[str], t.List[str], t.List[str]) -> SanityTargets
"""Create a SanityTargets instance from the given include, exclude and require lists."""
_targets = SanityTargets.get_targets()
_include = walk_internal_targets(_targets, include, exclude, require)
return SanityTargets(_targets, _include)
@staticmethod
def filter_and_inject_targets(test, targets): # type: (SanityTest, t.Iterable[TestTarget]) -> t.List[TestTarget]
"""Filter and inject targets based on test requirements and the given target list."""
test_targets = list(targets)
if not test.include_symlinks:
# remove all symlinks unless supported by the test
test_targets = [target for target in test_targets if not target.symlink]
if not test.include_directories or not test.include_symlinks:
# exclude symlinked directories unless supported by the test
test_targets = [target for target in test_targets if not target.path.endswith(os.path.sep)]
if test.include_directories:
# include directories containing any of the included files
test_targets += tuple(TestTarget(path, None, None, '') for path in paths_to_dirs([target.path for target in test_targets]))
if not test.include_symlinks:
# remove all directory symlinks unless supported by the test
test_targets = [target for target in test_targets if not target.symlink]
return test_targets
@staticmethod
def get_targets(): # type: () -> t.Tuple[TestTarget, ...]
"""Return a tuple of sanity test targets. Uses a cached version when available."""
try:
return SanityTargets.get_targets.targets
except AttributeError:
SanityTargets.get_targets.targets = tuple(sorted(walk_sanity_targets()))
return SanityTargets.get_targets.targets
class SanityTest(ABC):
"""Sanity test base class."""
__metaclass__ = abc.ABCMeta
ansible_only = False
def __init__(self, name):
self.name = name
self.enabled = True
# Optional error codes represent errors which spontaneously occur without changes to the content under test, such as those based on the current date.
# Because these errors can be unpredictable they behave differently than normal error codes:
# * They are not reported by default. The `--enable-optional-errors` option must be used to display these errors.
# * They cannot be ignored. This is done to maintain the integrity of the ignore system.
self.optional_error_codes = set()
@property
def error_code(self): # type: () -> t.Optional[str]
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return None
@property
def can_ignore(self): # type: () -> bool
"""True if the test supports ignore entries."""
return True
@property
def can_skip(self): # type: () -> bool
"""True if the test supports skip entries."""
return not self.all_targets and not self.no_targets
@property
def all_targets(self): # type: () -> bool
"""True if test targets will not be filtered using includes, excludes, requires or changes. Mutually exclusive with no_targets."""
return False
@property
def no_targets(self): # type: () -> bool
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return False
@property
def include_directories(self): # type: () -> bool
"""True if the test targets should include directories."""
return False
@property
def include_symlinks(self): # type: () -> bool
"""True if the test targets should include symlinks."""
return False
@property
def py2_compat(self): # type: () -> bool
"""True if the test only applies to code that runs on Python 2.x."""
return False
@property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return CONTROLLER_PYTHON_VERSIONS
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] # pylint: disable=unused-argument
"""Return the given list of test targets, filtered to include only those relevant for the test."""
if self.no_targets:
return []
raise NotImplementedError('Sanity test "%s" must implement "filter_targets" or set "no_targets" to True.' % self.name)
def filter_targets_by_version(self, targets, python_version): # type: (t.List[TestTarget], str) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test, taking into account the Python version."""
del python_version # python_version is not used here, but derived classes may make use of it
targets = self.filter_targets(targets)
if self.py2_compat:
# This sanity test is a Python 2.x compatibility test.
content_config = get_content_config()
if content_config.py2_support:
# This collection supports Python 2.x.
# Filter targets to include only those that require support for remote-only Python versions.
targets = self.filter_remote_targets(targets)
else:
# This collection does not support Python 2.x.
# There are no targets to test.
targets = []
return targets
def filter_remote_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return a filtered list of the given targets, including only those that require support for remote-only Python versions."""
targets = [target for target in targets if (
is_subdir(target.path, data_context().content.module_path) or
is_subdir(target.path, data_context().content.module_utils_path) or
is_subdir(target.path, data_context().content.unit_module_path) or
is_subdir(target.path, data_context().content.unit_module_utils_path) or
# include modules/module_utils within integration test library directories
re.search('^%s/.*/library/' % re.escape(data_context().content.integration_targets_path), target.path) or
# special handling for content in ansible-core
(data_context().content.is_ansible and (
# temporary solution until ansible-test code is reorganized when the split controller/remote implementation is complete
is_subdir(target.path, 'test/lib/ansible_test/') or
# integration test support modules/module_utils continue to require support for remote-only Python versions
re.search('^test/support/integration/.*/(modules|module_utils)/', target.path)
))
)]
return targets
class SanityCodeSmellTest(SanityTest):
"""Sanity test script."""
def __init__(self, path):
name = os.path.splitext(os.path.basename(path))[0]
config_path = os.path.splitext(path)[0] + '.json'
super(SanityCodeSmellTest, self).__init__(name)
self.path = path
self.config_path = config_path if os.path.exists(config_path) else None
self.config = None
if self.config_path:
self.config = read_json_file(self.config_path)
if self.config:
self.enabled = not self.config.get('disabled')
self.output = self.config.get('output') # type: t.Optional[str]
self.extensions = self.config.get('extensions') # type: t.List[str]
self.prefixes = self.config.get('prefixes') # type: t.List[str]
self.files = self.config.get('files') # type: t.List[str]
self.text = self.config.get('text') # type: t.Optional[bool]
self.ignore_self = self.config.get('ignore_self') # type: bool
self.intercept = self.config.get('intercept') # type: bool
self.minimum_python_version = self.config.get('minimum_python_version') # type: t.Optional[str]
self.__all_targets = self.config.get('all_targets') # type: bool
self.__no_targets = self.config.get('no_targets') # type: bool
self.__include_directories = self.config.get('include_directories') # type: bool
self.__include_symlinks = self.config.get('include_symlinks') # type: bool
self.__py2_compat = self.config.get('py2_compat', False) # type: bool
else:
self.output = None
self.extensions = []
self.prefixes = []
self.files = []
self.text = None # type: t.Optional[bool]
self.ignore_self = False
self.intercept = False
self.minimum_python_version = None # type: t.Optional[str]
self.__all_targets = False
self.__no_targets = True
self.__include_directories = False
self.__include_symlinks = False
self.__py2_compat = False
if self.no_targets:
mutually_exclusive = (
'extensions',
'prefixes',
'files',
'text',
'ignore_self',
'all_targets',
'include_directories',
'include_symlinks',
)
problems = sorted(name for name in mutually_exclusive if getattr(self, name))
if problems:
raise ApplicationError('Sanity test "%s" option "no_targets" is mutually exclusive with options: %s' % (self.name, ', '.join(problems)))
@property
def all_targets(self): # type: () -> bool
"""True if test targets will not be filtered using includes, excludes, requires or changes. Mutually exclusive with no_targets."""
return self.__all_targets
@property
def no_targets(self): # type: () -> bool
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return self.__no_targets
@property
def include_directories(self): # type: () -> bool
"""True if the test targets should include directories."""
return self.__include_directories
@property
def include_symlinks(self): # type: () -> bool
"""True if the test targets should include symlinks."""
return self.__include_symlinks
@property
def py2_compat(self): # type: () -> bool
"""True if the test only applies to code that runs on Python 2.x."""
return self.__py2_compat
@property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
versions = super(SanityCodeSmellTest, self).supported_python_versions
if self.minimum_python_version:
versions = tuple(version for version in versions if str_to_version(version) >= str_to_version(self.minimum_python_version))
return versions
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test."""
if self.no_targets:
return []
if self.text is not None:
if self.text:
targets = [target for target in targets if not is_binary_file(target.path)]
else:
targets = [target for target in targets if is_binary_file(target.path)]
if self.extensions:
targets = [target for target in targets if os.path.splitext(target.path)[1] in self.extensions
or (is_subdir(target.path, 'bin') and '.py' in self.extensions)]
if self.prefixes:
targets = [target for target in targets if any(target.path.startswith(pre) for pre in self.prefixes)]
if self.files:
targets = [target for target in targets if os.path.basename(target.path) in self.files]
if self.ignore_self and data_context().content.is_ansible:
relative_self_path = os.path.relpath(self.path, data_context().content.root)
targets = [target for target in targets if target.path != relative_self_path]
return targets
def test(self, args, targets, python_version):
"""
:type args: SanityConfig
:type targets: SanityTargets
:type python_version: str
:rtype: TestResult
"""
cmd = [find_python(python_version), self.path]
env = ansible_environment(args, color=False)
pattern = None
data = None
settings = self.load_processor(args)
paths = [target.path for target in targets.include]
if self.config:
if self.output == 'path-line-column-message':
pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'
elif self.output == 'path-message':
pattern = '^(?P<path>[^:]*): (?P<message>.*)$'
            else:
                raise ApplicationError('Unsupported output type: %s' % self.output)
if not self.no_targets:
data = '\n'.join(paths)
if data:
display.info(data, verbosity=4)
try:
if self.intercept:
stdout, stderr = intercept_command(args, cmd, target_name='sanity.%s' % self.name, data=data, env=env, capture=True, disable_coverage=True)
else:
stdout, stderr = run_command(args, cmd, data=data, env=env, capture=True)
status = 0
except SubprocessError as ex:
stdout = ex.stdout
stderr = ex.stderr
status = ex.status
if args.explain:
return SanitySuccess(self.name)
if stdout and not stderr:
if pattern:
matches = parse_to_list_of_dict(pattern, stdout)
messages = [SanityMessage(
message=m['message'],
path=m['path'],
line=int(m.get('line', 0)),
column=int(m.get('column', 0)),
) for m in matches]
messages = settings.process_errors(messages, paths)
if not messages:
return SanitySuccess(self.name)
return SanityFailure(self.name, messages=messages)
if stderr or status:
summary = u'%s' % SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
return SanityFailure(self.name, summary=summary)
messages = settings.process_errors([], paths)
if messages:
return SanityFailure(self.name, messages=messages)
return SanitySuccess(self.name)
def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor
"""Load the ignore processor for this sanity test."""
return SanityIgnoreProcessor(args, self, None)
class SanityFunc(SanityTest):
"""Base class for sanity test plugins."""
def __init__(self):
name = self.__class__.__name__
name = re.sub(r'Test$', '', name) # drop Test suffix
name = re.sub(r'(.)([A-Z][a-z]+)', r'\1-\2', name).lower() # use dashes instead of capitalization
super(SanityFunc, self).__init__(name)
class SanityVersionNeutral(SanityFunc):
"""Base class for sanity test plugins which are idependent of the python version being used."""
@abc.abstractmethod
def test(self, args, targets):
"""
:type args: SanityConfig
:type targets: SanityTargets
:rtype: TestResult
"""
def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor
"""Load the ignore processor for this sanity test."""
return SanityIgnoreProcessor(args, self, None)
@property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return None
class SanitySingleVersion(SanityFunc):
"""Base class for sanity test plugins which should run on a single python version."""
@abc.abstractmethod
def test(self, args, targets, python_version):
"""
:type args: SanityConfig
:type targets: SanityTargets
:type python_version: str
:rtype: TestResult
"""
def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor
"""Load the ignore processor for this sanity test."""
return SanityIgnoreProcessor(args, self, None)
class SanityMultipleVersion(SanityFunc):
"""Base class for sanity test plugins which should run on multiple python versions."""
@abc.abstractmethod
def test(self, args, targets, python_version):
"""
:type args: SanityConfig
:type targets: SanityTargets
:type python_version: str
:rtype: TestResult
"""
def load_processor(self, args, python_version): # type: (SanityConfig, str) -> SanityIgnoreProcessor
"""Load the ignore processor for this sanity test."""
return SanityIgnoreProcessor(args, self, python_version)
@property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return SUPPORTED_PYTHON_VERSIONS
def filter_targets_by_version(self, targets, python_version): # type: (t.List[TestTarget], str) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test, taking into account the Python version."""
if not python_version:
raise Exception('python_version is required to filter multi-version tests')
targets = super(SanityMultipleVersion, self).filter_targets_by_version(targets, python_version)
if python_version in REMOTE_ONLY_PYTHON_VERSIONS:
content_config = get_content_config()
if python_version not in content_config.modules.python_versions:
# when a remote-only python version is not supported there are no paths to test
return []
# when a remote-only python version is supported, tests must be applied only to targets that support remote-only Python versions
targets = self.filter_remote_targets(targets)
return targets
# Populated at runtime by sanity_init() below.
SANITY_TESTS = (
)
def sanity_init():
"""Initialize full sanity test list (includes code-smell scripts determined at runtime)."""
import_plugins('commands/sanity')
sanity_plugins = {} # type: t.Dict[str, t.Type[SanityFunc]]
load_plugins(SanityFunc, sanity_plugins)
sanity_tests = tuple(plugin() for plugin in sanity_plugins.values() if data_context().content.is_ansible or not plugin.ansible_only)
global SANITY_TESTS # pylint: disable=locally-disabled, global-statement
SANITY_TESTS = tuple(sorted(sanity_tests + collect_code_smell_tests(), key=lambda k: k.name))
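# --- Illustrative sketch (not part of the original module) -----------------
# A minimal version-neutral sanity plugin of the kind loaded by sanity_init()
# from commands/sanity/. The class name determines the test name
# ("ExampleTest" -> "example"). Everything below is a hedged sketch, not an
# actual ansible-test plugin.
#
#     class ExampleTest(SanityVersionNeutral):
#         """Fail if any target path contains whitespace."""
#         def filter_targets(self, targets):
#             return [target for target in targets if ' ' in target.path]
#
#         def test(self, args, targets):
#             messages = [SanityMessage(message='path contains whitespace',
#                                       path=target.path, line=1, column=1)
#                         for target in targets.include]
#             if messages:
#                 return SanityFailure(self.name, messages=messages)
#             return SanitySuccess(self.name)
# ----------------------------------------------------------------------------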
| thnee/ansible | test/lib/ansible_test/_internal/commands/sanity/__init__.py | Python | gpl-3.0 | 43,765 |
from abc import ABCMeta, abstractmethod, abstractproperty
from ruleset import Ruleset
from device import Device
from propagation_model import PropagationModel
from region import Region
from boundary import Boundary
from data_map import DataMap2D, DataMap3D, DataMap2DWithFixedBoundingBox
from population import PopulationData
from custom_logging import getModuleLogger
import os
import textwrap
from configuration import base_data_directory
def _is_class(obj):
"""Returns True if ``obj`` is a class and False if it is an instance."""
return issubclass(obj.__class__, type)
def _is_object(obj):
"""Returns True if ``obj`` is an instance and False if it is a class."""
return not _is_class(obj)
def _make_string(obj):
def obj_belongs_to(class_object):
return (_is_class(obj) and issubclass(obj, class_object)) or (
_is_object(obj) and isinstance(obj, class_object))
def get_class_name():
if _is_class(obj):
return obj.__name__
else:
return obj.__class__.__name__
if obj_belongs_to(Ruleset) or obj_belongs_to(PropagationModel) or \
obj_belongs_to(Boundary) or obj_belongs_to(DataMap2D):
return get_class_name()
elif obj_belongs_to(Device):
if _is_class(obj):
raise TypeError("Expected an actual Device object.")
else:
if obj.is_portable():
return "Device(portable)"
else:
return "Device(fixed,HAAT=%d)" % obj.get_haat()
elif obj_belongs_to(Region):
return get_class_name()
class Specification(object):
"""
A Specification is the minimum amount of information needed to describe a
set of data. A Specification can be used to create data, fetch data,
and automatically map data.
Specifications are best-effort data caches which are meant to aid in data
generation and organization.
Guiding principles:
* The user is responsible for cache invalidation.
* A best-effort attempt at avoiding naming collisions has been made but
nothing should be considered certain.
* When possible, load data from disk. When not possible, generate the
data, save it, and then load it from disk.
* When possible, allow the user to specify either a class name or an
instance of the class. If an instance is specified, that instance
will be used if an instance is needed. Otherwise, an instance will
be created only when it becomes necessary for data generation.
Notes for extending this class:
* Become familiar with the use of the many helper functions. See e.g. the
init function for :class:`SpecificationWhitespaceMap` for example
usage.
* Be sure that :meth:`make_data` returns the data in addition to saving
it.
* Implement :meth:`get_map` if possible.
* Filenames should not exceed 255 characters in order to be compatible
with common file systems.
"""
__metaclass__ = ABCMeta
@abstractmethod
def to_string(self):
"""Returns the string representation of the Specification."""
pass
@abstractproperty
def subdirectory(self):
"""Returns a string with the name of the data subdirectory to be used
for storing the data created by this Specification."""
pass
@abstractmethod
def make_data(self):
"""
Creates the data based on the information in the Specification. Must
both save and return created data.
See also: :meth:`save_data`.
"""
pass
def get_map(self):
"""Optionally-implemented method which will create the default map for
the Specification."""
raise NotImplementedError("")
def _get_datamap_spec(self):
"""If possible, returns the internal :class:`SpecificationDataMap`. To
succeed, the Specification must satisfy at least one of the following:
* Be a SpecificationDataMap
* Have an attribute "datamap_spec" which is a SpecificationDataMap
object
* Have an attribute "region_map_spec" which is a
SpecificationRegionMap object
Raises an AttributeError if no SpecificationDataMap is found.
"""
if isinstance(self, SpecificationDataMap):
return self
if hasattr(self, "datamap_spec"):
return self.datamap_spec
if hasattr(self, "region_map_spec"):
return self.region_map_spec._get_datamap_spec()
raise AttributeError("No datamap specification found (expected to "
"find one of the following attributes: "
"datamap_spec, region_map_spec")
def _convert_to_class_and_object(self, var_name, obj,
may_create_new_objects=True, **kwargs):
"""
Sets the internal variables [var_name]_class, [var_name]_object based on
``obj``. ``obj`` may be either a class or an instance of a class.
If ``obj`` is a class, the object will be created only if
``may_create_new_objects`` is True. In that case, the keyword
arguments are passed to the constructor.
If ``obj`` is an instance, that instance will be used.
"""
if _is_class(obj):
setattr(self, var_name + "_class", obj)
if may_create_new_objects:
setattr(self, var_name + "_object", obj(**kwargs))
else:
setattr(self, var_name + "_object", obj)
setattr(self, var_name + "_class", obj.__class__)
def _boundary_to_class_and_object(self, boundary):
self._convert_to_class_and_object("boundary", boundary)
def _region_to_class_and_object(self, region):
self._convert_to_class_and_object("region", region)
def _ruleset_to_class_and_object(self, ruleset):
self._convert_to_class_and_object("ruleset", ruleset)
def _propagation_model_to_class_and_object(self, propagation_model):
self._convert_to_class_and_object("propagation_model",
propagation_model)
def _store_at_least_class(self, var_name, obj):
"""Stores at minimum the class of ``obj``. If ``obj`` is an instance
(rather than a class), ``obj`` will be stored as well."""
self._convert_to_class_and_object(var_name, obj,
may_create_new_objects=False)
def _create_obj_if_needed(self, var_name, **kwargs):
"""If [var_name]_object does not exist, create it. In that case, the
keyword arguments are passed to the constructor."""
if hasattr(self, var_name + "_object"):
return
obj_class = getattr(self, var_name + "_class")
setattr(self, var_name + "_object", obj_class(**kwargs))
def _expect_of_type(self, obj, expected_types):
"""Raise a TypeError if ``obj`` is neither a subclass nor an instance of
one of the expected types.
expected_types may be either a list or a singleton."""
if not isinstance(expected_types, list):
expected_types = [expected_types]
for e_type in expected_types:
if not _is_class(e_type):
raise TypeError("Expected type must be a class (got '%s' "
"instead)." % str(expected_types))
if _is_class(obj):
cls = obj
else:
cls = obj.__class__
is_wrong_type = True
for e_type in expected_types:
if issubclass(cls, e_type):
is_wrong_type = False
if is_wrong_type:
raise TypeError("Expected something of a type in %s (either a "
"class or object) but received something of "
"type %s." % (str(expected_types), cls.__name__))
def _expect_is_object(self, obj):
"""Raise a TypeError if ``obj`` is not an instance."""
if not _is_object(obj):
raise TypeError("Expected to receive an instance and instead "
"received %s." % str(obj))
def _expect_is_class(self, obj):
"""Raise a TypeError if ``obj`` is not a class."""
if not _is_class(obj):
raise TypeError("Expected to receive a class and instead "
"received %s." % str(obj))
@property
def filename(self):
"""Returns a string which is the full path to the file."""
return os.path.join(self.full_directory, self.to_string() + ".pkl")
@property
def full_directory(self):
"""Returns a string which is the full directory path in which the file
will be stored."""
return os.path.join(base_data_directory, self.subdirectory)
def data_exists(self):
"""Returns True if data with the associated filename already exists and
False otherwise."""
return os.path.isfile(self.filename)
def load_data(self):
"""Loads the :class:`data_map.DataMap2D` or :class:`data_map.DataMap3D`
from a pickle. The filename is determined by :meth:`filename`."""
if self._get_datamap_spec().is_datamap2d():
return DataMap2D.from_pickle(self.filename)
else:
return DataMap3D.from_pickle(self.filename)
def save_data(self, datamap):
"""Save the :class:`data_map.DataMap2D` or :class:`data_map.DataMap3D`
to a pickle. The filename is determined
by :meth:`filename`."""
self._expect_of_type(datamap, [DataMap2D, DataMap3D])
if not os.path.isdir(self.full_directory):
os.makedirs(self.full_directory)
datamap.to_pickle(self.filename)
def fetch_data(self):
"""Fetch the data described by this Specification. If none exists, the
data will be made and then loaded.
Components: :meth:`load_data`, :meth:`make_data`
"""
if not hasattr(self, "log"):
self.log = getModuleLogger(self)
if self.data_exists():
self.log.debug("Fetching data (LOAD): %s" % self.to_string())
data = self.load_data()
else:
self.log.debug("Fetching data (MAKE): %s" % self.to_string())
data = self.make_data()
if data is None:
raise ValueError("No data loaded")
return data
def _set_map_title(self, map):
"""Automatically sets the map title from the filename."""
map.title_font_size = 10
wrapped_title = "\n".join(textwrap.wrap(self.to_string(), 80))
map.set_title(wrapped_title)
class SpecificationDataMap(Specification):
"""
This Specification describes a :class:`data_map.DataMap2D`. The
Specification must be created with a class derived from
:class:`data_map.DataMap2DWithFixedBoundingBox`, e.g.
:class:`data_map.DataMap2DContinentalUnitedStates`.
Unlike other classes, it will *always* create a new DataMap2D/DataMap3D when
"making" or fetching data. Data will
never be saved.
"""
def __init__(self, datamap_derived_class, num_latitude_divisions,
num_longitude_divisions):
self._expect_of_type(datamap_derived_class,
DataMap2DWithFixedBoundingBox)
self._expect_of_type(num_latitude_divisions, int)
self._expect_of_type(num_longitude_divisions, int)
self._store_at_least_class("datamap", datamap_derived_class)
self.num_latitude_divisions = num_latitude_divisions
self.num_longitude_divisions = num_longitude_divisions
def to_string(self):
return "%s_%dx%d" % (_make_string(self.datamap_class),
self.num_latitude_divisions,
self.num_longitude_divisions)
def make_data(self):
return self.datamap_class.create(self.num_latitude_divisions,
self.num_longitude_divisions)
@property
def subdirectory(self):
# Data is never saved
return None
def is_datamap2d(self):
"""Returns True if this Specification describes a
:class:`data_map.DataMap2D`."""
return issubclass(self.datamap_class, DataMap2D)
def is_datamap3d(self):
"""Returns True if this Specification describes a
:class:`data_map.DataMap3D`."""
return issubclass(self.datamap_class, DataMap3D)
def data_exists(self):
# Override the natural behavior so that the Specification never tried
# to load the data
return False
class SpecificationRegionMap(Specification):
"""
This Specification describes a :class:`data_map.DataMap2D` which contains
boolean data. Values will be True (or truthy) if and only if the pixel's
center is inside the :class:`boundary.Boundary`.
"""
def __init__(self, boundary, datamap_spec):
self._expect_of_type(boundary, Boundary)
self._expect_of_type(datamap_spec, SpecificationDataMap)
if not datamap_spec.is_datamap2d():
raise TypeError("The datamap spec must describe a DataMap2D.")
self._store_at_least_class("boundary", boundary)
self.datamap_spec = datamap_spec
def make_data(self):
self._create_obj_if_needed("boundary")
boundary = self.boundary_object
datamap = self.datamap_spec.fetch_data()
def is_in_region(latitude, longitude, latitude_index,
longitude_index, current_value):
location = (latitude, longitude)
return boundary.location_inside_boundary(location)
datamap.update_all_values_via_function(update_function=is_in_region)
self.save_data(datamap)
return datamap
def to_string(self):
return " ".join(["REGION_MAP", _make_string(self.boundary_class),
self.datamap_spec.to_string()])
@property
def subdirectory(self):
return "REGION_MAP"
def get_map(self):
"""Creates a linear-scale :class:`map.Map` with boundary outlines and a
white background. The title is automatically set using the
Specification information but can be reset with
:meth:`map.Map.set_title`. Returns a handle to the map object; does
not save or show the map."""
datamap = self.fetch_data()
self._create_obj_if_needed("boundary")
map = datamap.make_map(is_in_region_map=datamap)
map.add_boundary_outlines(self.boundary_object)
self._set_map_title(map)
return map
class SpecificationWhitespaceMap(Specification):
"""
This Specification describes a :class:`data_map.DataMap3D` which is True (or
truthy) for pixels which are considered whitespace for the device in
accordance with the :class:`ruleset.Ruleset`.
The resulting DataMap3D has layers described by
:meth:`region.Region.get_tvws_channel_list()`.
.. note:: The naming conventions for this class assume that the default \
:class:`protected_entities.ProtectedEntities` for the \
:class:`region.Region` should be used. To specify alternative \
protected entities, create a new class derived from the desired Region.
"""
def __init__(self, region_map_spec, region, ruleset, device_object,
propagation_model=None):
# Type checking
self._expect_of_type(region_map_spec, SpecificationRegionMap)
self._expect_of_type(region, Region)
self._expect_of_type(ruleset, Ruleset)
self._expect_is_object(device_object)
# Store data
self.region_map_spec = region_map_spec
self._store_at_least_class("region", region)
self._store_at_least_class("ruleset", ruleset)
self._convert_to_class_and_object("device", device_object)
# Propagation model needs special handling
if propagation_model is None:
self._create_obj_if_needed("ruleset")
propagation_model = self.ruleset_object.get_default_propagation_model()
self._expect_of_type(propagation_model, PropagationModel)
self._store_at_least_class("propagation_model", propagation_model)
def to_string(self):
return " ".join(["WHITESPACE_MAP",
"(%s)" % self.region_map_spec.to_string(),
_make_string(self.region_class),
_make_string(self.ruleset_class),
_make_string(self.propagation_model_class),
_make_string(self.device_object)])
@property
def subdirectory(self):
return "WHITESPACE_MAP"
def make_data(self):
self._create_obj_if_needed("region")
self._create_obj_if_needed("propagation_model")
self.ruleset_object.set_propagation_model(self.propagation_model_object)
region_datamap = self.region_map_spec.fetch_data()
channel_list = self.region_object.get_tvws_channel_list()
whitespace_datamap3d = DataMap3D.from_DataMap2D(region_datamap, channel_list)
for channel in channel_list:
channel_layer = whitespace_datamap3d.get_layer(channel)
self.ruleset_object.apply_all_protections_to_map(self.region_object, channel_layer, channel,
self.device_object)
self.save_data(whitespace_datamap3d)
return whitespace_datamap3d
def get_map(self):
"""Creates a linear-scale :class:`map.Map` with boundary outlines, a
white background, and a colorbar. The title is automatically set
using the Specification information but can be reset with
:meth:`map.Map.set_title`. Returns a handle to the map object; does
not save or show the map."""
datamap3d = self.fetch_data()
datamap2d = datamap3d.sum_all_layers()
region_map = self.region_map_spec.fetch_data()
self.region_map_spec._create_obj_if_needed("boundary")
boundary = self.region_map_spec.boundary_object
map = datamap2d.make_map(is_in_region_map=region_map)
map.add_boundary_outlines(boundary)
map.add_colorbar(decimal_precision=0)
map.set_colorbar_label("Number of available whitespace channels")
self._set_map_title(map)
return map
class SpecificationRegionAreaMap(Specification):
"""
This Specification describes a :class:`data_map.DataMap2D` where the value
of each pixel describes the area (in square kilometers) of the pixel.
This data may be useful e.g. to create a CDF by area using
:meth:`data_manipulation.calculate_cdf_from_datamap2d`.
"""
def __init__(self, datamap_spec):
self._expect_of_type(datamap_spec, SpecificationDataMap)
self._expect_is_object(datamap_spec)
self.datamap_spec = datamap_spec
@property
def subdirectory(self):
return "REGION_AREA"
def to_string(self):
return " ".join(["REGION_AREA", "(%s)" % self.datamap_spec.to_string()])
def make_data(self):
from geopy.distance import vincenty
datamap = self.datamap_spec.fetch_data()
latitude_width = float(datamap.latitudes[1] - datamap.latitudes[0])
longitude_width = float(datamap.longitudes[1] - datamap.longitudes[0])
def create_pixel_area(latitude, longitude, latitude_index,
longitude_index, current_value):
            # Calculate the pixel's area from the four corners of the
            # trapezoid it represents: it extends latitude_width/2
            # north-south and longitude_width/2 east-west from its center.
            # Northern corners share one latitude, southern corners the
            # other, so `top` and `bottom` are the (unequal) east-west edges
            # and `height` runs along a meridian.
            NW_lat = latitude + latitude_width/2
            NE_lat = NW_lat
            SW_lat = latitude - latitude_width/2
            SE_lat = SW_lat
            NW_lon = longitude - longitude_width/2
            SW_lon = NW_lon
            NE_lon = longitude + longitude_width/2
            SE_lon = NE_lon
            height = vincenty((NW_lat, NW_lon), (SW_lat, SW_lon)).kilometers
            top = vincenty((NW_lat, NW_lon), (NE_lat, NE_lon)).kilometers
            bottom = vincenty((SW_lat, SW_lon), (SE_lat, SE_lon)).kilometers
            # Area of a trapezoid: mean of the parallel edges times height.
            return 0.5*height*(top+bottom)
datamap.update_all_values_via_function(
update_function=create_pixel_area)
self.save_data(datamap)
return datamap
def get_map(self):
"""Creates a linear-scale :class:`map.Map` with a colorbar. The title is
automatically set using the Specification information but can be
reset with :meth:`map.Map.set_title`. Returns a handle to the map
object; does not save or show the map."""
datamap = self.fetch_data()
map = datamap.make_map()
map.add_colorbar()
map.set_colorbar_label("Area of pixel (km^2)")
self._set_map_title(map)
return map
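# --- Hedged usage sketch (addition; not part of the original module) ---
# The docstring above suggests pairing this map with
# data_manipulation.calculate_cdf_from_datamap2d; its exact signature is not
# shown in this file, so the weighting keyword below is an assumption:
#
#   area_spec = SpecificationRegionAreaMap(datamap_spec)
#   area_map = area_spec.fetch_data()
#   cdf = data_manipulation.calculate_cdf_from_datamap2d(
#       channel_count_map, weight_datamap2d=area_map)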
class SpecificationPopulationMap(Specification):
"""
This Specification describes a :class:`data_map.DataMap2D` where the value
of each pixel is the population of the pixel (in people).
This data may be useful e.g. to create a CDF by population using
:meth:`data_manipulation.calculate_cdf_from_datamap2d`.
"""
def __init__(self, region_map_spec, population):
self._expect_of_type(region_map_spec, SpecificationRegionMap)
self._expect_of_type(population, PopulationData)
self.region_map_spec = region_map_spec
self._store_at_least_class("population", population)
@property
def subdirectory(self):
return "POPULATION"
def to_string(self):
return " ".join(["POPULATION", "(%s)" %
self.region_map_spec.to_string()])
def make_data(self):
self._create_obj_if_needed("population")
region_datamap = self.region_map_spec.fetch_data()
population_datamap = self.population_object.create_population_map(
is_in_region_datamap2d=region_datamap)
self.save_data(population_datamap)
return population_datamap
def get_map(self):
"""Creates a log-scale :class:`map.Map` with boundary outlines, a white
background, and a colorbar. The title is automatically set using the
Specification information but can be reset with
:meth:`map.Map.set_title`. Returns a handle to the map object; does not
save or show the map."""
datamap = self.fetch_data()
region_datamap = self.region_map_spec.fetch_data()
self.region_map_spec._create_obj_if_needed("boundary")
boundary = self.region_map_spec.boundary_object
map = datamap.make_map(transformation='log',
is_in_region_map=region_datamap)
map.add_colorbar()
map.set_colorbar_label("Population")
map.add_boundary_outlines(boundary)
self._set_map_title(map)
return map
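# --- Hedged usage sketch (addition; not part of the original module) ---
# Population maps are commonly used to weight availability statistics by
# people rather than by area; ``population_data`` stands in for an assumed
# PopulationData instance, and the CDF call repeats the assumption above:
#
#   population_spec = SpecificationPopulationMap(region_map_spec,
#                                                population_data)
#   population_map = population_spec.fetch_data()
#   cdf = data_manipulation.calculate_cdf_from_datamap2d(
#       channel_count_map, weight_datamap2d=population_map)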
| kate-harrison/west | west/data_management.py | Python | gpl-2.0 | 23,067 |
# -*- coding: utf-8 -*-
# Copyright 2007-2020 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
"""
Import sklearn.* and randomized_svd from scikit-learn
"""
import warnings
try:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
import sklearn
import sklearn.decomposition
from sklearn.utils.extmath import randomized_svd
sklearn_installed = True
except ImportError:
randomized_svd = None
sklearn_installed = False
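# --- Hedged usage sketch (addition; not part of the original module) ---
# Callers are expected to guard on ``sklearn_installed`` before touching the
# imported names. ``X`` is assumed to be a 2D numpy array.
def _example_truncated_svd(X, n_components=2):
    """Return (U, S, V) from randomized_svd, or raise if sklearn is absent."""
    if not sklearn_installed:
        raise ImportError("scikit-learn is required for randomized_svd")
    # randomized_svd returns U (n x k), the singular values S (k,), and V (k x m).
    return randomized_svd(X, n_components=n_components)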
| dnjohnstone/hyperspy | hyperspy/misc/machine_learning/import_sklearn.py | Python | gpl-3.0 | 1,124 |
"""
sphinx.cmd.make_mode
~~~~~~~~~~~~~~~~~~~~
sphinx-build -M command-line handling.
This replaces the old, platform-dependent and once-generated content
of Makefile / make.bat.
This is in its own module so that importing it is fast. It should not
import the main Sphinx modules (like sphinx.applications, sphinx.builders).
:copyright: Copyright 2007-2021 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import os
import subprocess
import sys
from os import path
from typing import List
import sphinx
from sphinx.cmd.build import build_main
from sphinx.util.console import blue, bold, color_terminal, nocolor # type: ignore
from sphinx.util.osutil import cd, rmtree
BUILDERS = [
("", "html", "to make standalone HTML files"),
("", "dirhtml", "to make HTML files named index.html in directories"),
("", "singlehtml", "to make a single large HTML file"),
("", "pickle", "to make pickle files"),
("", "json", "to make JSON files"),
("", "htmlhelp", "to make HTML files and an HTML help project"),
("", "qthelp", "to make HTML files and a qthelp project"),
("", "devhelp", "to make HTML files and a Devhelp project"),
("", "epub", "to make an epub"),
("", "latex", "to make LaTeX files, you can set PAPER=a4 or PAPER=letter"),
("posix", "latexpdf", "to make LaTeX and PDF files (default pdflatex)"),
("posix", "latexpdfja", "to make LaTeX files and run them through platex/dvipdfmx"),
("", "text", "to make text files"),
("", "man", "to make manual pages"),
("", "texinfo", "to make Texinfo files"),
("posix", "info", "to make Texinfo files and run them through makeinfo"),
("", "gettext", "to make PO message catalogs"),
("", "changes", "to make an overview of all changed/added/deprecated items"),
("", "xml", "to make Docutils-native XML files"),
("", "pseudoxml", "to make pseudoxml-XML files for display purposes"),
("", "linkcheck", "to check all external links for integrity"),
("", "doctest", "to run all doctests embedded in the documentation "
"(if enabled)"),
("", "coverage", "to run coverage check of the documentation (if enabled)"),
("", "clean", "to remove everything in the build directory"),
]
class Make:
def __init__(self, srcdir: str, builddir: str, opts: List[str]) -> None:
self.srcdir = srcdir
self.builddir = builddir
self.opts = opts
        self.makecmd = os.environ.get('MAKE', 'make')  # consult $MAKE to determine the make command
def builddir_join(self, *comps: str) -> str:
return path.join(self.builddir, *comps)
def build_clean(self) -> int:
srcdir = path.abspath(self.srcdir)
builddir = path.abspath(self.builddir)
if not path.exists(self.builddir):
return 0
elif not path.isdir(self.builddir):
print("Error: %r is not a directory!" % self.builddir)
return 1
elif srcdir == builddir:
print("Error: %r is same as source directory!" % self.builddir)
return 1
elif path.commonpath([srcdir, builddir]) == builddir:
print("Error: %r directory contains source directory!" % self.builddir)
return 1
print("Removing everything under %r..." % self.builddir)
for item in os.listdir(self.builddir):
rmtree(self.builddir_join(item))
return 0
def build_help(self) -> None:
if not color_terminal():
nocolor()
print(bold("Sphinx v%s" % sphinx.__display_version__))
print("Please use `make %s' where %s is one of" % ((blue('target'),) * 2))
for osname, bname, description in BUILDERS:
if not osname or os.name == osname:
print(' %s %s' % (blue(bname.ljust(10)), description))
def build_latexpdf(self) -> int:
if self.run_generic_build('latex') > 0:
return 1
if sys.platform == 'win32':
makecmd = os.environ.get('MAKE', 'make.bat')
else:
makecmd = self.makecmd
try:
with cd(self.builddir_join('latex')):
return subprocess.call([makecmd, 'all-pdf'])
except OSError:
print('Error: Failed to run: %s' % makecmd)
return 1
def build_latexpdfja(self) -> int:
if self.run_generic_build('latex') > 0:
return 1
if sys.platform == 'win32':
makecmd = os.environ.get('MAKE', 'make.bat')
else:
makecmd = self.makecmd
try:
with cd(self.builddir_join('latex')):
return subprocess.call([makecmd, 'all-pdf'])
except OSError:
print('Error: Failed to run: %s' % makecmd)
return 1
def build_info(self) -> int:
if self.run_generic_build('texinfo') > 0:
return 1
try:
with cd(self.builddir_join('texinfo')):
return subprocess.call([self.makecmd, 'info'])
except OSError:
print('Error: Failed to run: %s' % self.makecmd)
return 1
def build_gettext(self) -> int:
dtdir = self.builddir_join('gettext', '.doctrees')
if self.run_generic_build('gettext', doctreedir=dtdir) > 0:
return 1
return 0
def run_generic_build(self, builder: str, doctreedir: str = None) -> int:
# compatibility with old Makefile
papersize = os.getenv('PAPER', '')
opts = self.opts
if papersize in ('a4', 'letter'):
opts.extend(['-D', 'latex_elements.papersize=' + papersize + 'paper'])
if doctreedir is None:
doctreedir = self.builddir_join('doctrees')
args = ['-b', builder,
'-d', doctreedir,
self.srcdir,
self.builddir_join(builder)]
return build_main(args + opts)
def run_make_mode(args: List[str]) -> int:
if len(args) < 3:
print('Error: at least 3 arguments (builder, source '
'dir, build dir) are required.', file=sys.stderr)
return 1
make = Make(args[1], args[2], args[3:])
run_method = 'build_' + args[0]
if hasattr(make, run_method):
return getattr(make, run_method)()
return make.run_generic_build(args[0])
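# --- Hedged usage sketch (addition; not part of the original module) ---
# ``sphinx-build -M <builder> <sourcedir> <builddir> [options]`` strips the
# leading ``-M`` and passes the remaining arguments here; the paths below are
# illustrative placeholders.
def _example_run_make_mode() -> int:
    # Equivalent to: sphinx-build -M html docs/source docs/build -W
    return run_make_mode(['html', 'docs/source', 'docs/build', '-W'])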
| sonntagsgesicht/regtest | .aux/venv/lib/python3.9/site-packages/sphinx/cmd/make_mode.py | Python | apache-2.0 | 6,580 |
# ``flask.ext.*`` was removed in Flask 1.0; the extension is imported from
# its top-level package instead.
from flask_assets import Bundle
from . import wa
js_libs = Bundle('js/libs/jquery.min.js',
'js/libs/bootstrap.min.js',
'js/libs/lodash.min.js',
#filters='jsmin',
output='js/libs.js')
js_board = Bundle('js/libs/drawingboard.min.js',
#filters='jsmin',
output='js/board.js')
js_main = Bundle('js/main.js',
#filters='jsmin',
output='js/snh.js')
css_main = Bundle('css/bootstrap.min.css',
'css/font-awesome.min.css',
'css/main.css',
filters='cssmin',
output='css/snh.css')
css_board = Bundle('css/drawingboard.min.css',
filters='cssmin',
output='css/board.css')
wa.register('js_libs', js_libs)
wa.register('js_board', js_board)
wa.register('js_main', js_main)
wa.register('css_main', css_main)
wa.register('css_board', css_board)
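# --- Hedged usage sketch (addition; not part of the original module) ---
# With Flask-Assets, a template emits a registered bundle via the ``assets``
# Jinja2 tag; the snippet below is illustrative:
#
#   {% assets "css_main" %}
#     <link rel="stylesheet" href="{{ ASSET_URL }}">
#   {% endassets %}
#   {% assets "js_libs" %}
#     <script src="{{ ASSET_URL }}"></script>
#   {% endassets %}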
| luizdepra/sketch_n_hit | app/assets.py | Python | mit | 984 |