repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (19 classes) | size (string, 4-7 chars) | content (string, 721-1.04M chars) | license (15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
pudo/krauler | krauler/mf.py | 1 | 2609 | import os
import logging
from lxml import html
from dateutil.parser import parse
import metafolder
from krauler.threaded import ThreadedKrauler
from krauler.signals import on_meta
log = logging.getLogger(__name__)
class MetaFolderKrauler(ThreadedKrauler):
@property
def metafolder(self):
if not hasattr(self, '_metafolder'):
path = self.config.data.get('path', '.')
path = os.path.expandvars(path)
path = os.path.expanduser(path)
path = os.path.abspath(path)
log.info("Saving output to: %r", path)
self._metafolder = metafolder.open(path)
return self._metafolder
@property
def overwrite(self):
return self.config.data.get('overwrite', False)
def get_content(self, page, meta):
if not page.is_html:
return page.content
check_path = self.config.data.get('check_path')
if check_path is not None:
if page.doc.find(check_path) is None:
log.info("Failed XML path check: %r", page.url)
return None
for meta_el in ['title', 'author', 'date']:
path = self.config.data.get('%s_path' % meta_el)
if path is not None and page.doc.findtext(path):
meta[meta_el] = page.doc.findtext(path)
if 'date' in meta:
try:
date = meta.pop('date')
date = parse(date)
if 'dates' not in meta:
meta['dates'] = []
meta['dates'].append(date.isoformat())
except Exception as ex:
log.exception(ex)
body = page.doc
if self.config.data.get('body_path') is not None:
body = page.doc.find(self.config.data.get('body_path'))
for path in self.config.data.get('remove_paths', []):
for el in body.findall(path):
el.drop_tree()
return html.tostring(body)
def emit(self, page):
if not self.overwrite:
if self.metafolder.get(page.url).exists:
return
meta = self.config.data.get('meta', {}).copy()
data = self.get_content(page, meta)
if data is None:
return
meta['source_url'] = page.url
meta['foreign_id'] = page.url
if page.file_name:
meta['file_name'] = page.file_name
meta['mime_type'] = page.mime_type
meta['headers'] = dict(page.response.headers)
on_meta.send(self, page=page, meta=meta)
self.metafolder.add_data(data, page.url, meta=meta)
| mit | -4,767,756,229,546,484,000 | 30.433735 | 67 | 0.560368 | false |
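The krauler class above reads only a handful of keys from self.config.data. A minimal sketch of such a configuration follows; every concrete value is an invented placeholder, only the key names are taken from the code.

example_config = {
    'path': '~/crawls/example',                 # metafolder output directory
    'overwrite': False,                         # skip URLs that are already stored
    'check_path': './/div[@id="doc"]',          # hypothetical XPath sanity check
    'title_path': './/h1',                      # hypothetical metadata XPaths
    'author_path': './/span[@class="author"]',
    'date_path': './/time',
    'body_path': './/div[@id="doc"]',
    'remove_paths': ['.//script'],
    'meta': {'source': 'example'},
}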
andrewgiessel/folium | folium/utilities.py | 1 | 19979 | # -*- coding: utf-8 -*-
"""
Utilities
-------
Utility module for Folium helper functions.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import time
import math
import zlib
import struct
import json
import base64
from jinja2 import Environment, PackageLoader
try:
import pandas as pd
except ImportError:
pd = None
try:
import numpy as np
except ImportError:
np = None
from folium.six import iteritems, text_type, binary_type
def get_templates():
"""Get Jinja templates."""
return Environment(loader=PackageLoader('folium', 'templates'))
def legend_scaler(legend_values, max_labels=10.0):
"""
Downsamples the number of legend values so that there isn't a collision
of text on the legend colorbar (within reason). The colorbar seems to
support ~10 entries as a maximum.
"""
if len(legend_values) < max_labels:
legend_ticks = legend_values
else:
spacer = int(math.ceil(len(legend_values)/max_labels))
legend_ticks = []
for i in legend_values[::spacer]:
legend_ticks += [i]
legend_ticks += ['']*(spacer-1)
return legend_ticks
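# Illustrative example (input values assumed): with 12 labels and the default
# max_labels=10.0 the spacer is ceil(12/10) == 2, so every second label is kept
# and the gaps are padded with empty strings:
#   legend_scaler([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
#   -> [0, '', 2, '', 4, '', 6, '', 8, '', 10, '']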
def linear_gradient(hexList, nColors):
"""
    Given a list of hex color codes, will return a list of length
    nColors where the colors are linearly interpolated between the
    hex codes that are given.
    Example:
    linear_gradient(['#000000', '#FF0000', '#FFFF00'], 100)
"""
def _scale(start, finish, length, i):
"""
        Return the correct value of a number that is in between start
        and finish, for use in a loop of length *length*.
"""
base = 16
fraction = float(i) / (length - 1)
raynge = int(finish, base) - int(start, base)
thex = hex(int(int(start, base) + fraction * raynge)).split('x')[-1]
if len(thex) != 2:
thex = '0' + thex
return thex
allColors = []
# Separate (R, G, B) pairs.
for start, end in zip(hexList[:-1], hexList[1:]):
# Linearly intepolate between pair of hex ###### values and
# add to list.
nInterpolate = 765
for index in range(nInterpolate):
r = _scale(start[1:3], end[1:3], nInterpolate, index)
g = _scale(start[3:5], end[3:5], nInterpolate, index)
b = _scale(start[5:7], end[5:7], nInterpolate, index)
allColors.append(''.join(['#', r, g, b]))
# Pick only nColors colors from the total list.
result = []
for counter in range(nColors):
fraction = float(counter) / (nColors - 1)
index = int(fraction * (len(allColors) - 1))
result.append(allColors[index])
return result
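# Usage sketch (arguments assumed): interpolate from black through red to yellow
# and pick 5 evenly spaced colors; the exact output below was worked out by hand,
# so treat it as approximate:
#   linear_gradient(['#000000', '#FF0000', '#FFFF00'], 5)
#   -> ['#000000', '#7f0000', '#ff0000', '#ff7f00', '#ffff00']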
def color_brewer(color_code, n=6):
"""
    Generate a colorbrewer color scheme of length 'n' for the scheme named by 'color_code'.
Live examples can be seen at http://colorbrewer2.org/
"""
maximum_n = 253
scheme_info = {'BuGn': 'Sequential',
'BuPu': 'Sequential',
'GnBu': 'Sequential',
'OrRd': 'Sequential',
'PuBu': 'Sequential',
'PuBuGn': 'Sequential',
'PuRd': 'Sequential',
'RdPu': 'Sequential',
'YlGn': 'Sequential',
'YlGnBu': 'Sequential',
'YlOrBr': 'Sequential',
'YlOrRd': 'Sequential',
'BrBg': 'Diverging',
'PiYG': 'Diverging',
'PRGn': 'Diverging',
'PuOr': 'Diverging',
'RdBu': 'Diverging',
'RdGy': 'Diverging',
'RdYlBu': 'Diverging',
'RdYlGn': 'Diverging',
'Spectral': 'Diverging',
'Accent': 'Qualitative',
'Dark2': 'Qualitative',
'Paired': 'Qualitative',
'Pastel1': 'Qualitative',
'Pastel2': 'Qualitative',
'Set1': 'Qualitative',
'Set2': 'Qualitative',
'Set3': 'Qualitative',
}
schemes = {'BuGn': ['#EDF8FB', '#CCECE6', '#CCECE6',
'#66C2A4', '#41AE76', '#238B45', '#005824'],
'BuPu': ['#EDF8FB', '#BFD3E6', '#9EBCDA',
'#8C96C6', '#8C6BB1', '#88419D', '#6E016B'],
'GnBu': ['#F0F9E8', '#CCEBC5', '#A8DDB5',
'#7BCCC4', '#4EB3D3', '#2B8CBE', '#08589E'],
'OrRd': ['#FEF0D9', '#FDD49E', '#FDBB84',
'#FC8D59', '#EF6548', '#D7301F', '#990000'],
'PuBu': ['#F1EEF6', '#D0D1E6', '#A6BDDB',
'#74A9CF', '#3690C0', '#0570B0', '#034E7B'],
'PuBuGn': ['#F6EFF7', '#D0D1E6', '#A6BDDB',
'#67A9CF', '#3690C0', '#02818A', '#016450'],
'PuRd': ['#F1EEF6', '#D4B9DA', '#C994C7',
'#DF65B0', '#E7298A', '#CE1256', '#91003F'],
'RdPu': ['#FEEBE2', '#FCC5C0', '#FA9FB5',
'#F768A1', '#DD3497', '#AE017E', '#7A0177'],
'YlGn': ['#FFFFCC', '#D9F0A3', '#ADDD8E',
'#78C679', '#41AB5D', '#238443', '#005A32'],
'YlGnBu': ['#FFFFCC', '#C7E9B4', '#7FCDBB',
'#41B6C4', '#1D91C0', '#225EA8', '#0C2C84'],
'YlOrBr': ['#FFFFD4', '#FEE391', '#FEC44F',
'#FE9929', '#EC7014', '#CC4C02', '#8C2D04'],
'YlOrRd': ['#FFFFB2', '#FED976', '#FEB24C',
'#FD8D3C', '#FC4E2A', '#E31A1C', '#B10026'],
'BrBg': ['#8c510a', '#d8b365', '#f6e8c3',
'#c7eae5', '#5ab4ac', '#01665e'],
'PiYG': ['#c51b7d', '#e9a3c9', '#fde0ef',
'#e6f5d0', '#a1d76a', '#4d9221'],
'PRGn': ['#762a83', '#af8dc3', '#e7d4e8',
'#d9f0d3', '#7fbf7b', '#1b7837'],
'PuOr': ['#b35806', '#f1a340', '#fee0b6',
'#d8daeb', '#998ec3', '#542788'],
'RdBu': ['#b2182b', '#ef8a62', '#fddbc7',
'#d1e5f0', '#67a9cf', '#2166ac'],
'RdGy': ['#b2182b', '#ef8a62', '#fddbc7',
'#e0e0e0', '#999999', '#4d4d4d'],
'RdYlBu': ['#d73027', '#fc8d59', '#fee090',
'#e0f3f8', '#91bfdb', '#4575b4'],
'RdYlGn': ['#d73027', '#fc8d59', '#fee08b',
'#d9ef8b', '#91cf60', '#1a9850'],
'Spectral': ['#d53e4f', '#fc8d59', '#fee08b',
'#e6f598', '#99d594', '#3288bd'],
'Accent': ['#7fc97f', '#beaed4', '#fdc086',
'#ffff99', '#386cb0', '#f0027f'],
'Dark2': ['#1b9e77', '#d95f02', '#7570b3',
'#e7298a', '#66a61e', '#e6ab02'],
'Paired': ['#a6cee3', '#1f78b4', '#b2df8a',
'#33a02c', '#fb9a99', '#e31a1c'],
'Pastel1': ['#fbb4ae', '#b3cde3', '#ccebc5',
'#decbe4', '#fed9a6', '#ffffcc'],
'Pastel2': ['#b3e2cd', '#fdcdac', '#cbd5e8',
'#f4cae4', '#e6f5c9', '#fff2ae'],
'Set1': ['#e41a1c', '#377eb8', '#4daf4a',
'#984ea3', '#ff7f00', '#ffff33'],
'Set2': ['#66c2a5', '#fc8d62', '#8da0cb',
'#e78ac3', '#a6d854', '#ffd92f'],
'Set3': ['#8dd3c7', '#ffffb3', '#bebada',
'#fb8072', '#80b1d3', '#fdb462'],
}
# Raise an error if the n requested is greater than the maximum.
if n > maximum_n:
raise ValueError("The maximum number of colors in a"
" ColorBrewer sequential color series is 253")
# Only if n is greater than six do we interpolate values.
if n > 6:
if color_code not in schemes:
color_scheme = None
else:
# Check to make sure that it is not a qualitative scheme.
if scheme_info[color_code] == 'Qualitative':
raise ValueError("Expanded color support is not available"
" for Qualitative schemes, restrict"
" number of colors to 6")
else:
color_scheme = linear_gradient(schemes.get(color_code), n)
else:
color_scheme = schemes.get(color_code, None)
return color_scheme
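# Usage sketch (assumed calls): with the default n=6 a scheme is returned as
# listed above, larger n values are interpolated via linear_gradient, and
# qualitative schemes refuse to be expanded:
#   color_brewer('YlGn')         -> the 'YlGn' hex list defined above
#   color_brewer('YlGn', n=12)   -> 12 interpolated hex values
#   color_brewer('Set1', n=12)   -> raises ValueError (qualitative scheme)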
def transform_data(data):
"""
Transform Pandas DataFrame into JSON format.
Parameters
----------
data: DataFrame or Series
Pandas DataFrame or Series
Returns
-------
JSON compatible dict
Example
-------
>>> transform_data(df)
"""
if pd is None:
raise ImportError("The Pandas package is required"
" for this functionality")
if np is None:
raise ImportError("The NumPy package is required"
" for this functionality")
def type_check(value):
"""
Type check values for JSON serialization. Native Python JSON
serialization will not recognize some Numpy data types properly,
so they must be explicitly converted.
"""
if pd.isnull(value):
return None
elif (isinstance(value, pd.tslib.Timestamp) or
isinstance(value, pd.Period)):
return time.mktime(value.timetuple())
elif isinstance(value, (int, np.integer)):
return int(value)
elif isinstance(value, (float, np.float_)):
return float(value)
elif isinstance(value, str):
return str(value)
else:
return value
if isinstance(data, pd.Series):
json_data = [{type_check(x): type_check(y) for
x, y in iteritems(data)}]
elif isinstance(data, pd.DataFrame):
json_data = [{type_check(y): type_check(z) for
x, y, z in data.itertuples()}]
return json_data
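# Illustrative example (data assumed): a Series becomes a one-element list of
# dicts keyed by its index, which is the shape the templates expect:
#   transform_data(pd.Series([1.0, 2.0], index=['a', 'b']))
#   -> [{'a': 1.0, 'b': 2.0}]
# A two-column DataFrame is flattened the same way from its itertuples(),
# using the first column as keys and the second as values.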
def split_six(series=None):
"""
Given a Pandas Series, get a domain of values from zero to the 90% quantile
rounded to the nearest order-of-magnitude integer. For example, 2100 is
rounded to 2000, 2790 to 3000.
Parameters
----------
series: Pandas series, default None
Returns
-------
list
"""
if pd is None:
raise ImportError("The Pandas package is required"
" for this functionality")
if np is None:
raise ImportError("The NumPy package is required"
" for this functionality")
def base(x):
if x > 0:
base = pow(10, math.floor(math.log10(x)))
return round(x/base)*base
else:
return 0
quants = [0, 50, 75, 85, 90]
# Some weirdness in series quantiles a la 0.13.
arr = series.values
return [base(np.percentile(arr, x)) for x in quants]
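# Illustrative example (data assumed): quantiles are taken at 0/50/75/85/90
# percent and each one is rounded to its order of magnitude, so a series running
# evenly from 0 to 3000 comes back as something like [0, 2000, 2000, 3000, 3000]:
#   split_six(pd.Series(range(0, 3100, 100)))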
def mercator_transform(data, lat_bounds, origin='upper', height_out=None):
"""Transforms an image computed in (longitude,latitude) coordinates into
the a Mercator projection image.
Parameters
----------
data: numpy array or equivalent list-like object.
Must be NxM (mono), NxMx3 (RGB) or NxMx4 (RGBA)
lat_bounds : length 2 tuple
Minimal and maximal value of the latitude of the image.
origin : ['upper' | 'lower'], optional, default 'upper'
Place the [0,0] index of the array in the upper left or lower left
corner of the axes.
height_out : int, default None
The expected height of the output.
If None, the height of the input is used.
"""
if np is None:
raise ImportError("The NumPy package is required"
" for this functionality")
mercator = lambda x: np.arcsinh(np.tan(x*np.pi/180.))*180./np.pi
array = np.atleast_3d(data).copy()
height, width, nblayers = array.shape
lat_min, lat_max = lat_bounds
if height_out is None:
height_out = height
# Eventually flip the image
if origin == 'upper':
array = array[::-1, :, :]
lats = (lat_min + np.linspace(0.5/height, 1.-0.5/height, height) *
(lat_max-lat_min))
latslats = (mercator(lat_min) +
np.linspace(0.5/height_out, 1.-0.5/height_out, height_out) *
(mercator(lat_max)-mercator(lat_min)))
out = np.zeros((height_out, width, nblayers))
for i in range(width):
        for j in range(nblayers):
out[:, i, j] = np.interp(latslats, mercator(lats), array[:, i, j])
# Eventually flip the image.
if origin == 'upper':
out = out[::-1, :, :]
return out
def image_to_url(image, mercator_project=False, colormap=None,
origin='upper', bounds=((-90, -180), (90, 180))):
"""Infers the type of an image argument and transforms it into a URL.
Parameters
----------
image: string, file or array-like object
* If string, it will be written directly in the output file.
        * If file, its content will be base64-encoded and embedded in the
          output file.
* If array-like, it will be converted to PNG base64 string and
embedded in the output.
origin : ['upper' | 'lower'], optional, default 'upper'
Place the [0, 0] index of the array in the upper left or
lower left corner of the axes.
colormap : callable, used only for `mono` image.
Function of the form [x -> (r,g,b)] or [x -> (r,g,b,a)]
for transforming a mono image into RGB.
It must output iterables of length 3 or 4, with values between
0. and 1. Hint : you can use colormaps from `matplotlib.cm`.
mercator_project : bool, default False, used for array-like image.
Transforms the data to project (longitude,latitude)
coordinates to the Mercator projection.
bounds: list-like, default ((-90, -180), (90, 180))
Image bounds on the map in the form
[[lat_min, lon_min], [lat_max, lon_max]].
Only used if mercator_project is True.
"""
if hasattr(image, 'read'):
# We got an image file.
if hasattr(image, 'name'):
# We try to get the image format from the file name.
fileformat = image.name.lower().split('.')[-1]
else:
fileformat = 'png'
url = "data:image/{};base64,{}".format(
fileformat, base64.b64encode(image.read()).decode('utf-8'))
elif (not (isinstance(image, text_type) or
isinstance(image, binary_type))) and hasattr(image, '__iter__'):
# We got an array-like object.
if mercator_project:
data = mercator_transform(image,
[bounds[0][0], bounds[1][0]],
origin=origin)
else:
data = image
png = write_png(data, origin=origin, colormap=colormap)
url = "data:image/png;base64," + base64.b64encode(png).decode('utf-8')
else:
# We got an URL.
url = json.loads(json.dumps(image))
return url.replace('\n', ' ')
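# Usage sketch (file names and array assumed): each of the three accepted input
# kinds collapses to a string usable in an image overlay:
#   image_to_url('http://example.com/tile.png')           # passed through as-is
#   image_to_url(open('overlay.png', 'rb'))               # embedded as base64
#   image_to_url(np.random.rand(10, 10), origin='lower')  # rendered via write_png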
def write_png(data, origin='upper', colormap=None):
"""
Transform an array of data into a PNG string.
This can be written to disk using binary I/O, or encoded using base64
for an inline PNG like this:
>>> png_str = write_png(array)
>>> "data:image/png;base64,"+png_str.encode('base64')
Inspired from
http://stackoverflow.com/questions/902761/saving-a-numpy-array-as-an-image
Parameters
----------
data: numpy array or equivalent list-like object.
Must be NxM (mono), NxMx3 (RGB) or NxMx4 (RGBA)
origin : ['upper' | 'lower'], optional, default 'upper'
Place the [0,0] index of the array in the upper left or lower left
corner of the axes.
colormap : callable, used only for `mono` image.
Function of the form [x -> (r,g,b)] or [x -> (r,g,b,a)]
for transforming a mono image into RGB.
It must output iterables of length 3 or 4, with values between
0. and 1. Hint: you can use colormaps from `matplotlib.cm`.
Returns
-------
PNG formatted byte string
"""
if np is None:
raise ImportError("The NumPy package is required"
" for this functionality")
if colormap is None:
colormap = lambda x: (x, x, x, 1)
array = np.atleast_3d(data)
height, width, nblayers = array.shape
if nblayers not in [1, 3, 4]:
raise ValueError("Data must be NxM (mono), "
"NxMx3 (RGB), or NxMx4 (RGBA)")
assert array.shape == (height, width, nblayers)
if nblayers == 1:
array = np.array(list(map(colormap, array.ravel())))
nblayers = array.shape[1]
if nblayers not in [3, 4]:
raise ValueError("colormap must provide colors of"
"length 3 (RGB) or 4 (RGBA)")
array = array.reshape((height, width, nblayers))
assert array.shape == (height, width, nblayers)
if nblayers == 3:
array = np.concatenate((array, np.ones((height, width, 1))), axis=2)
nblayers = 4
assert array.shape == (height, width, nblayers)
assert nblayers == 4
# Normalize to uint8 if it isn't already.
if array.dtype != 'uint8':
array = array * 255./array.max(axis=(0, 1)).reshape((1, 1, 4))
array = array.astype('uint8')
# Eventually flip the image.
if origin == 'lower':
array = array[::-1, :, :]
# Transform the array to bytes.
raw_data = b''.join([b'\x00' + array[i, :, :].tobytes()
for i in range(height)])
def png_pack(png_tag, data):
chunk_head = png_tag + data
return (struct.pack("!I", len(data)) +
chunk_head +
struct.pack("!I", 0xFFFFFFFF & zlib.crc32(chunk_head)))
return b''.join([
b'\x89PNG\r\n\x1a\n',
png_pack(b'IHDR', struct.pack("!2I5B", width, height, 8, 6, 0, 0, 0)),
png_pack(b'IDAT', zlib.compress(raw_data, 9)),
png_pack(b'IEND', b'')])
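# Usage sketch (array values assumed): a 2x2 grayscale ramp written out as an
# inline PNG data URI; base64.b64encode is used because str.encode('base64')
# from the docstring only exists on Python 2:
#   data = [[0.0, 0.5], [0.75, 1.0]]
#   png = write_png(data)
#   uri = "data:image/png;base64," + base64.b64encode(png).decode('utf-8')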
def _camelify(out):
return (''.join(["_" + x.lower() if i < len(out)-1 and x.isupper() and out[i+1].islower() # noqa
else x.lower() + "_" if i < len(out)-1 and x.islower() and out[i+1].isupper() # noqa
else x.lower() for i, x in enumerate(list(out))])).lstrip('_').replace('__', '_') # noqa
def _parse_size(value):
try:
if isinstance(value, int) or isinstance(value, float):
value_type = 'px'
value = float(value)
assert value > 0
else:
value_type = '%'
value = float(value.strip('%'))
assert 0 <= value <= 100
except:
msg = "Cannot parse value {!r} as {!r}".format
raise ValueError(msg(value, value_type))
return value, value_type
def _locations_mirror(x):
"""Mirrors the points in a list-of-list-of-...-of-list-of-points.
For example:
>>> _locations_mirror([[[1, 2], [3, 4]], [5, 6], [7, 8]])
[[[2, 1], [4, 3]], [6, 5], [8, 7]]
"""
if hasattr(x, '__iter__'):
if hasattr(x[0], '__iter__'):
return list(map(_locations_mirror, x))
else:
return list(x[::-1])
else:
return x
def _locations_tolist(x):
"""Transforms recursively a list of iterables into a list of list.
"""
if hasattr(x, '__iter__'):
return list(map(_locations_tolist, x))
else:
return x
| mit | -2,512,162,047,460,299,000 | 34.740608 | 101 | 0.516743 | false |
FDelporte/PiGameConsole | Main.py | 1 | 5104 | '''
Created on 22/09/2017
@author: Frank Delporte
'''
import thread
import Tkinter as tk
import tkFont
import time
from ButtonHandler import *
from KeyReader import *
from PongGui import *
from SlideShow import *
from ConsoleMenu import *
from Legend import *
try:
import keyboard # pip install keyboard
keyAvailable = True
except ImportError:
keyAvailable = False
class PiGameConsole():
# general vars
pongBusy = False
slideShowBusy = False
keepRunning = False
# frame holders
menu = None
legend = None
win = None
slideShow = None
pong = None
def __init__(self):
print("PiGameConsole initiated")
def preventScreensaver(self):
while (self.keepRunning):
if keyAvailable == True:
keyboard.write('A', delay=0)
time.sleep(10)
def checkInput(self):
btn = ButtonHandler()
key = KeyReader()
while (self.keepRunning):
if btn.getButton(2) == True or key.getKey("1") == True:
#print("Controller red")
if self.slideShowBusy == True and self.slideShow != None:
self.slideShow.stop()
self.startPong()
elif self.pongBusy == True and self.pong != None:
self.pong.stop()
self.startSlideShow()
if btn.getButton(1) == True or key.getKey("2") == True:
#print("Controller green")
print("Controller green")
if btn.getButton(4) == True or key.getKey("3") == True:
#print("Player1 red")
if self.pongBusy == True and self.pong != None:
self.pong.move_player(1, "up")
if btn.getButton(3) == True or key.getKey("4") == True:
#print("Player1 green")
if self.pongBusy == True and self.pong != None:
self.pong.move_player(1, "down")
if btn.getButton(6) == True or key.getKey("5") == True:
#print("Player2 red")
if self.pongBusy == True and self.pong != None:
self.pong.move_player(2, "up")
if btn.getButton(5) == True or key.getKey("6") == True:
#print("Player2 green")
if self.pongBusy == True and self.pong != None:
self.pong.move_player(2, "down")
time.sleep(0.1)
def startGUI(self):
# Start the GUI
self.win = tk.Tk()
self.win.title("PI Gaming console")
self.win.attributes("-fullscreen", True)
self.exitButton = tk.Button(self.win, text = "Quit", command = self.exitProgram)
self.exitButton.grid(row = 0, column = 0, sticky=tk.NW, padx=(10, 0), pady=(10, 0))
self.menu = ConsoleMenu(self.win, 300, 250)
self.menu.grid(row = 1, column = 0, sticky=tk.NW, padx=(10, 10), pady=(0, 0))
self.legend = Legend(self.win, 300, 400)
self.legend.grid(row = 2, column = 0, sticky=tk.NW, padx=(10, 10), pady=(0, 0))
self.startSlideShow()
self.win.mainloop()
def exitProgram(self):
self.keepRunning = False
print "Finished"
self.win.quit()
def clearWindow(self):
if self.slideShow != None:
self.slideShow.stop()
self.slideShow = None
if self.pong != None:
self.pong.stop()
self.pong = None
self.slideShowBusy = False
self.pongBusy = False
time.sleep(0.5)
def startSlideShow(self):
self.clearWindow()
self.menu.setSelected(1)
self.legend.setLegend(1)
self.slideShow = SlideShow(self.win, self.win.winfo_screenwidth() - 300, self.win.winfo_screenheight() - 50)
self.slideShow.grid(row = 0, column = 2, rowspan = 3, sticky=tk.NSEW, pady=(10, 10))
self.slideShowBusy = True
def startPong(self):
self.clearWindow()
self.menu.setSelected(2)
self.legend.setLegend(2)
self.pong = PongGui(self.win, self.win.winfo_screenwidth() - 300, self.win.winfo_screenheight() - 50)
self.pong.grid(row = 0, column = 2, rowspan = 3, sticky=tk.NSEW, pady=(10, 10))
self.pongBusy = True
if __name__ == "__main__":
piGameConsole = PiGameConsole()
# Start a thread to check if a game is running
piGameConsole.keepRunning = True
thread.start_new_thread(piGameConsole.preventScreensaver, ())
thread.start_new_thread(piGameConsole.checkInput, ())
piGameConsole.startGUI()
| apache-2.0 | 330,923,120,042,564,540 | 30.121951 | 116 | 0.508817 | false |
xiiicyw/Data-Wrangling-with-MongoDB | Lesson_4_Problem_Set/03-Updating_Schema/update.py | 1 | 3482 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
In this problem set you work with another type of infobox data, audit it, clean it,
come up with a data model, insert it into a MongoDB and then run some queries against your database.
The set contains data about Arachnid class.
The data is already in the database. But you have been given a task to also include 'binomialAuthority'
information in the data, so you have to go through the data and update the existing entries.
The following things should be done in the function add_field:
- process the csv file and extract 2 fields - 'rdf-schema#label' and 'binomialAuthority_label'
- clean up the 'rdf-schema#label' same way as in the first exercise - removing redundant "(spider)" suffixes
- return a dictionary, with 'label' being the key, and 'binomialAuthority_label' the value
- if 'binomialAuthority_label' is "NULL", skip the item
The following should be done in the function update_db:
- query the database by using the field 'label'
- update the data, by adding a new item under 'classification' with a key 'binomialAuthority'
The resulting data should look like this:
- the output structure should be as follows:
{ 'label': 'Argiope',
'uri': 'http://dbpedia.org/resource/Argiope_(spider)',
'description': 'The genus Argiope includes rather large and spectacular spiders that often ...',
'name': 'Argiope',
'synonym': ["One", "Two"],
'classification': {
'binomialAuthority': None,
'family': 'Orb-weaver spider',
'class': 'Arachnid',
'phylum': 'Arthropod',
'order': 'Spider',
'kingdom': 'Animal',
'genus': None
}
}
"""
import codecs
import csv
import json
import pprint
DATAFILE = 'arachnid.csv'
FIELDS ={'rdf-schema#label': 'label',
'binomialAuthority_label': 'binomialAuthority'}
def add_field(filename, fields):
process_fields = fields.keys()
data = {}
with open(filename, "r") as f:
reader = csv.DictReader(f)
for i in range(3):
l = reader.next()
# YOUR CODE HERE
        for line in reader:
            # YOUR CODE HERE
            key = line['rdf-schema#label']
            # Strip the redundant "(spider)" style suffix so the key matches the
            # cleaned labels already stored in the database.
            if key.find('(') != -1:
                key = key.split('(')[0].strip()
            value = line['binomialAuthority_label']
            if value != "NULL":
                data[key] = value
return data
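# Illustrative shape of the dict returned above; 'Opisthoncana' is taken from the
# test below, the other entry is an invented placeholder:
#   {'Opisthoncana': 'Embrik Strand', 'Argiope': 'Audouin, 1826', ...}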
def update_db(data, db):
# YOUR CODE HERE
for element in data:
query = db.arachnid.update({'label': element},
{"$set": {"classification.binomialAuthority" : data[element]}})
def test():
# Please change only the add_field and update_db functions!
# Changes done to this function will not be taken into account
# when doing a Test Run or Submit, they are just for your own reference
# and as an example for running this code locally!
data = add_field(DATAFILE, FIELDS)
from pymongo import MongoClient
client = MongoClient("mongodb://localhost:27017")
db = client.examples
update_db(data, db)
updated = db.arachnid.find_one({'label': 'Opisthoncana'})
assert updated['classification']['binomialAuthority'] == 'Embrik Strand'
pprint.pprint(data)
if __name__ == "__main__":
test() | agpl-3.0 | -41,363,206,728,056,340 | 32.815534 | 108 | 0.63297 | false |
jalavik/plotextractor | plotextractor/version.py | 1 | 1233 | # -*- coding: utf-8 -*-
#
# This file is part of plotextractor.
# Copyright (C) 2015 CERN.
#
# plotextractor is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# plotextractor is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with plotextractor; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Version information for plotextractor.
This file is imported by ``plotextractor.__init__``,
and parsed by ``setup.py``.
"""
from __future__ import absolute_import, print_function, unicode_literals
__version__ = "0.1.0.dev20150722"
| gpl-2.0 | -6,563,958,830,303,103,000 | 36.363636 | 76 | 0.751014 | false |
rogerscristo/BotFWD | env/lib/python3.6/site-packages/telegram/payment/labeledprice.py | 1 | 1817 | #!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2017
# Leandro Toledo de Souza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains an object that represents a Telegram LabeledPrice."""
from telegram import TelegramObject
class LabeledPrice(TelegramObject):
"""This object represents a portion of the price for goods or services.
Attributes:
label (:obj:`str`): Portion label.
amount (:obj:`int`): Price of the product in the smallest units of the currency.
Args:
label (:obj:`str`): Portion label
amount (:obj:`int`): Price of the product in the smallest units of the currency (integer,
not float/double). For example, for a price of US$ 1.45 pass amount = 145. See the exp
parameter in currencies.json, it shows the number of digits past the decimal point for
each currency (2 for the majority of currencies).
**kwargs (:obj:`dict`): Arbitrary keyword arguments.
"""
def __init__(self, label, amount, **kwargs):
self.label = label
self.amount = amount
| mit | 3,730,019,111,044,197,400 | 40.255814 | 98 | 0.687947 | false |
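A short usage sketch based on the docstring above; the amount is expressed in the currency's smallest units, so US$ 1.45 becomes 145 (the product name is an invented placeholder):

from telegram import LabeledPrice

price = LabeledPrice(label='T-shirt', amount=145)  # US$ 1.45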
yunhaowang/IDP-APA | utilities/py_idpapa_assign_sr.py | 1 | 7753 | #!/usr/bin/env python
import sys,re,time,argparse
from multiprocessing import cpu_count,Pool
def main(args):
# print >>sys.stdout, "Start analysis: " + time.strftime("%a,%d %b %Y %H:%M:%S")
output_gpd = args.output
iso_list = get_iso_info(args.isoform)
p = Pool(processes=args.cpu)
csize = 100
results = p.imap(func=assignment,iterable=generate_tx(args.short_reads,iso_list),chunksize=csize)
for res in results:
if not res: continue
output_gpd.write(res+"\n")
output_gpd.close()
# print >>sys.stdout, "Finish analysis: " + time.strftime("%a,%d %b %Y %H:%M:%S")
def generate_tx(input_sr,iso_list):
z = 0
for line in input_sr:
z += 1
yield (line,z,iso_list)
# align first mate without splice alignment
def align_first_mate_s(strand,iso_exon_start,iso_exon_end,sr_exon_start,sr_exon_end):
indic = "mismatch"
if strand == "+":
if int(sr_exon_start.split(",")[0]) >= int(iso_exon_start.split(",")[-2]) and int(sr_exon_end.split(",")[0]) <= int(iso_exon_end.split(",")[-2]):
indic = "match"
else:
indic = "mismatch"
else:
if int(sr_exon_start.split(",")[0]) >= int(iso_exon_start.split(",")[0]) and int(sr_exon_end.split(",")[0]) <= int(iso_exon_end.split(",")[0]):
indic = "match"
else:
indic = "mismatch"
return indic
# align first mate with splice alignment
def align_first_mate_m(strand,iso_exon_number,iso_exon_start,iso_exon_end,sr_exon_number,sr_exon_start,sr_exon_end):
iso_junc_list = []
sr_junc_list = []
indic = "mismatch"
for i in range(0,int(iso_exon_number)-1):
iso_junc_list.append(iso_exon_end.split(",")[i])
iso_junc_list.append(iso_exon_start.split(",")[i+1])
iso_junc_set = "," + ",".join(iso_junc_list) + ","
iso_whole_set = "," + iso_exon_start.split(",")[0] + iso_junc_set + iso_exon_end.split(",")[-2] + ","
for i in range(0,int(sr_exon_number)-1):
sr_junc_list.append(sr_exon_end.split(",")[i])
sr_junc_list.append(sr_exon_start.split(",")[i+1])
sr_junc_set = "," + ",".join(sr_junc_list) + ","
if strand == "+":
pattern = sr_junc_set + "$"
if int(sr_exon_end.split(",")[-2]) <= int(iso_exon_end.split(",")[-2]) and re.search(pattern,iso_junc_set) and int(sr_exon_start.split(",")[0]) >= int(iso_whole_set.split(sr_junc_set)[0].split(",")[-1]):
indic = "match"
else:
indic = "mismatch"
else:
pattern = "^" + sr_junc_set
if int(sr_exon_start.split(",")[0]) >= int(iso_exon_start.split(",")[0]) and re.search(pattern,iso_junc_set) and int(sr_exon_end.split(",")[-2]) <= int(iso_whole_set.split(sr_junc_set)[1].split(",")[0]):
indic = "match"
else:
indic = "mismatch"
return indic
# align second mate without splice alignment
def align_second_mate_s(iso_exon_number,iso_exon_start,iso_exon_end,sr_exon_start,sr_exon_end):
indic = "mismatch"
if int(iso_exon_number) == 1:
if int(sr_exon_start.split(",")[0]) >= int(iso_exon_start.split(",")[0]) and int(sr_exon_end.split(",")[0]) <= int(iso_exon_end.split(",")[0]):
indic = "match"
else:
indic = "mismatch"
else:
for i in range(0,int(iso_exon_number)):
if int(sr_exon_start.split(",")[0]) >= int(iso_exon_start.split(",")[i]) and int(sr_exon_end.split(",")[0]) <= int(iso_exon_end.split(",")[i]):
indic = "match"
break
else:
indic = "mismatch"
return indic
# align second mate with splice alignment
def align_second_mate_m(iso_exon_number,iso_exon_start,iso_exon_end,sr_exon_number,sr_exon_start,sr_exon_end):
iso_junc_list = []
sr_junc_list = []
indic = "mismatch"
for i in range(0,int(iso_exon_number)-1):
iso_junc_list.append(iso_exon_end.split(",")[i])
iso_junc_list.append(iso_exon_start.split(",")[i+1])
iso_junc_set = "," + ",".join(iso_junc_list) + ","
iso_whole_set = "," + iso_exon_start.split(",")[0] + iso_junc_set + iso_exon_end.split(",")[-2] + ","
for i in range(0,int(sr_exon_number)-1):
sr_junc_list.append(sr_exon_end.split(",")[i])
sr_junc_list.append(sr_exon_start.split(",")[i+1])
sr_junc_set = "," + ",".join(sr_junc_list) + ","
if re.search(sr_junc_set,iso_junc_set) and len(iso_whole_set.split(sr_junc_set)[0].split(","))%2 == 0 and int(sr_exon_start.split(",")[0]) >= int(iso_whole_set.split(sr_junc_set)[0].split(",")[-1]) and int(sr_exon_end.split(",")[-2]) <= int(iso_whole_set.split(sr_junc_set)[1].split(",")[0]):
indic = "match"
else:
indic = "mismatch"
return indic
# extract pseudo isoform information
def get_iso_info(iso_gpd):
iso_list = []
for line in iso_gpd:
iso_list.append(line.strip())
	iso_gpd.close()
	return iso_list
def assignment(inputs):
(line,z,iso_list) = inputs
read_id,chr,strand,start,end,mapq_1,sf_1,exon_number_1,exon_start_1,exon_end_1,mapq_2,sf_2,exon_number_2,exon_start_2,exon_end_2 = line.rstrip("\n").split("\t")
sr_info = line.rstrip("\n")
sr_polya_iso = []
for iso in iso_list:
gene_id,isoform_id,iso_chr,iso_strand,tss,tts,cds_start,cds_end,exon_number,exon_start,exon_end = iso.split("\t")
if iso_chr == chr and iso_strand == strand and int(tss) <= int(start) and int(tts) >= int(end) and int(exon_number) >= int(exon_number_1) and int(exon_number) >= int(exon_number_2):
if int(exon_number_1) == 1 and int(exon_number_2) == 1:
indic_1 = align_first_mate_s(strand,exon_start,exon_end,exon_start_1,exon_end_1)
indic_2 = align_second_mate_s(exon_number,exon_start,exon_end,exon_start_2,exon_end_2)
if indic_1 == "match" and indic_2 == "match":
sr_polya_iso.append(isoform_id)
elif int(exon_number_1) == 1 and int(exon_number_2) > 1:
indic_1 = align_first_mate_s(strand,exon_start,exon_end,exon_start_1,exon_end_1)
indic_2 = align_second_mate_m(exon_number,exon_start,exon_end,exon_number_2,exon_start_2,exon_end_2)
if indic_1 == "match" and indic_2 == "match":
sr_polya_iso.append(isoform_id)
elif int(exon_number_1) > 1 and int(exon_number_2) == 1:
indic_1 = align_first_mate_m(strand,exon_number,exon_start,exon_end,exon_number_1,exon_start_1,exon_end_1)
indic_2 = align_second_mate_s(exon_number,exon_start,exon_end,exon_start_2,exon_end_2)
if indic_1 == "match" and indic_2 == "match":
sr_polya_iso.append(isoform_id)
else:
indic_1 = align_first_mate_m(strand,exon_number,exon_start,exon_end,exon_number_1,exon_start_1,exon_end_1)
indic_2 = align_second_mate_m(exon_number,exon_start,exon_end,exon_number_2,exon_start_2,exon_end_2)
if indic_1 == "match" and indic_2 == "match":
sr_polya_iso.append(isoform_id)
if sr_polya_iso != []:
return line.rstrip("\n") + "\t" + ",".join(sr_polya_iso)
else:
return None
def do_inputs():
output_gpd_format = '''
1. read id
2. chromosome
3. strand
4. start site of alignment of fragment
5. end site of alignment of fragment
6. MAPQ of read1 (mate1)
7. Number of nucleotides that are softly-clipped by aligner (mate1)
8. exon number (mate1)
9. exon start set (mate1)
10. exon end set (mate1)
11. MAPQ of read1 (mate2)
12. Number of nucleotides that are softly-clipped by aligner (mate2)
13. exon number (mate2)
14. exon start set (mate2)
15. exon end set (mate2)
16. isoform set containing this polyA site'''
parser = argparse.ArgumentParser(description="Function: assign the polyA sites identified by short reads to specific isoforms",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-r','--short_reads',type=argparse.FileType('r'),required=True,help="Short reads gpd file")
parser.add_argument('-i','--isoform',type=argparse.FileType('r'),required=True,help="Input: isoform gpd file")
parser.add_argument('-o','--output',type=argparse.FileType('w'),required=True,help="Output: short reads with assigned isoforms")
parser.add_argument('-p','--cpu',type=int,default=cpu_count(),help="Number of process")
args = parser.parse_args()
return args
if __name__=="__main__":
args = do_inputs()
main(args)
| apache-2.0 | -6,389,944,135,169,664,000 | 43.815029 | 293 | 0.659229 | false |
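The argparse definitions above imply an invocation along these lines; all file names and the process count are placeholders:

python py_idpapa_assign_sr.py -r short_reads.gpd -i isoforms.gpd -o sr_assigned.gpd -p 8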
CybOXProject/python-cybox | cybox/objects/win_prefetch_object.py | 1 | 2182 | # Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import entities
from mixbox import fields
import cybox.bindings.win_prefetch_object as win_prefetch_binding
from cybox.objects.device_object import Device
from cybox.objects.win_volume_object import WinVolume
from cybox.common import String, DateTime, Long, ObjectProperties
class AccessedFileList(entities.EntityList):
_binding = win_prefetch_binding
_binding_class = win_prefetch_binding.AccessedFileListType
_namespace = "http://cybox.mitre.org/objects#WinPrefetchObject-2"
accessed_file = fields.TypedField("Accessed_File", String, multiple=True)
class AccessedDirectoryList(entities.EntityList):
_binding = win_prefetch_binding
_binding_class = win_prefetch_binding.AccessedDirectoryListType
_namespace = "http://cybox.mitre.org/objects#WinPrefetchObject-2"
accessed_directory = fields.TypedField("Accessed_Directory", String, multiple=True)
class Volume(entities.Entity):
_binding = win_prefetch_binding
_binding_class = win_prefetch_binding.VolumeType
_namespace = "http://cybox.mitre.org/objects#WinPrefetchObject-2"
volumeitem = fields.TypedField("VolumeItem", WinVolume, multiple=True)
deviceitem = fields.TypedField("DeviceItem", Device, multiple=True)
class WinPrefetch(ObjectProperties):
_binding = win_prefetch_binding
_binding_class = win_prefetch_binding.WindowsPrefetchObjectType
_namespace = "http://cybox.mitre.org/objects#WinPrefetchObject-2"
_XSI_NS = "WinPrefetchObj"
_XSI_TYPE = "WindowsPrefetchObjectType"
application_file_name = fields.TypedField("Application_File_Name", String)
prefetch_hash = fields.TypedField("Prefetch_Hash", String)
times_executed = fields.TypedField("Times_Executed", Long)
first_run = fields.TypedField("First_Run", DateTime)
last_run = fields.TypedField("Last_Run", DateTime)
volume = fields.TypedField("Volume", WinVolume)
accessed_file_list = fields.TypedField("Accessed_File_List", AccessedFileList)
accessed_directory_list = fields.TypedField("Accessed_Directory_List", AccessedDirectoryList)
| bsd-3-clause | -1,243,254,732,700,253,700 | 40.961538 | 97 | 0.762603 | false |
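A minimal construction sketch for the object defined above; the field values are invented and attribute assignment relies on the TypedField coercion shown in the class definition:

from cybox.objects.win_prefetch_object import WinPrefetch

prefetch = WinPrefetch()
prefetch.application_file_name = "CALC.EXE"
prefetch.times_executed = 12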
django-erp/django-erp | djangoerp/core/tests/test_views.py | 1 | 25937 | #!/usr/bin/env python
"""This file is part of the django ERP project.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
__author__ = 'Emanuele Bertoldi <[email protected]>'
__copyright__ = 'Copyright (c) 2013-2015, django ERP Team'
__version__ = '0.0.5'
from django.test import TestCase
from django.test.utils import override_settings
from django.shortcuts import resolve_url
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AnonymousUser
from . import FakeRequest
from ..models import User, ObjectPermission
from ..views import _get_user # Is not in the public API.
from ..views import *
class GetterTestCase(TestCase):
def test_get_user_from_kwargs(self):
"""Tests retrieving a user instance from view's kwargs dict.
"""
u1, n = User.objects.get_or_create(pk=1, username="u1")
kwargs = {"pk": u1.pk}
try:
u = _get_user(None, **kwargs)
self.assertEqual(u, u1)
except User.DoesNotExist:
self.assertFalse(True)
@override_settings(ROOT_URLCONF='djangoerp.core.tests.urls', LOGIN_REQUIRED_URLS_EXCEPTIONS=(r'/(.*)$',))
class SetCancelUrlMixinTestCase(TestCase):
def test_back_in_context_data(self):
"""Tests the presence of a "back" variable in context data.
"""
response = self.client.get('/default_cancel_url/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context_data.get("back"), "/")
def test_preset_cancel_url(self):
"""Tests setting of "cancel_url" variable to preset a default back url.
"""
response = self.client.get('/preset_cancel_url/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context_data.get("back"), "/go_to_cancel_url/")
def test_cancel_url_from_request(self):
"""Tests using a "cancel_url" retrieved from "request.GET".
"""
response = self.client.get('/default_cancel_url/?back=/custom_cancel_url/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context_data.get("back"), "/custom_cancel_url/")
class BaseModelListViewTestCase(TestCase):
def test_get_default_params(self):
"""Tests correct getting of view's default parameters.
"""
v = BaseModelListView()
self.assertEqual(v.field_list, None)
self.assertEqual(v.get_field_list(), v.field_list)
self.assertEqual(v.list_template_name, "elements/model_list.html")
self.assertEqual(v.get_list_template_name(), v.list_template_name)
self.assertEqual(v.list_uid, "")
self.assertEqual(v.get_list_uid(), v.list_uid)
self.assertEqual(v.get_list_prefix(), "")
def test_get_list_prefix(self):
"""Tests correct handling of list prefix.
"""
v = BaseModelListView(["uid"], "my_template.html", "my_list_uid")
self.assertEqual(v.list_uid, "my_list_uid")
self.assertEqual(v.get_list_uid(), v.list_uid)
self.assertEqual(v.get_list_prefix(), "my_list_uid_")
def test_paginate_queryset(self):
"""Tests correct handling of pagination based on list prefix.
"""
v = BaseModelListView()
v.kwargs = {}
v.request = FakeRequest()
v.paginate_queryset([], 1)
self.assertEqual(v.page_kwarg, "page")
v.list_uid = "my_list"
v.paginate_queryset([], 1)
self.assertEqual(v.page_kwarg, "my_list_page")
def test_get_context_data(self):
"""Tests adding list-related variables to context dict.
"""
v = BaseModelListView()
v.kwargs = {}
v.request = FakeRequest()
v.object_list = []
context = v.get_context_data()
self.assertTrue("field_list" in context)
self.assertEqual(context['field_list'], None)
self.assertTrue("list_template_name" in context)
self.assertEqual(context['list_template_name'], "elements/model_list.html")
self.assertTrue("list_uid" in context)
self.assertEqual(context['list_uid'], "")
class ModelListDeleteMixinTestCase(TestCase):
def setUp(self):
class FakeBase(object):
page_kwarg = "page"
def get(self, request, *args, **kwargs):
return "get"
def post(self, request, *args, **kwargs):
return "post"
def get_list_prefix(self):
return ""
def get_queryset(self):
return get_user_model().objects.all()
def get_paginate_by(self, qs):
return int(max(1, qs.count()))
class TestModelListDeleteMixin(ModelListDeleteMixin, FakeBase):
pass
self.request = FakeRequest()
self.m = TestModelListDeleteMixin()
def test_select_all_uids(self):
"""Tests selecting all UIDs in the model list.
"""
self.request.POST = {"select_all": True}
self.assertEqual(self.m.get_selected_uids(self.request), "*")
def test_get_selected_uids(self):
"""Tests selecting any UIDs in the model list.
"""
self.request.POST = {"select_1": True, "select_2": False, "select_3": True, "select_4": ""}
self.assertEqual(
sorted(self.m.get_selected_uids(self.request)),
["1", "3"]
)
def test_get_delete_template_name(self):
"""Tests retrieving delete template name.
"""
self.assertEqual(self.m.delete_template_name, "base_model_list_confirm_delete.html")
self.assertEqual(self.m.get_delete_template_name(), self.m.delete_template_name)
def test_handle_as_get_when_has_no_selected_items(self):
"""Tests calling "get" (instead of "post") when no items are selected.
"""
self.request.POST = {"select_all": True, "delete_selected": True}
response = self.m.post(self.request)
self.assertEqual(response, "get") # NOTE: convenient result just for test.
def test_post_to_confirm_deletion(self):
"""Tests performing confirm deletion after a proper POST request.
"""
from django.template.response import TemplateResponse
user_model = get_user_model()
u1 = user_model.objects.create(username="u1")
self.request.POST = {"select_all": True, "delete_selected": True}
response = self.m.post(self.request)
self.assertTrue(isinstance(response, TemplateResponse))
self.assertEqual(response.template_name, self.m.get_delete_template_name())
def test_post_to_deletion(self):
"""Tests performing real deletion after a proper POST request.
"""
user_model = get_user_model()
u1 = user_model.objects.create(username="u1")
self.request.POST = {"select_all": True, "confirm_delete_selected": True}
response = self.m.post(self.request)
self.assertEqual(response, "get") # NOTE: convenient result just for test.
self.assertEqual(user_model.objects.count(), 0)
def test_post_to_parent_post(self):
"""Tests redirecting to parent's POST when there's no need to delete.
"""
self.request.POST = {}
response = self.m.post(self.request)
self.assertEqual(response, "post") # NOTE: convenient result just for test.
def test_delete_all(self):
"""Tests deleting all items.
"""
from django.template.response import TemplateResponse
user_model = get_user_model()
u1 = user_model.objects.create(username="u1")
u2 = user_model.objects.create(username="u2")
u3 = user_model.objects.create(username="u3")
self.request.POST = {"delete_selected": True, "select_all": True}
qs = self.m.get_queryset()
# 1) Show confirm deletion template.
self.assertQuerysetEqual(
qs,
list(map(repr, user_model.objects.all())),
ordered=False
)
self.assertEqual(qs.count(), 3)
response = self.m.delete_selected(self.request)
self.assertTrue(isinstance(response, TemplateResponse))
self.assertEqual(response.template_name, self.m.get_delete_template_name())
self.assertQuerysetEqual(
response.context_data['object_list'],
list(map(repr, qs)),
ordered=False
)
# 2) Delete all items.
del self.request.POST['delete_selected']
self.request.POST['confirm_delete_selected'] = True
response = self.m.delete_selected(self.request)
self.assertEqual(response, "get") # NOTE: convenient result just for test.
self.assertEqual(user_model.objects.count(), 0)
def test_delete_selected(self):
"""Tests deleting selected items.
"""
from django.template.response import TemplateResponse
user_model = get_user_model()
u1 = user_model.objects.create(username="u1")
u2 = user_model.objects.create(username="u2")
u3 = user_model.objects.create(username="u3")
self.request.POST = {"delete_selected": True, "select_1": True, "select_2": False, "select_3": True}
qs = self.m.get_queryset()
# 1) Show confirm deletion template.
self.assertQuerysetEqual(
qs,
list(map(repr, user_model.objects.all())),
ordered=False
)
self.assertEqual(qs.count(), 3)
response = self.m.delete_selected(self.request)
self.assertTrue(isinstance(response, TemplateResponse))
self.assertEqual(response.template_name, self.m.get_delete_template_name())
self.assertQuerysetEqual(
response.context_data['object_list'],
list(map(repr, user_model.objects.filter(pk__in=[1, 3]))),
ordered=False
)
# 2) Delete all items.
del self.request.POST['delete_selected']
self.request.POST['confirm_delete_selected'] = True
response = self.m.delete_selected(self.request)
self.assertEqual(response, "get") # NOTE: convenient result just for test.
self.assertEqual(user_model.objects.count(), 1)
self.assertQuerysetEqual(
user_model.objects.values("pk"),
[repr({'pk': 2})],
ordered=False
)
def test_pagination_on_deletion(self):
"""Tests pagination handling after item deletion.
"""
from django.http import HttpResponseRedirect
def _new_get_paginate_by(qs):
return 2
old_get_paginate_by = self.m.get_paginate_by
self.m.get_paginate_by = _new_get_paginate_by
user_model = get_user_model()
u1 = user_model.objects.create(username="u1")
u2 = user_model.objects.create(username="u2")
u3 = user_model.objects.create(username="u3")
u4 = user_model.objects.create(username="u4")
u5 = user_model.objects.create(username="u5")
self.request.GET = {"page": 3}
self.request.POST = {"confirm_delete_selected": True, "select_5": True}
response = self.m.delete_selected(self.request)
self.assertTrue(isinstance(response, HttpResponseRedirect))
self.assertEqual(response.url, "/home/test/?page=2")
self.assertEqual(user_model.objects.count(), 4)
# Restores previous behavior.
self.m.get_paginate_by = old_get_paginate_by
class ModelListFilteringMixinTestCase(TestCase):
def setUp(self):
user_model = get_user_model()
class FakeBase(object):
def get_queryset(self):
return user_model.objects.all()
def get_list_prefix(self):
return ""
def get_context_data(self):
return {}
class TestModelListFilteringMixin(ModelListFilteringMixin, FakeBase):
pass
self.m = TestModelListFilteringMixin()
self.m.request = FakeRequest()
self.u1 = user_model.objects.create(username="u1", email="[email protected]")
self.u2 = user_model.objects.create(username="u2", email="[email protected]")
self.u3 = user_model.objects.create(username="u3", email="[email protected]")
self.u4 = user_model.objects.create(username="u4", email="[email protected]")
def test_get_queryset(self):
"""Tests returning filtered queryset.
"""
user_model = get_user_model()
self.m.request.GET = {}
qs = self.m.get_queryset()
self.assertEqual(qs.count(), 4)
self.assertQuerysetEqual(
qs,
list(map(repr, user_model.objects.all())),
ordered=False
)
self.m.request.GET = {"filter_by_username__lt": "u4"}
qs = self.m.get_queryset()
self.assertEqual(qs.count(), 3)
self.assertQuerysetEqual(
qs,
list(map(repr, user_model.objects.filter(username__lt="u4"))),
ordered=False
)
self.m.request.GET = {"filter_by_username__lt": "u4", "filter_by_email__gte": "[email protected]"}
qs = self.m.get_queryset()
self.assertEqual(qs.count(), 2)
self.assertQuerysetEqual(
qs,
list(map(repr, user_model.objects.filter(username__lt="u4", email__gte="[email protected]"))),
ordered=False
)
self.assertQuerysetEqual(
qs,
[repr(self.u2), repr(self.u3)],
ordered=False
)
def test_get_context_data(self):
"""Tests returning correct context variables.
"""
user_model = get_user_model()
self.m.request.GET = {"filter_by_username__lt": "u4", "filter_by_email__gte": "[email protected]"}
context = self.m.get_context_data()
self.assertTrue("unfiltered_object_list" in context)
self.assertQuerysetEqual(
context['unfiltered_object_list'],
list(map(repr, user_model.objects.all())),
ordered=False
)
self.assertTrue("list_filter_by" in context)
self.assertEqual(
context['list_filter_by'],
{
"username": ("lt", "u4"),
"email": ("gte", "[email protected]"),
}
)
def test_post(self):
"""Tests correct handling of POST requests.
"""
self.m.request.GET = {}
self.m.request.POST = {}
response = self.m.post(self.m.request)
self.assertTrue(isinstance(response, HttpResponseRedirect))
self.assertEqual(response.url, "/home/test/")
self.m.request.POST = {
"filter_by_username": "u4", "filter_expr_username": "lt",
"filter_by_email": "[email protected]", "filter_expr_email": "gte"
}
response = self.m.post(self.m.request)
self.assertTrue(isinstance(response, HttpResponseRedirect))
self.assertEqual(response.url, "/home/test/[email protected];filter_by_username__lt=u4")
def test_reset_filters(self):
"""Tests resetting (clearing) filters.
"""
self.m.request.GET = {}
self.m.request.POST = {
"filter_by_username": "u4", "filter_expr_username": "lt",
"filter_by_email": "[email protected]", "filter_expr_email": "gte",
"reset_filters": True
}
response = self.m.post(self.m.request)
self.assertTrue(isinstance(response, HttpResponseRedirect))
self.assertEqual(response.url, "/home/test/")
def test_get_filter_query_from_post(self):
"""Tests getting correct filter query from POST request.
"""
self.m.request.GET = {}
self.m.request.POST = {}
filter_query = self.m.get_filter_query_from_post()
self.assertEqual(filter_query, {})
self.m.request.POST = {
"filter_by_username": "u4", "filter_expr_username": "lt",
"filter_by_email": "[email protected]", "filter_expr_email": "gte",
"filter_by_is_staff": True,
}
filter_query = self.m.get_filter_query_from_post()
self.assertEqual(filter_query, {"email__gte": "[email protected]", "username__lt": "u4", "is_staff": True})
def test_get_filter_query_from_get(self):
"""Tests getting correct filter query from GET request.
"""
self.m.request.GET = {}
self.m.request.POST = {}
filter_query = self.m.get_filter_query_from_get()
self.assertEqual(filter_query, {})
self.m.request.GET = {
"filter_by_username__lt": "u4",
"filter_by_email__gte": "[email protected]",
"filter_by_is_staff": True,
}
filter_query = self.m.get_filter_query_from_get()
self.assertEqual(filter_query, {"email__gte": "[email protected]", "username__lt": "u4", "is_staff": True})
class ModelListOrderingMixinTestCase(TestCase):
def setUp(self):
user_model = get_user_model()
class FakeBase():
def get_queryset(self):
return user_model.objects.all()
def get_list_prefix(self):
return ""
def get_context_data(self):
return {}
class TestModelListOrderingMixin(ModelListOrderingMixin, FakeBase):
pass
self.m = TestModelListOrderingMixin()
self.m.request = FakeRequest()
self.u1 = user_model.objects.create(username="Anna", email="[email protected]")
self.u2 = user_model.objects.create(username="Berta", email="[email protected]")
self.u3 = user_model.objects.create(username="Wendy", email="[email protected]")
self.u4 = user_model.objects.create(username="Grace", email="[email protected]")
def test_get_queryset(self):
"""Tests returning ordered queryset.
"""
user_model = get_user_model()
self.m.request.GET = {}
qs = self.m.get_queryset()
self.assertEqual(
list(map(repr, qs)),
list(map(repr, user_model.objects.all())),
)
self.m.request.GET = {"order_by": "username"}
qs = self.m.get_queryset()
self.assertEqual(
list(map(repr, qs)),
list(map(repr, user_model.objects.order_by("username"))),
)
self.m.request.GET = {"order_by": "-username"}
qs = self.m.get_queryset()
self.assertEqual(
list(map(repr, qs)),
list(map(repr, user_model.objects.order_by("-username"))),
)
self.m.request.GET = {"order_by": "email"}
qs = self.m.get_queryset()
self.assertEqual(
list(map(repr, qs)),
list(map(repr, user_model.objects.order_by("email"))),
)
def test_get_context_data(self):
"""Tests returning correct context variables.
"""
user_model = get_user_model()
self.m.request.GET = {}
context = self.m.get_context_data()
self.assertTrue("list_order_by" in context)
self.assertEqual(context['list_order_by'], None)
self.m.request.GET = {"order_by": "username"}
context = self.m.get_context_data()
self.assertTrue("list_order_by" in context)
self.assertEqual(context['list_order_by'], "username")
self.m.request.GET = {"order_by": "-username"}
context = self.m.get_context_data()
self.assertTrue("list_order_by" in context)
self.assertEqual(context['list_order_by'], "-username")
self.m.request.GET = {"order_by": "email"}
context = self.m.get_context_data()
self.assertTrue("list_order_by" in context)
self.assertEqual(context['list_order_by'], "email")
class DetailUserViewTestCase(TestCase):
def setUp(self):
self.u1 = get_user_model().objects.create_user("u1", "[email protected]", "password")
self.u2 = get_user_model().objects.create_user("u2", "[email protected]", "password")
def test_deny_anonymous_user(self):
"""Tests anonymous users can not access the view.
"""
self.client.logout()
response = self.client.get(resolve_url("user_detail", pk=self.u2.pk))
self.assertEqual(response.status_code, 302)
def test_logged_user_without_perms(self):
"""Tests logged users (but without correct perms) can not access the view.
"""
self.client.login(username='u1', password='password')
response = self.client.get(resolve_url("user_detail", pk=self.u2.pk))
self.assertEqual(response.status_code, 302)
def test_logged_user_with_perms(self):
"""Tests logged users with correct perms can access the view.
"""
p, n = ObjectPermission.objects.get_or_create_by_natural_key("view_user", "core", "user", self.u2.pk)
self.u2.objectpermissions.add(p)
self.client.login(username='u2', password='password')
response = self.client.get(resolve_url("user_detail", pk=self.u2.pk))
self.assertEqual(response.status_code, 200)
class UpdateUserViewTestCase(TestCase):
def setUp(self):
self.u1 = get_user_model().objects.create_user("u1", "[email protected]", "password")
self.u2 = get_user_model().objects.create_user("u2", "[email protected]", "password")
def test_deny_anonymous_user(self):
"""Tests anonymous users can not access the view.
"""
self.client.logout()
response = self.client.get(resolve_url("user_edit", pk=self.u2.pk))
self.assertEqual(response.status_code, 302)
def test_logged_user_without_perms(self):
"""Tests logged users (but without correct perms) can not access the view.
"""
self.client.login(username='u1', password='password')
response = self.client.get(resolve_url("user_edit", pk=self.u2.pk))
self.assertEqual(response.status_code, 302)
def test_logged_user_with_perms(self):
"""Tests logged users with correct perms can access the view.
"""
p, n = ObjectPermission.objects.get_or_create_by_natural_key("change_user", "core", "user", self.u2.pk)
self.u2.objectpermissions.add(p)
self.client.login(username='u2', password='password')
response = self.client.get(resolve_url("user_edit", pk=self.u2.pk))
self.assertEqual(response.status_code, 200)
class DeleteUserViewTestCase(TestCase):
def setUp(self):
self.u1 = get_user_model().objects.create_user("u1", "[email protected]", "password")
self.u2 = get_user_model().objects.create_user("u2", "[email protected]", "password")
def test_deny_anonymous_user(self):
"""Tests anonymous users can not access the view.
"""
self.client.logout()
response = self.client.get(resolve_url("user_delete", pk=self.u2.pk))
self.assertEqual(response.status_code, 302)
def test_logged_user_without_perms(self):
"""Tests logged users (but without correct perms) can not access the view.
"""
self.client.login(username='u1', password='password')
response = self.client.get(resolve_url("user_delete", pk=self.u2.pk))
self.assertEqual(response.status_code, 302)
def test_logged_user_with_perms(self):
"""Tests logged users with correct perms can access the view.
"""
p, n = ObjectPermission.objects.get_or_create_by_natural_key("delete_user", "core", "user", self.u2.pk)
self.u2.objectpermissions.add(p)
self.client.login(username='u2', password='password')
response = self.client.get(resolve_url("user_delete", pk=self.u2.pk))
self.assertEqual(response.status_code, 200)
def test_success_url_on_current_user_deletion(self):
"""Tests returned "success_url" when the current user is deleted.
"""
view = DeleteUserView()
view.kwargs = {'pk': self.u2.pk}
view.request = FakeRequest()
view.request.user = self.u2
view.get_object()
self.assertEqual(view.success_url, resolve_url("user_logout"))
def test_success_url_on_different_user_deletion(self):
"""Tests returned "success_url" when a different user is deleted.
"""
view = DeleteUserView()
view.kwargs = {'pk': self.u1.pk}
view.request = FakeRequest()
view.request.user = self.u2
view.get_object()
self.assertEqual(view.success_url, "/")
| mit | -3,260,086,464,090,445,300 | 36.319424 | 111 | 0.576281 | false |
hoelsner/product-database | app/config/tests/test_config_views.py | 1 | 21046 | """
Test suite for the config.views module
"""
import pytest
from html import escape
from django.contrib.auth.models import AnonymousUser, User
from django.contrib.messages.storage.fallback import FallbackStorage
from django.core.cache import cache
from django.core.exceptions import PermissionDenied
from django.urls import reverse
from django.http import Http404
from django.test import RequestFactory
from django_project import celery
from app.config import views
from app.config import models
from app.config import utils
from app.config.settings import AppSettings
pytestmark = pytest.mark.django_db
def patch_contrib_messages(request):
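    # RequestFactory requests carry no session/messages middleware, so attach a
    # FallbackStorage manually; views that call django.contrib.messages would
    # otherwise fail when run against these bare requests.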
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
return messages
@pytest.fixture
def mock_cisco_eox_api_access_available(monkeypatch):
app = AppSettings()
app.set_cisco_api_enabled(True)
app.set_cisco_api_client_id("client_id")
app.set_cisco_api_client_id("client_secret")
app.set_periodic_sync_enabled(True)
app.set_cisco_eox_api_queries("")
app.set_product_blacklist_regex("")
app.set_auto_create_new_products(True)
monkeypatch.setattr(utils, "check_cisco_eox_api_access",
lambda client_id, client_secret, drop_credentials=False: True)
@pytest.fixture
def mock_cisco_eox_api_access_broken(monkeypatch):
app = AppSettings()
app.set_cisco_api_enabled(True)
app.set_cisco_api_client_id("client_id")
app.set_cisco_api_client_id("client_secret")
app.set_periodic_sync_enabled(True)
app.set_cisco_eox_api_queries("")
app.set_product_blacklist_regex("")
app.set_auto_create_new_products(True)
monkeypatch.setattr(utils, "check_cisco_eox_api_access",
lambda client_id, client_secret, drop_credentials=False: False)
@pytest.fixture
def mock_cisco_eox_api_access_exception(monkeypatch):
def raise_exception():
raise Exception("totally broken")
app = AppSettings()
app.set_cisco_api_enabled(True)
app.set_cisco_api_client_id("client_id")
app.set_cisco_api_client_id("client_secret")
app.set_periodic_sync_enabled(True)
app.set_cisco_eox_api_queries("")
app.set_product_blacklist_regex("")
app.set_auto_create_new_products(True)
monkeypatch.setattr(utils, "check_cisco_eox_api_access",
lambda client_id, client_secret, drop_credentials: raise_exception())
@pytest.fixture
def mock_cisco_eox_api_access_disabled():
app = AppSettings()
app.set_cisco_api_enabled(False)
@pytest.mark.usefixtures("import_default_vendors")
class TestAddNotificationView:
URL_NAME = "productdb_config:notification-add"
def test_anonymous_default(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.add_notification(request)
assert response.status_code == 302
assert response.url.startswith("/productdb/login")
def test_authenticated_user(self):
# require super user permissions
user = User.objects.create(username="username", is_superuser=False)
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = user
with pytest.raises(PermissionDenied):
views.add_notification(request)
def test_superuser_access(self):
# require super user permissions
user = User.objects.create(username="username", is_superuser=True)
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = user
response = views.add_notification(request)
assert response.status_code == 200
def test_post(self):
# require super user permissions
user = User.objects.create(username="username", is_superuser=True)
url = reverse(self.URL_NAME)
data = {
"title": "MyTitle",
"type": "ERR",
"summary_message": "This is a summary",
"detailed_message": "This is the detail message"
}
request = RequestFactory().post(url, data=data)
request.user = user
response = views.add_notification(request)
assert response.status_code == 302
assert models.NotificationMessage.objects.count() == 1
n = models.NotificationMessage.objects.filter(title="MyTitle").first()
assert n.type == models.NotificationMessage.MESSAGE_ERROR
# test with missing input
data = {
"title": "MyTitle",
"type": "ERR",
"detailed_message": "This is the detail message"
}
request = RequestFactory().post(url, data=data)
request.user = user
response = views.add_notification(request)
assert response.status_code == 200
@pytest.mark.usefixtures("import_default_vendors")
class TestStatusView:
URL_NAME = "productdb_config:status"
@pytest.mark.usefixtures("mock_cisco_eox_api_access_available")
def test_anonymous_default(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.status(request)
assert response.status_code == 302
assert response.url.startswith("/productdb/login")
@pytest.mark.usefixtures("mock_cisco_eox_api_access_available")
def test_authenticated_user(self):
# require super user permissions
user = User.objects.create(username="username", is_superuser=False)
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = user
with pytest.raises(PermissionDenied):
views.status(request)
@pytest.mark.usefixtures("mock_cisco_eox_api_access_available")
@pytest.mark.usefixtures("mock_worker_not_available_state")
def test_superuser_access(self):
# require super user permissions
user = User.objects.create(username="username", is_superuser=True)
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = user
response = views.status(request)
assert response.status_code == 200
expected_content = [
"No backend worker found, asynchronous and scheduled tasks are not executed.",
"successful connected to the Cisco EoX API"
]
page_content = response.content.decode()
for line in expected_content:
assert line in page_content, page_content
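        # The status view caches the Cisco EoX API reachability result under
        # this key; True means the API was reported as reachable.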
assert cache.get("CISCO_EOX_API_TEST", None) is True
# cleanup
cache.delete("CISCO_EOX_API_TEST")
@pytest.mark.usefixtures("mock_cisco_eox_api_access_available")
def test_with_active_workers(self, monkeypatch):
monkeypatch.setattr(celery, "is_worker_active", lambda: True)
cache.delete("CISCO_EOX_API_TEST") # ensure that cache is not set
# require super user permissions
user = User.objects.create(username="username", is_superuser=True)
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = user
response = views.status(request)
assert response.status_code == 200
assert cache.get("CISCO_EOX_API_TEST", None) is True
expected_content = [
"Backend worker found.",
"successful connected to the Cisco EoX API"
]
for line in expected_content:
assert line in response.content.decode()
# cleanup
cache.delete("CISCO_EOX_API_TEST")
@pytest.mark.usefixtures("mock_cisco_eox_api_access_available")
def test_with_inactive_workers(self, monkeypatch):
monkeypatch.setattr(celery, "is_worker_active", lambda: False)
cache.delete("CISCO_EOX_API_TEST") # ensure that cache is not set
# require super user permissions
user = User.objects.create(username="username", is_superuser=True)
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = user
response = views.status(request)
assert response.status_code == 200
assert cache.get("CISCO_EOX_API_TEST", None) is True
expected_content = [
"No backend worker found, asynchronous and scheduled tasks are not executed.",
"successful connected to the Cisco EoX API"
]
for line in expected_content:
assert line in response.content.decode()
# cleanup
cache.delete("CISCO_EOX_API_TEST")
@pytest.mark.usefixtures("mock_cisco_eox_api_access_broken")
def test_access_with_broken_api(self):
# require super user permissions
user = User.objects.create(username="username", is_superuser=True)
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = user
response = views.status(request)
assert response.status_code == 200
assert cache.get("CISCO_EOX_API_TEST", None) is False
# cleanup
cache.delete("CISCO_EOX_API_TEST")
@pytest.mark.usefixtures("mock_cisco_eox_api_access_exception")
def test_access_with_broken_api_by_exception(self):
# require super user permissions
user = User.objects.create(username="username", is_superuser=True)
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = user
response = views.status(request)
assert response.status_code == 200
assert cache.get("CISCO_EOX_API_TEST", None) is None
# cleanup
cache.delete("CISCO_EOX_API_TEST")
@pytest.mark.usefixtures("import_default_vendors")
class TestChangeConfiguration:
URL_NAME = "productdb_config:change_settings"
@pytest.mark.usefixtures("mock_cisco_eox_api_access_available")
def test_anonymous_default(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.change_configuration(request)
assert response.status_code == 302
assert response.url.startswith("/productdb/login")
@pytest.mark.usefixtures("mock_cisco_eox_api_access_available")
def test_authenticated_user(self):
# require super user permissions
user = User.objects.create(username="username", is_superuser=False)
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = user
with pytest.raises(PermissionDenied):
views.change_configuration(request)
@pytest.mark.usefixtures("mock_cisco_eox_api_access_available")
@pytest.mark.usefixtures("import_default_text_blocks")
def test_superuser_access(self):
# require super user permissions
user = User.objects.create(username="username", is_superuser=True)
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = user
patch_contrib_messages(request)
response = views.change_configuration(request)
assert response.status_code == 200
for content in models.TextBlock.objects.all().values_list("html_content", flat=True):
assert escape(content) in response.content.decode()
def test_global_options_are_visible(self):
app_config = AppSettings()
test_internal_id = "My custom Internal ID"
app_config.set_internal_product_id_label(test_internal_id)
# require super user permissions
user = User.objects.create(username="username", is_superuser=True)
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = user
patch_contrib_messages(request)
response = views.change_configuration(request)
assert response.status_code == 200
assert test_internal_id in response.content.decode()
@pytest.mark.usefixtures("mock_cisco_eox_api_access_available")
@pytest.mark.usefixtures("import_default_text_blocks")
def test_post_with_active_api(self):
# require super user permissions
user = User.objects.create(username="username", is_superuser=True)
url = reverse(self.URL_NAME)
data = {}
request = RequestFactory().post(url, data=data)
request.user = user
patch_contrib_messages(request)
response = views.change_configuration(request)
assert response.status_code == 302
assert response.url == "/productdb/config/change/"
# test with invalid post value
data = {
"cisco_api_enabled": "on",
"cisco_api_client_id": "client_id",
"eox_api_blacklist": "("
}
request = RequestFactory().post(url, data=data)
request.user = user
msgs = patch_contrib_messages(request)
response = views.change_configuration(request)
assert response.status_code == 200
assert msgs.added_new
data = {
"cisco_api_client_id": "my changed client ID",
"cisco_api_client_secret": "my changed client secret",
}
request = RequestFactory().post(url, data=data)
request.user = user
patch_contrib_messages(request)
response = views.change_configuration(request)
assert response.status_code == 302
assert response.url == "/productdb/config/change/"
@pytest.mark.usefixtures("mock_cisco_eox_api_access_disabled")
@pytest.mark.usefixtures("import_default_text_blocks")
def test_post_with_inactive_api(self):
# require super user permissions
user = User.objects.create(username="username", is_superuser=True)
url = reverse(self.URL_NAME)
data = {
"cisco_api_enabled": "on",
}
request = RequestFactory().post(url, data=data)
request.user = user
msgs = patch_contrib_messages(request)
response = views.change_configuration(request)
assert response.status_code == 302
assert response.url == "/productdb/config/change/"
assert msgs.added_new
data = {
"cisco_api_enabled": "on",
"cisco_api_client_id": "client_id"
}
request = RequestFactory().post(url, data=data)
request.user = user
msgs = patch_contrib_messages(request)
response = views.change_configuration(request)
assert response.status_code == 302
assert response.url == "/productdb/config/change/"
assert msgs.added_new
@pytest.mark.usefixtures("mock_cisco_eox_api_access_disabled")
@pytest.mark.usefixtures("import_default_text_blocks")
def test_post_with_broken_api(self):
# require super user permissions
user = User.objects.create(username="username", is_superuser=True)
url = reverse(self.URL_NAME)
data = {
"cisco_api_enabled": "on",
"cisco_api_client_id": "client_id"
}
request = RequestFactory().post(url, data=data)
request.user = user
msgs = patch_contrib_messages(request)
response = views.change_configuration(request)
assert response.status_code == 302
assert response.url == "/productdb/config/change/"
assert msgs.added_new
@pytest.mark.usefixtures("import_default_vendors")
class TestServerMessagesList:
URL_NAME = "productdb_config:notification-list"
def test_anonymous_default(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.server_messages_list(request)
assert response.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("enable_login_only_mode")
def test_anonymous_login_only_mode(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.server_messages_list(request)
assert response.status_code == 302, "Should redirect to login page"
assert response.url == reverse("login") + "?next=" + url, \
"Should contain a next parameter for redirect"
def test_authenticated_user(self):
models.NotificationMessage.objects.create(title="A1", summary_message="B", detailed_message="C")
models.NotificationMessage.objects.create(title="A2", summary_message="B", detailed_message="C")
models.NotificationMessage.objects.create(title="A3", summary_message="B", detailed_message="C")
models.NotificationMessage.objects.create(title="A4", summary_message="B", detailed_message="C")
models.NotificationMessage.objects.create(title="A5", summary_message="B", detailed_message="C")
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = User.objects.create(username="username", is_superuser=False, is_staff=False)
response = views.server_messages_list(request)
assert response.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("import_default_vendors")
class TestServerMessagesDetail:
URL_NAME = "productdb_config:notification-detail"
def test_anonymous_default(self):
nm = models.NotificationMessage.objects.create(title="A1", summary_message="B", detailed_message="C")
url = reverse(self.URL_NAME, kwargs={"message_id": nm.id})
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.server_message_detail(request, nm.id)
assert response.status_code == 200, "Should be callable"
@pytest.mark.usefixtures("enable_login_only_mode")
def test_anonymous_login_only_mode(self):
nm = models.NotificationMessage.objects.create(title="A1", summary_message="B", detailed_message="C")
url = reverse(self.URL_NAME, kwargs={"message_id": nm.id})
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.server_message_detail(request, nm.id)
assert response.status_code == 302, "Should redirect to login page"
assert response.url == reverse("login") + "?next=" + url, \
"Should contain a next parameter for redirect"
def test_authenticated_user(self):
nm = models.NotificationMessage.objects.create(title="A1", summary_message="B", detailed_message="C")
url = reverse(self.URL_NAME, kwargs={"message_id": nm.id})
request = RequestFactory().get(url)
request.user = User.objects.create(username="username", is_superuser=False, is_staff=False)
response = views.server_message_detail(request, nm.id)
assert response.status_code == 200, "Should be callable"
def test_404(self):
url = reverse(self.URL_NAME, kwargs={"message_id": 9999})
request = RequestFactory().get(url)
request.user = User.objects.create(username="username", is_superuser=False, is_staff=False)
with pytest.raises(Http404):
views.server_message_detail(request, 9999)
@pytest.mark.usefixtures("import_default_vendors")
class TestFlushCache:
URL_NAME = "productdb_config:flush_cache"
def test_anonymous_default(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.flush_cache(request)
assert response.status_code == 302, "Should redirect to login page"
assert response.url == reverse("login") + "?next=" + url, \
"Should contain a next parameter for redirect"
@pytest.mark.usefixtures("enable_login_only_mode")
def test_anonymous_login_only_mode(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = AnonymousUser()
response = views.flush_cache(request)
assert response.status_code == 302, "Should redirect to login page"
assert response.url == reverse("login") + "?next=" + url, \
"Should contain a next parameter for redirect"
def test_authenticated_user(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = User.objects.create(username="username", is_superuser=False, is_staff=False)
with pytest.raises(PermissionDenied):
views.flush_cache(request)
@pytest.mark.usefixtures("import_default_users")
@pytest.mark.usefixtures("import_default_vendors")
def test_superuser(self):
url = reverse(self.URL_NAME)
request = RequestFactory().get(url)
request.user = User.objects.get(username="pdb_admin")
msgs = patch_contrib_messages(request)
response = views.flush_cache(request)
assert response.status_code == 302, "Should redirect to status page"
assert msgs.added_new
assert response.url == reverse("productdb_config:status")
| mit | 6,071,281,379,836,133,000 | 35.922807 | 109 | 0.654756 | false |
Hannimal/raspicar | ps3Controller/ps3joy.py | 1 | 1520 | #!/usr/bin/env python
# coding: Latin-1
import sys
import smbus
import time
bus = smbus.SMBus(1)
address = 0x2a
try:
pipe = open('/dev/input/js0', 'r')
print('/dev/input/js0 Available')
except:
print('/dev/input/js0 not Available')
sys.exit(0)
msg = []
position = [0,0,0,0]
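# position holds the four bytes forwarded over I2C; the layout inferred from
# the event loop below is:
# [right stick axis, left stick axis, button value (low byte), button number]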
def StringToBytes(val):
retVal = []
for c in val:
retVal.append(ord(c))
return retVal
def sendData(val):
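    # Forward the current position list to the I2C slave at `address` (0x2a);
    # bus errors are silently ignored so a missing device does not crash the
    # controller loop.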
try:
#print(val)
bus.write_i2c_block_data(address, 1, val)
except:
pass
def getRange(device):
status = bus.read_byte(device)
#time.sleep(0.01)
return status
while 1:
try:
for char in pipe.read(1):
msg += [char]
#print(msg)
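            # /dev/input/js0 delivers fixed 8-byte events (struct js_event):
            # bytes 0-3 timestamp, bytes 4-5 value, byte 6 event type
            # (1 = button, 2 = axis), byte 7 = button/axis number.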
if len(msg) == 8:
# Button event if 6th byte is 1
if ord(msg[6]) == 1:
position[3] = ord(msg[7])
position[2] = ord(msg[4])
print(getRange(address))
# Axis event if 6th byte is 2
if ord(msg[6]) == 2: # define Axis
if ord(msg[7]) == 2: # define right joy
position[0] = ord(msg[5])
if ord(msg[7]) == 1: # define left joy
position[1] = ord(msg[5])
sendData(position)
msg = []
except KeyboardInterrupt:
sendData([0,0])
raise
except:
print ('Lost Connection')
sendData([0,0])
sys.exit(0)
| unlicense | 6,163,764,360,669,899,000 | 23.126984 | 59 | 0.483553 | false |
shouldmakemusic/yaas | controller/RedFrameController.py | 1 | 3038 | # Copyright (C) 2015 Manuel Hirschauer ([email protected])
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# For questions regarding this module contact
# Manuel Hirschauer <[email protected]>
"""
Control the behavior of the red frame
"""
from YaasController import *
class RedFrameController (YaasController):
"""
Control the behavior of the red frame
"""
def __init__(self, yaas):
YaasController.__init__(self, yaas)
self.log.debug("(RedFrameController) init")
def play_clip(self, params, value):
"""
Plays the xth clip in the red frame
At the moment this works only for the track style red frame
Has to be tested when triing different styles for the red frame
@param params[0]: clip_number
"""
self.log.verbose("(RedFrameController) play_clip called")
clip_number = params[0]
self.log.verbose("(RedFrameController) for clip " + str(clip_number))
self.log.verbose("(RedFrameController) scene_offset: " + str(self.yaas.get_session()._scene_offset))
        #if (clip_number > 4):
        #    clip_number = clip_number - 1
clip_number = self.yaas.get_session()._scene_offset + clip_number
self.log.verbose("(RedFrameController) calculated number " + str(clip_number))
self.song_helper().get_selected_track().fire(clip_number);
def move_track_view_vertical(self, params, value):
"""
Moves the current position down or up
@param params[0]: True ? down : up
"""
self.log.verbose("(RedFrameController) move_track_view_vertical called")
down = params[0]
self.log.verbose("(RedFrameController) down? " + str(down))
self.view_helper().move_track_view_vertical(down)
def move_track_view_horizontal(self, params, value):
"""
Moves the red frame left or right
@param params[0]: True ? right : left
"""
self.log.verbose("(RedFrameController) move_track_view_horizontal called")
right = params[0]
self.log.verbose("(RedFrameController) right? " + str(right))
self.view_helper().move_track_view_horizontal(right)
| gpl-2.0 | -3,350,587,529,291,833,000 | 38.973684 | 108 | 0.634628 | false |
3dfxsoftware/cbss-addons | report_profit/wizard/wiz_trial_cost.py | 1 | 3732 | #!/usr/bin/python
# -*- encoding: utf-8 -*-
###########################################################################
# Module Writen to OpenERP, Open Source Management Solution
# Copyright (C) OpenERP Venezuela (<http://openerp.com.ve>).
# All Rights Reserved
# Credits######################################################
# Coded by: [email protected]
# Planified by: Nhomar Hernandez
# Audited by: Vauxoo C.A.
#############################################################################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##########################################################################
from openerp.osv import osv, fields
import openerp.tools as tools
from openerp.tools.translate import _
import openerp.netsvc as netsvc
import time
import datetime
from mx.DateTime import *
class trial_cost(osv.TransientModel):
logger = netsvc.Logger()
_name = "trial.cost"
_columns = {
'date_start': fields.date('Start Date', required=True),
'period_length': fields.integer('Period length (days)', required=True),
'user_res_id': fields.many2one('res.users', 'Salesman'),
'partner_res_id': fields.many2one('res.partner', 'Partner'),
'cat_res_id': fields.many2one('product.category', 'Category'),
'u_check': fields.boolean('Check salesman?'),
'p_check': fields.boolean('Check partner?'),
'c_check': fields.boolean('Check category?'),
}
_defaults = {
'period_length': lambda *a: 30,
}
def action_print(self, cr, uid, ids, data, context=None):
if context is None:
context = {}
data = {}
data['ids'] = context.get('active_ids', [])
data['model'] = context.get('active_model', 'ir.ui.menu')
data['form'] = self.read(cr, uid, ids[0])
form = data['form']
if not form['u_check'] and not form['p_check'] and not form['c_check']:
raise osv.except_osv(_('User Error'), _(
'You must check one box !'))
res = {}
period_length = data['form']['period_length']
if period_length <= 0:
raise osv.except_osv(_('UserError'), _(
'You must enter a period length that cannot be 0 or below !'))
start = datetime.date.fromtimestamp(time.mktime(
time.strptime(data['form']['date_start'], "%Y-%m-%d")))
start = DateTime(int(start.year), int(start.month), int(start.day))
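        # Build four consecutive reporting windows of `period_length` days,
        # counting backwards from the chosen start date; key '3' is the most
        # recent window and each 'name' reads e.g. '0-30', '30-60', ...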
for i in range(4)[::-1]:
stop = start - RelativeDateTime(days=period_length)
res[str(i)] = {
'name': str((4-(i+1))*period_length) +
'-' + str((4-i)*period_length),
'stop': start.strftime('%Y-%m-%d'),
'start': stop.strftime('%Y-%m-%d'),
}
start = stop - RelativeDateTime(days=1)
data['form'].update(res)
return {'type': 'ir.actions.report.xml',
'report_name': 'profit.trial.cost',
'datas': data}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| gpl-2.0 | 1,987,104,676,452,728,800 | 40.010989 | 79 | 0.55761 | false |
gannetson/sportschooldeopenlucht | apps/fund/migrations/0002_add_recurring_direct_debit_payment.py | 1 | 17485 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'RecurringDirectDebitPayment'
db.create_table(u'fund_recurringdirectdebitpayment', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['accounts.BlueBottleUser'], unique=True)),
('active', self.gf('django.db.models.fields.BooleanField')(default=False)),
('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True)),
('updated', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=35)),
('city', self.gf('django.db.models.fields.CharField')(max_length=35)),
('account', self.gf('apps.fund.fields.DutchBankAccountField')(max_length=10)),
))
db.send_create_signal(u'fund', ['RecurringDirectDebitPayment'])
def backwards(self, orm):
# Deleting model 'RecurringDirectDebitPayment'
db.delete_table(u'fund_recurringdirectdebitpayment')
models = {
u'accounts.bluebottleuser': {
'Meta': {'object_name': 'BlueBottleUser'},
'about': ('django.db.models.fields.TextField', [], {'max_length': '265', 'blank': 'True'}),
'availability': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'}),
'available_time': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'birthdate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'contribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'deleted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'newsletter': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'picture': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'primary_language': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'share_money': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'share_time_knowledge': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'user_type': ('django.db.models.fields.CharField', [], {'default': "'person'", 'max_length': '25'}),
'username': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'why': ('django.db.models.fields.TextField', [], {'max_length': '265', 'blank': 'True'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'cowry.payment': {
'Meta': {'object_name': 'Payment'},
'amount': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '3'}),
'fee': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'payment_method_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '20', 'blank': 'True'}),
'payment_submethod_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '20', 'blank': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'polymorphic_cowry.payment_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '15', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'fund.customvoucherrequest': {
'Meta': {'object_name': 'CustomVoucherRequest'},
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']", 'null': 'True'}),
'contact_email': ('django.db.models.fields.EmailField', [], {'default': "''", 'max_length': '75', 'blank': 'True'}),
'contact_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'contact_phone': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}),
'number': ('django.db.models.fields.PositiveIntegerField', [], {}),
'organization': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '20'}),
'value': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'})
},
u'fund.donation': {
'Meta': {'object_name': 'Donation'},
'amount': ('django.db.models.fields.PositiveIntegerField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'donation_type': ('django.db.models.fields.CharField', [], {'default': "'one_off'", 'max_length': '20', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.Project']"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']", 'null': 'True', 'blank': 'True'})
},
u'fund.order': {
'Meta': {'object_name': 'Order'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'payments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'orders'", 'symmetrical': 'False', 'to': u"orm['cowry.Payment']"}),
'recurring': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'current'", 'max_length': '20', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']", 'null': 'True', 'blank': 'True'})
},
u'fund.orderitem': {
'Meta': {'object_name': 'OrderItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['fund.Order']"})
},
u'fund.recurringdirectdebitpayment': {
'Meta': {'object_name': 'RecurringDirectDebitPayment'},
'account': ('apps.fund.fields.DutchBankAccountField', [], {'max_length': '10'}),
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['accounts.BlueBottleUser']", 'unique': 'True'})
},
u'fund.voucher': {
'Meta': {'object_name': 'Voucher'},
'amount': ('django.db.models.fields.PositiveIntegerField', [], {}),
'code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
'donations': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['fund.Donation']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '2'}),
'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}),
'receiver': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'receiver'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
'receiver_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'receiver_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'sender': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sender'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
'sender_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'sender_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'projects.partnerorganization': {
'Meta': {'object_name': 'PartnerOrganization'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'projects.project': {
'Meta': {'ordering': "['title']", 'object_name': 'Project'},
'coach': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'team_member'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owner'", 'to': u"orm['accounts.BlueBottleUser']"}),
'partner_organization': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.PartnerOrganization']", 'null': 'True', 'blank': 'True'}),
'phase': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'popularity': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
}
}
complete_apps = ['fund'] | bsd-3-clause | 2,081,662,664,729,660,000 | 83.883495 | 196 | 0.561796 | false |
khosrow/metpx | sundew/lib/MasterConfigurator.py | 1 | 10676 | """
MetPX Copyright (C) 2004-2007 Environment Canada
MetPX comes with ABSOLUTELY NO WARRANTY; For details type see the file
named COPYING in the root of the source directory tree.
"""
"""
#############################################################################################
# Name: MasterConfigurator.py
#
# Author: Daniel Lemay
#
# Date: 2007-11-15
#
# Description:
#
#############################################################################################
"""
import sys, os, os.path, commands, re, time, fnmatch
import PXPaths
from SystemManager import SystemManager
from PXManager import PXManager
class MasterConfigurator(object):
def __init__(self, rootPath=""):
if os.path.isdir('/users/dor/aspy/dan/data/master/'):
            self.rootPath = '/users/dor/aspy/dan/data/master/' # development machine
elif rootPath:
self.rootPath = os.path.normpath(rootPath) + '/'
else:
            self.rootPath = '/apps/master/' # path under which are the clusters and all the config files
self.types = ['source', 'client', 'sourlient'] # Possible type of flows
self.initAll()
def initAll(self):
self.clusters = [] # cluster names (same as dsh)
self.dupSources = [] # Duplicate sources (when you combine sources from all clusters)
self.dupClients = [] # Duplicate clients (when you combine clients from all clusters)
self.dupSourlients = [] # Duplicate sourlients (when you combine sourlients from all clusters)
self.dupFlows = [] # Duplicate flows (when you combine flows (sources, clients, sourlients) from all clusters)
self.allSources = [] # All sources from all clusters (with duplicates removed)
self.allClients = [] # All clients from all clusters (with duplicates removed)
self.allSourlients = [] # All sourlients from all clusters (with duplicated removed)
self.allFlows = [] # All flows (sources, clients, sourlients) from all clusters (with duplicated removed)
self.sourceCluster = {} # A mapping from a source to it's cluster
self.clientCluster = {} # A mapping from a client to it's cluster
self.sourlientCluster = {} # A mapping from a sourlient to it's cluster
self.flowCluster = {} # A mapping from a flow to it's cluster
def printClusterInfos(self, flowCluster):
keys = flowCluster.keys()
keys.sort()
for key in keys:
print "%s: %s" % (key, flowCluster[key])
def setMachine(self, machine):
self.machine = machine
def setUser(self, user):
self.user = user
def setClusters(self, list):
self.clusters = list
def findClient(self, clusters=None, ip="", name=""):
"""
clusters: a list of clusters (ex: ['pds', 'px', 'pxatx'])
ip: IP address (ex: '192.168.1.1')
name: hostname (ex: 'metmgr')
Only one argument in (ip, name) must be non null
"""
import socket
clusters = clusters or self.clusters
cliClust = []
if ip:
try:
# get the first part of the fully qualified domain name
name = socket.gethostbyaddr(ip)[0].split('.')[0]
except:
pass
elif name:
try:
ip = socket.gethostbyname(name)
except:
pass
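        # For each cluster, point PXPaths at that cluster's tree and grep its
        # transmission configs for the client's IP and/or hostname; every hit
        # is collected as a (client_name, cluster) tuple.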
for cluster in clusters:
clusterRoot = self.rootPath + cluster
PXPaths.normalPaths(clusterRoot)
if ip and name:
command = "grep -l -E '%s|%s' %s" % (ip, name, PXPaths.TX_CONF + "*.conf")
elif ip:
command = "grep -l -E '%s' %s" % (ip, PXPaths.TX_CONF + "*.conf")
elif name:
command = "grep -l -E '%s' %s" % (name, PXPaths.TX_CONF + "*.conf")
#print "%s" % cluster.upper()
output = commands.getoutput(command)
clients = [ (os.path.basename(cli)[:-5], cluster) for cli in output.split()]
cliClust.extend(clients)
PXPaths.normalPaths() # Reset PXPaths variables
return cliClust
def getTypeCluster(self, flow, init=False):
"""
When init is not False, it is a cluster list
flow is the name of a client, source, sourlient
return a list of tuple
getTypeCluster('aftn') => [('sourlient', 'pxatx')]
getTypeCluster('pds5') => [('source', 'pxatx')]
getTypeCluster('metmgr3') => [('client', 'pds'), ('client', 'pxatx')]
"""
if init:
self.initAll()
self.clusters = init
self.getAllFlows()
return self.flowCluster.get(flow, [])
def getType(self, flow, init=False):
"""
When init is not False, it is a cluster list
flow is the name of a client, source, sourlient
return type of the flow
getType('aftn') => 'sourlient'
getType('pds5') => 'source'
getType('metmgr3') => 'client'
"""
if init:
self.initAll()
self.clusters = init
self.getAllFlows()
type_cluster = self.flowCluster.get(flow, [])
if len(type_cluster) == 1:
return type_cluster[0][0]
else:
return len(type_cluster)
def getCluster(self, flow, init=False):
"""
When init is not False, it is a cluster list
flow is the name of a client, source, sourlient
return the cluster's name on which the flow is present
or the number of clusters, if more than one.
getCluster('aftn') => 'pxatx'
getCluster('pds5') => 'pxatx'
gettCluster('metmgr3') => 2
"""
if init:
self.initAll()
self.clusters = init
self.getAllFlows()
type_cluster = self.flowCluster.get(flow, [])
if len(type_cluster) == 1:
return type_cluster[0][1]
else:
return len(type_cluster)
def createFlowDict(self):
mergedDict = SystemManager.mergeTwoDict(self.sourceCluster, self.clientCluster)
return SystemManager.mergeTwoDict(mergedDict, self.sourlientCluster)
def getAllFlows(self, noPrint=True):
if noPrint:
iprint = lambda *x: None
else:
iprint = lambda *x:sys.stdout.write(" ".join(map(str, x)) + '\n')
allSources = []
allClients = []
allSourlients = []
allFlows = []
if not os.path.isdir(self.rootPath):
return 1
for cluster in self.clusters:
pxm = PXManager(self.rootPath + cluster + '/')
if pxm.initNames():
#print (self.rootPath + cluster + " inexistant!")
continue
clients, sourlients, sources, aliases = pxm.getFlowNames(tuple=True)
# Populate flowCluster for current cluster
pxm.getFlowDict(self.sourceCluster, sources, 'source', cluster)
pxm.getFlowDict(self.clientCluster, clients, 'client', cluster)
pxm.getFlowDict(self.sourlientCluster, sourlients, 'sourlient', cluster)
allSources.extend(sources)
allClients.extend(clients)
allSourlients.extend(sourlients)
iprint("%s" % (80*'#'))
iprint("CLUSTER %s" % cluster.upper())
iprint("%s" % (80*'#'))
iprint("sources (%s): %s" % (len(sources), sources))
iprint("clients (%s): %s" % (len(clients), clients))
iprint("sourlients (%s): %s" % (len(sourlients), sourlients))
#print "aliases: %s" % aliases
iprint()
pxm = PXManager()
pxm.initNames()
self.flowCluster = self.createFlowDict()
self.dupSources = pxm.identifyDuplicate(allSources)
self.dupClients = pxm.identifyDuplicate(allClients)
self.dupSourlients = pxm.identifyDuplicate(allSourlients)
self.allSources = pxm.removeDuplicate(allSources)
self.allClients = pxm.removeDuplicate(allClients)
self.allSourlients = pxm.removeDuplicate(allSourlients)
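        # Combine every flow name into the local allFlows list and derive the
        # duplicate/unique sets from it.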
        allFlows.extend(allSources)
        allFlows.extend(allClients)
        allFlows.extend(allSourlients)
self.dupFlows = pxm.identifyDuplicate(allFlows)
self.allFlows = pxm.removeDuplicate(allFlows)
iprint("Duplicate between sources from all clusters: %s" % self.dupSources)
iprint("Duplicate between clients from all clusters: %s" % self.dupClients)
iprint("Duplicate between sourlients from all clusters: %s" % self.dupSourlients)
iprint("Duplicate beetween flows (sources, clients, sourlients) from all clusters: %s" % self.dupFlows)
iprint()
keys = self.flowCluster.keys()
keys.sort()
for key in keys:
if len(self.flowCluster[key]) > 1:
iprint("%s: %s" % (key, self.flowCluster[key]))
iprint("source cluster(%s)" % len(self.sourceCluster))
iprint(self.sourceCluster)
iprint("client cluster(%s)" % len(self.clientCluster))
iprint(self.clientCluster)
iprint("sourlient cluster(%s)" % len(self.sourlientCluster))
iprint(self.sourlientCluster)
iprint("flow cluster(%s)" % len(self.flowCluster))
iprint()
if __name__ == '__main__':
mc = MasterConfigurator()
mc.setClusters(['px', 'pds', 'pxatx'])
mc.getAllFlows(noPrint=True)
print("%s: %s" % ('metmgr1', mc.getTypeCluster('metmgr1')))
print mc.getType('metmgr1')
print mc.getCluster('metmgr1')
print("%s: %s" % ('aftn', mc.getTypeCluster('aftn')))
print("%s: %s" % ('pds5', mc.getTypeCluster('pds5')))
print("%s: %s" % ('metmgr3', mc.getTypeCluster('metmgr3')))
print mc.getType('metmgr3')
print mc.getCluster('metmgr3')
print("%s: %s" % ('px-stage', mc.getTypeCluster('px-stage')))
print mc.getType('px-stage')
print mc.getCluster('px-stage')
print("%s: %s" % ('pds_metser', mc.getTypeCluster('pds_metser')))
print mc.getType('pds_metser')
print mc.getCluster('pds_metser')
#print mc.sourceCluster
#print mc.clientCluster
#print mc.sourlientCluster
#print mc.flowCluster
mc1 = MasterConfigurator()
print mc1.getType('metmgr1', ['px', 'pds', 'pxatx'])
print mc1.getCluster('metmgr1')
mc1.findClient(ip='199.212.17.60', clusters=['px', 'pxatx', 'pds'])
| gpl-2.0 | -6,967,763,091,637,412,000 | 35.941176 | 133 | 0.569314 | false |
saicoco/mxnet_image_caption | old/main.py | 1 | 6142 | # -*- conding=utf-8 -*-
"""
train module
"""
import mxnet as mx
import numpy as np
import json
import config
import logging
import time
import collections
from sym import vgg16_fc7, caption_module
from data_provider import caption_dataIter, init_cnn
from mxnet.model import save_checkpoint
import argparse
logging.basicConfig(level=logging.INFO)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--epoches', default=20, type=int, help="epoches in training-stage", dest='epoches')
parser.add_argument('--batch_size', default=50, type=int, help="batch_size in training-stage", dest='batch_size')
parser.add_argument('--num_hidden', default=256, type=int, help="the number of hidden unit", dest='num_hidden')
parser.add_argument('--lr', default=0.01, type=float, help="learning rate in training-stage", dest='lr')
parser.add_argument('--freq_val', default=5, type=int, help="frequence of validation", dest='freq_val')
parser.add_argument('--num_embed', default=256, type=int, help="the number of embedding dimension", dest='num_embed')
parser.add_argument('--num_lstm_layer', default=256, type=int, help="the number of hidden_unit", dest='num_lstm_layer')
parser.add_argument('--gpu', default=None, type=str, help="wether run on gpu device", dest='gpu')
parser.add_argument('--prefix', default='./checkpoint/train', type=str, help="prefix of save checkpoint", dest='prefix')
parser.add_argument('--period', default=5, type=int, help="times to save checkpoint in training-stage", dest='period')
return parser.parse_args()
class callbacks:
def __init__(self, nbatch, eval_metric, epoch):
self.nbatch = nbatch
self.eval_metric = eval_metric
self.epoch = epoch
def main(args):
learning_rate = args.lr
epoches = args.epoches
batch_size = args.batch_size
num_hidden = args.num_hidden
num_embed = args.num_embed
num_lstm_layer = args.num_lstm_layer
freq_val = args.freq_val
val_flag = True if args.freq_val > 0 else False
ctx = mx.cpu(0) if args.gpu is None else mx.gpu(int(args.gpu))
prefix = args.prefix
period = args.period
with open(config.text_root, 'r') as f:
captions = json.load(f)
buckets = [10, 20, 30]
# buckets = None
train_data = caption_dataIter(
captions=captions, batch_size=batch_size, mode='train')
val_data = caption_dataIter(
captions=captions, batch_size=batch_size, mode='val')
##########################################################################
########################### custom train process #########################
##########################################################################
cnn_shapes = {
'image_data': (batch_size, 3, 224, 224)
}
cnn_sym = vgg16_fc7('image_data')
cnn_exec = cnn_sym.simple_bind(ctx=ctx, is_train=False, **cnn_shapes)
lstm = caption_module(num_lstm_layer=num_lstm_layer, seq_len=train_data.sent_length+2,
vocab_size=train_data.vocab_size, num_hidden=num_hidden, num_embed=num_embed, batch_size=batch_size)
lstm_shapes = {
'image_feature': (batch_size, 4096),
'word_data': (batch_size, train_data.sent_length+2),
'softmax_label': (batch_size, train_data.sent_length+2)
}
lstm_exec = lstm.simple_bind(
ctx=ctx, is_train=True, **lstm_shapes)
# init params
pretrain = mx.nd.load(config.vgg_pretrain)
init_cnn(cnn_exec, pretrain)
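    # The VGG-16 executor is bound with is_train=False and loaded with the
    # pretrained weights, so it acts as a frozen feature extractor feeding
    # fc7 activations into the LSTM caption module.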
# init optimazer
optimazer = mx.optimizer.create('adam')
optimazer.lr = learning_rate
updater = mx.optimizer.get_updater(optimazer)
# init metric
perplexity = mx.metric.Perplexity(ignore_label=-1)
perplexity.reset()
# callback
params = callbacks(nbatch=0, eval_metric=perplexity, epoch=0)
speedometer = mx.callback.Speedometer(batch_size=batch_size, frequent=20)
for epoch in range(epoches):
for i, batch in enumerate(train_data):
# cnn forward, get image_feature
cnn_exec.arg_dict['image_data'] = batch.data[0]
cnn_exec.forward()
image_feature = cnn_exec.outputs[0]
# lstm forward
lstm_exec.arg_dict['image_feature'] = image_feature
lstm_exec.arg_dict['word_data'] = batch.data[1]
lstm_exec.arg_dict['softmax_label'] = batch.label
lstm_exec.forward(is_train=True)
print batch.label
params.eval_metric.update(labels=batch.label,
preds=lstm_exec.outputs)
lstm_exec.backward()
params.epoch = epoch
params.nbatch += 1
speedometer(params)
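            # Manual parameter update: walk the LSTM arguments and apply the
            # optimizer only to learnable weights, skipping the bound
            # data/label inputs listed in lstm_shapes.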
for j, name in enumerate(lstm.list_arguments()):
if name not in lstm_shapes.keys():
updater(j, lstm_exec.grad_dict[
name], lstm_exec.arg_dict[name])
train_data.reset()
params.nbatch = 0
if val_flag and epoch % freq_val == 0:
for i, batch in enumerate(val_data):
# cnn forward, get image_feature
cnn_exec.arg_dict['image_data'] = batch.data[0]
cnn_exec.forward()
image_feature = cnn_exec.outputs[0]
# lstm forward
lstm_exec.arg_dict['image_feature'] = image_feature
lstm_exec.arg_dict['word_data'] = batch.data[1]
lstm_exec.arg_dict['softmax_label'] = batch.label
lstm_exec.forward(is_train=False)
params.eval_metric.update(labels=batch.label,
preds=lstm_exec.outputs)
params.epoch = epoch
params.nbatch += 1
speedometer(params)
params.nbatch = 0
val_data.reset()
if period:
save_checkpoint(prefix=prefix, epoch=epoch, symbol=lstm,
arg_params=lstm_exec.arg_dict,
aux_params=lstm_exec.aux_dict)
if __name__ == '__main__':
args = parse_args()
main(args)
| mit | -3,475,309,289,568,188,000 | 38.371795 | 126 | 0.588733 | false |
gghezzo/prettypython | PythonEveryDay2015/bigballs.py | 1 | 1044 | # Teaching Python Classes by Peter Farrell
# From http://hackingmathclass.blogspot.com/2015/08/finally-some-class.html
# Typer: Ginny C Ghezzo
# What I learned:
# why doesn't the first import bring in locals ??
import pygame
from pygame.locals import *
black = (0,0,0)
white = (255,255,255)
green = (0,255, 0)
# ball position
xcor = 100
ycor = 100
# velocity
xvel = 2
yvel = 1
diameter = 20
pygame.init()
screen = pygame.display.set_mode((600,500))
pygame.display.set_caption('Classy Balls')
done = False # loop until close is clicked
clock = pygame.time.Clock() # used to manage the screen updates
while not done:
for event in pygame.event.get():
if event.type == QUIT:
done = True
screen.fill(black)
if xcor < 0 or xcor > 600 - diameter:
xvel = -xvel # make it go the opposite direction
if ycor < 0 or ycor > 500 - diameter:
yvel = -yvel
xcor += xvel
ycor += yvel
pygame.draw.ellipse(screen, white, [xcor,ycor,diameter,diameter])
pygame.display.update()
clock.tick(120)
pygame.quit() | mit | 98,853,344,471,153,950 | 24.487805 | 75 | 0.681992 | false |
panholt/sparkpy | sparkpy/models/webhook.py | 1 | 2523 | # -*- coding: utf-8 -*-
from .base import SparkBase, SparkProperty
from .time import SparkTime
class SparkWebhook(SparkBase):
''' Cisco Spark Webhook Model
:param session: SparkSession object
:type session: `SparkSession`
:param \**kwargs: All standard Spark API properties for a Webhook
'''
# | Start of class attributes |-------------------------------------------|
API_BASE = 'https://api.ciscospark.com/v1/webhooks/'
WEBHOOK_RESOURCES = ['memberships', 'messages', 'rooms', 'all']
WEBHOOK_EVENTS = ['created', 'updated', 'deleted', 'all']
WEBHOOK_FILTERS = {'memberships': ['roomId',
'personId',
'personEmail',
'isModerator'],
'messages': ['roomId',
'roomType',
'personId',
'personEmail',
'mentionedPeople',
'hasFiles'],
'rooms': ['type',
'isLocked']}
PROPERTIES = {'id': SparkProperty('id'),
'name': SparkProperty('name', mutable=True),
'targetUrl': SparkProperty('targetUrl', mutable=True),
'event': SparkProperty('event'),
'resource': SparkProperty('resource'),
'filter': SparkProperty('filter', optional=True),
'secret': SparkProperty('secret', optional=True),
'orgId': SparkProperty('orgId', optional=True),
'createdBy': SparkProperty('createdBy', optional=True),
'appId': SparkProperty('appId', optional=True),
'ownedBy': SparkProperty('ownedBy', optional=True),
'status': SparkProperty('status', optional=True),
'created': SparkProperty('created', optional=True)}
# | Start of instance attributes |----------------------------------------|
def __init__(self, *args, **kwargs):
super().__init__(*args, path='webhooks', **kwargs)
def update(self, name, targetUrl):
data = {'name': name, 'targetUrl': targetUrl}
        # the Spark API updates a single webhook at .../webhooks/{id}
        self.parent.session.put(self.API_BASE + self.id, json=data)
return
def __repr__(self):
return f'SparkWebhook("{self.id}")'
def __str__(self):
return f'SparkWebhook({self.name})'
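
# Illustrative sketch (added for documentation; not part of the original
# module). How an update is typically issued once a SparkWebhook has been
# built by the surrounding sparkpy session machinery; the construction shown
# here is an assumption about that machinery, not the documented API.
#
#   hook = ...  # a SparkWebhook obtained from an existing SparkSession
#   hook.update('new name', 'https://example.com/spark')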
| mit | 2,799,580,334,978,640,400 | 41.05 | 79 | 0.479191 | false |
m3wolf/xanespy | xanespy/txmstore.py | 1 | 21308 | # -*- coding: utf-8 -*-
#
# Copyright © 2016 Mark Wolf
#
# This file is part of Xanespy.
#
# Xanespy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Xanespy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Xanespy. If not, see <http://www.gnu.org/licenses/>.
"""Tools for accessing TXM data stored in an HDF5 file."""
import logging
from collections import namedtuple
import h5py
import numpy as np
from tqdm import tqdm
from exceptions import (GroupKeyError, CreateGroupError, FrameSourceError,)
import xanes_math as xm
from utilities import get_component
log = logging.getLogger(__name__)
def merge_stores(base_store, new_store, destination, energy_difference=0.25, upsample=True):
"""Merge two open txm stores into a third store.
Framesets will be combined from both ``base_store`` and
``new_store``. If frames in both sets are within
``energy_difference`` or each other, then the one from
``new_store`` will be used. The resulting frames will be cropped
and up-sampled. Maps will not be copied, since they are unlikely
to be reliable with the merged framesets. The metadata will
reflect the merging as best as possible.
"""
num_timesteps = len(base_store.timestep_names)
M = namedtuple('M', ('energy', 'idx', 'store'))
# Create some arrays to hold the results
energies = []
intensities = []
optical_depths = []
filenames = []
pixel_sizes = []
for t_idx in range(num_timesteps):
# Prepare a "master list" of frames to used
num_energies_A = len(base_store.energies[t_idx])
energies_A = base_store.energies[t_idx]
master_list = [M(energies_A[E_idx], E_idx, base_store)
for E_idx in range(num_energies_A)]
# Update the master list for each frame in the new store
energies_B = new_store.energies[t_idx]
for E_idx, E in enumerate(energies_B):
# Check if this entry exists in the master list already
matches = [m for m in master_list
if abs(m.energy - E) < energy_difference]
if matches:
# It already exists, so replace it
for m in matches:
idx = master_list.index(m)
master_list[idx] = M(E, E_idx, new_store)
else:
# It doesn't exist, so add it
master_list.append(M(E, E_idx, new_store))
# Sort the master list to be in energy ascending order
master_list.sort(key=lambda m: m.energy)
# Empty array for catching processed data
Es = []
Is = []
ODs = []
fnames = []
# Prepare the arguments for resizing each image
shapes = [m.store.intensities.shape[2:] for m in master_list]
max_shape = (max(s[0] for s in shapes),
max(s[1] for s in shapes))
        # index pixel_sizes with each frame's own energy index (m.idx)
        dims = [np.array(m.store.intensities.shape[2:]) * m.store.pixel_sizes[t_idx, m.idx]
for m in master_list]
min_dims = (min(d[0] for d in dims), min(d[1] for d in dims))
        target_px_size = min(m.store.pixel_sizes[t_idx, m.idx] for m in master_list)
pixel_sizes.append((target_px_size,) * len(master_list))
# Retrieve and resize each image
for m, dim in tqdm(zip(master_list, dims), total=len(master_list)):
Es.append(m.energy)
px_size = m.store.pixel_sizes[t_idx]
I = m.store.intensities[t_idx,m.idx]
if np.iscomplexobj(I):
comp = 'imag'
else:
comp = 'real'
I = get_component(I, comp)
I = xm.resample_image(I, new_shape=max_shape, src_dims=dim, new_dims=min_dims)
Is.append(I)
OD = m.store.optical_depths[t_idx,m.idx]
OD = get_component(OD, comp)
OD = xm.resample_image(OD, new_shape=max_shape, src_dims=dim, new_dims=min_dims)
ODs.append(OD)
# Save the necessary metadata
fnames.append(m.store.filenames[t_idx][m.idx])
# Save the combined framesets
energies.append(Es)
intensities.append(Is)
optical_depths.append(ODs)
filenames.append(fnames)
# Set the newly merged frames
destination.energies = energies
destination.intensities = np.array(intensities)
destination.optical_depths = np.array(optical_depths)
destination.filenames = filenames
destination.timestep_names = base_store.timestep_names
destination.pixel_sizes = pixel_sizes
destination.pixel_unit = base_store.pixel_unit
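

# Illustrative sketch (added for documentation; not part of the original
# module): one way to drive ``merge_stores``. The file names, group names and
# energy tolerance are assumptions, and the destination store is assumed to
# already contain the target parent/data groups.
def _example_merge_two_scans():  # pragma: no cover - documentation sketch
    with TXMStore('scan_a.h5', parent_name='sample', data_name='imported', mode='r') as base, \
         TXMStore('scan_b.h5', parent_name='sample', data_name='imported', mode='r') as new, \
         TXMStore('merged.h5', parent_name='sample', data_name='merged', mode='r+') as dest:
        merge_stores(base, new, dest, energy_difference=0.25)
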
class TXMDataset():
"""Data descriptor for accessing HDF datasets.
Parameters
----------
name : str
The dataset name in the HDF file.
context : str, optional
Type of dataset this is: frameset, map, metadata, etc.
dtype : np.dtype, optional
The data-type to use when saving new data to disk. Using lower
precision datatypes can save significant disk space.
"""
def __init__(self, name, context=None, dtype=None):
self.name = name
self.context = context
self.dtype = dtype
def __get__(self, store, type=None):
dataset = store.get_dataset(self.name)
return dataset
def __set__(self, store, value):
store.replace_dataset(name=self.name, data=value,
context=self.context, dtype=self.dtype)
def __delete__(self, store):
del store.data_group()[self.name]
class TXMStore():
"""Wrapper around HDF5 file that stores TXM data.
It has a series of descriptors and properties that return the
corresponding HDF5 dataset object; the TXMStore().attribute.value
pattern can be used to get pure numpy arrays directly. These
objects should be used as a context manager to ensure that the
file is closed, especially if using a writing mode:
with TXMStore() as store:
# Do stuff with store here
Parameters
----------
hdf_filename : str
Path to the HDF file to be used.
parent_name : str
Name of the top-level HDF5 group.
data_name : str
Name of the second level HDF5 group, used for specific data
iterations (eg. imported, aligned)
mode : str
Eg. 'r' for read-only, 'r+' for read-write. Passed directly to
h5py.File constructor.
"""
VERSION = 1
_data_name = None
# HDF5 Descriptors
# ----------------
energies = TXMDataset('energies', context='metadata')
signals = TXMDataset('signals', context='metadata')
original_positions = TXMDataset('original_positions', context='metadata')
timestep_names = TXMDataset('timestep_names', context='metadata')
pixel_sizes = TXMDataset('pixel_sizes', context='metadata')
linear_combination_sources = TXMDataset('linear_combination_sources', context='metadata')
intensities = TXMDataset('intensities', context='frameset')
optical_depths = TXMDataset('optical_depths', context='frameset')
references = TXMDataset('references', context='frameset')
signal_weights = TXMDataset('signal_weights', context='frameset')
linear_combination_parameters = TXMDataset('linear_combination_parameters', context='frameset')
optical_depth_mean = TXMDataset('optical_depth_mean', context='map')
intensity_mean = TXMDataset('intensity_mean', context='map')
signal_map = TXMDataset('signal_map', context='map')
edge_mask = TXMDataset('edge_mask', context='map')
whiteline_max = TXMDataset('whiteline_max', context='map')
whiteline_fit = TXMDataset('whiteline_fit', context='map')
cluster_fit = TXMDataset('cluster_fit', context='map')
particle_labels = TXMDataset('particle_labels', context='map')
segments = TXMDataset('segments', context='map')
linear_combination_residuals = TXMDataset('linear_combination_residuals', context='map')
def __init__(self, hdf_filename: str,
parent_name: str, data_name=None,
mode='r'):
self.hdf_filename = hdf_filename
self._file = self.open_file(self.hdf_filename, mode=mode)
self.parent_name = parent_name
self.mode = mode
# Use the latest_data_name if one isn't provided
if data_name is None:
self.data_name = self.latest_data_name
else:
self.data_name = data_name
def __str__(self):
return self.parent_name + '-' + self.data_name
def __repr__(self):
fmt = '<TXMStore: {}/{}/{}>'
fmt = fmt.format(self.hdf_filename, self.parent_name, self.data_name)
return fmt
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def frameset_names(self):
"""Returns a list of all the valid frameset representations."""
names = []
for key in self.data_group().keys():
# Check if it's a "frameset" or not
grp = self.data_group()
context = grp[key].attrs.get('context', '')
if context == 'frameset':
names.append(key)
return names
def map_names(self):
"""Returns a list of all the valid map representations."""
names = []
for key in self.data_group().keys():
# Check if it's a "map" or not
if self.data_group()[key].attrs.get('context','') == 'map':
names.append(key)
return names
def open_file(self, filename, mode):
return h5py.File(filename, mode=mode)
def close(self):
self._file.close()
def data_tree(self):
"""Create a tree of the possible groups this store could access. The
first level is samples, then data_groups (ie. same sample but
different analysis status), then representations.
"""
# Define a recursive function to walk the groups in the file
def walk_groups(parent, level):
# Recurse into children
keys = getattr(parent, 'keys', lambda: [])
datas = []
for key in keys():
# Check for whether this object is a frameset or a map
node = parent[key]
grpdata = {
'name': key,
'path': node.name,
'context': node.attrs.get('context', None),
'ndim': getattr(node, 'ndim', 0),
'level': level,
                    # Recurse
'children': walk_groups(parent=node, level=level+1),
}
datas.append(grpdata)
return datas
# Start the recursion at the top
tree = walk_groups(self._file, level=0)
return tree
def fork_data_group(self, dest, src=None):
"""Turn on different active data group for this store. This method
deletes the existing group and copies symlinks from the
current one.
"""
# Switch to the group given by `src`
if src is not None:
self.data_name = src
# Check that the current and target groups are not the same
if dest == self.data_name:
log.critical('Refusing to fork group "%s" to itself', dest)
msg = 'Refusing to fork myself to myself ({})'.format(dest)
raise CreateGroupError(msg)
log.info('Forking data group "%s" to "%s"', src, dest)
# Delete the old group and overwrite it
parent = self.parent_group()
if dest in parent.keys():
del parent[dest]
# Copy the old one with symlinks
new_group = parent.copy(self.data_group(), dest, shallow=True)
self.latest_data_name = dest
self.data_name = dest
return new_group
@property
def latest_data_name(self):
name = self.parent_group().attrs['latest_data_name']
return name
@latest_data_name.setter
def latest_data_name(self, val):
self.parent_group().attrs['latest_data_name'] = val
def validate_parent_group(self, name):
"""Retrieve the real parent group name for a possible parent_group.
If ``name`` is None and only one group exists in the file,
then that group name will be returned. If ``name`` is in the
file, then ``name`` will be returned. If ``name`` is not in
the file, a GroupKeyError will be raised.
"""
if name is None and len(self._file.keys()) == 1:
new_name = list(self._file.keys())[0]
elif name not in self._file.keys():
raise GroupKeyError("Cannot load parent group '{group}'. "
"Valid choices are {choices}."
"".format(group=name, choices=list(self._file.keys())))
else:
new_name = name
return new_name
def parent_group(self):
"""Retrieve the top-level HDF5 group object for this file and
groupname."""
try:
parent_group = self.validate_parent_group(self.parent_name)
group = self._file[parent_group]
except (TypeError, KeyError):
# Invalid group name, throw an exception
msg = 'Cannot load parent group "{group}". Valid choices are {choices}'
try:
choices = list(self._file.keys())
except:
choices = 'unavailable'
msg = msg.format(group=self.parent_name, choices=choices)
raise GroupKeyError(msg) from None
return group
def data_group(self):
"""Retrieve the currently active second-level HDF5 group object for
this file and groupname. Ex. "imported" or "aligned_frames".
"""
if self.data_name not in self.parent_group().keys():
msg = "Group {} does not exists. Run TXMStore.fork_data_group('{}') first"
raise CreateGroupError(msg.format(self.data_name, self.data_name))
return self.parent_group()[self.data_name]
def replace_dataset(self, name, data, context=None, attrs={},
compression=None, *args, **kwargs):
"""Wrapper for h5py.create_dataset that removes the existing dataset
if it exists.
Parameters
----------
name : str
HDF5 groupname name to give this dataset.
data : np.ndarray
Numpy array of data to be saved.
context : str, optional
Specifies what kind of data is stored. Eg. "frameset",
"metadata", "map".
attrs : dict, optional
Dictionary containing HDF5 metadata attributes to be set on
the resulting dataset.
compression : str, optional
What type of compression to use. See HDF5 documentation for
options.
*args
Arguments to pass to h5py's ``create_dataset`` method.
**kwargs
Keyword arguments to pass to h5py's ``create_dataset`` method.
"""
# Remove the existing dataset if possible
try:
attrs = self.data_group()[name].attrs
del self.data_group()[name]
except KeyError as e:
pass
# Perform the actual group creation
ds = self.data_group().create_dataset(name=name, data=data,
compression=compression,
*args, **kwargs)
# Set metadata attributes
if context is not None:
ds.attrs['context'] = context
for key, val in attrs.items():
ds.attrs[key] = val
return ds
def get_dataset(self, name):
"""Attempt to open the requested dataset.
Parameters
==========
name : str
The name of the dataset to open in the data group.
Returns
-------
data : hyp5.Dataset
An open HDF5 dataset
Raises
------
exceptions.GroupKeyError
If the dataset does not exist in the file.
"""
# Check for some bad dataset names
if name is None:
msg = "dataset `None` not found in file '{}'"
msg = msg.format(self.hdf_filename)
raise GroupKeyError(msg)
elif name not in self.data_group().keys():
all_valid_names = self.frameset_names() + self.map_names()
msg = ("dataset '{}' not found in group '{}' file '{}'. "
"Valid choices are: {}."
"".format(name, self.data_group().name,
self.hdf_filename, all_valid_names))
raise GroupKeyError(msg)
else:
data = self.data_group()[name]
return data
def has_dataset(self, name):
"""Return a boolean indicated whether this dataset exists in the HDF
file.
"""
try:
result = name in self.data_group().keys()
except TypeError:
result = False
return result
def frame_source(self, name):
"""Get the name of the frames that went into creating a map."""
attrs = getattr(self.get_dataset(name), 'attrs', {})
source = attrs.get('frame_source', name)
return source
def get_frames(self, name):
"""Return the source frame data for the given data name.
This is similar to ``get_dataset`` except that if the
data are a map, then get the frames that went into making it.
Parameters
----------
name : str
The dataset name for which to retrieve frames.
Returns
-------
dataset : h5py.Dataset
The requested frameset. If the dataset is actually a map, as
determined by the "context" attribute, then the related
frame source attribute will be retrieved.
"""
dataset = self.get_dataset(name)
# If it's a map, then return the source frames instead
if dataset.attrs.get('context', None) == 'map':
try:
dataset = self.get_dataset(dataset.attrs['frame_source'])
except KeyError:
source_desc = dataset.attrs.get('frame_source', 'None')
# raise FrameSourceError(
log.warning(
"Invalid frame source {} specified for group {}"
"".format(source_desc, self.data_name))
dataset = self.get_dataset('optical_depths')
return dataset
@property
def relative_positions(self):
"""(x, y, z) position values for each frame."""
return self.data_group()['relative_positions']
@relative_positions.setter
def relative_positions(self, val):
self.replace_dataset('relative_positions', val, context='metadata')
self.data_group()['relative_positions'].attrs['order'] = "(x, y, z)"
@property
def pixel_unit(self):
return self.data_group()['pixel_sizes'].attrs['unit']
@pixel_unit.setter
def pixel_unit(self, val):
self.data_group()['pixel_sizes'].attrs['unit'] = val
@property
def signal_method(self):
"""String describing how the previously extracted signals were
calculated.
"""
return self.data_group()['signals'].attrs['method']
@signal_method.setter
def signal_method(self, val):
self.data_group()['signals'].attrs['method'] = val
@property
def timestamps(self):
return self.data_group()['timestamps']
@timestamps.setter
def timestamps(self, val):
        # S32 is the 32-character ASCII string type for numpy
        val = np.array(val, dtype="S32")
        self.replace_dataset('timestamps', val, dtype="S32", context='metadata',
                             attrs={'timezone': "UTC"})
@property
def filenames(self):
return self.data_group()['filenames']
@filenames.setter
def filenames(self, val):
        # S100 is the 100-character ASCII string type for numpy
val = np.array(val, dtype="S100")
self.replace_dataset('filenames', val, dtype="S100", context='metadata')
@property
def fit_parameters(self):
return self.get_dataset('fit_parameters')
@fit_parameters.setter
def fit_parameters(self, val):
attrs = {
'parameters': str(xm.kedge_params),
}
return self.replace_dataset('fit_parameters', val,
attrs=attrs, context="metadata",
dtype=np.float64)
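

# Illustrative sketch (added for documentation; not part of the original
# module): reading frames and metadata back out of a store. The file name and
# group names are assumptions chosen only for the example.
def _example_read_store():  # pragma: no cover - documentation sketch
    with TXMStore('scan_a.h5', parent_name='sample', data_name='imported', mode='r') as store:
        optical_depths = store.optical_depths  # (timestep, energy, row, col) dataset
        energies = store.energies              # (timestep, energy) dataset
        print(optical_depths.shape, energies.shape, store.pixel_unit)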
| gpl-3.0 | 6,021,047,221,681,861,000 | 36.980392 | 99 | 0.58056 | false |
chris48s/UK-Polling-Stations | polling_stations/apps/feedback/migrations/0001_initial.py | 1 | 1210 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Feedback',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(verbose_name='modified', auto_now=True)),
('found_useful', models.CharField(choices=[('YES', 'Yes'), ('NO', 'No')], max_length=100, blank=True)),
('comments', models.TextField(blank=True)),
('source_url', models.CharField(max_length=800, blank=True)),
('token', models.CharField(max_length=100, blank=True)),
],
options={
'ordering': ('-modified', '-created'),
'abstract': False,
'get_latest_by': 'modified',
},
),
]
| bsd-3-clause | 8,918,969,895,613,432,000 | 38.032258 | 124 | 0.567769 | false |
Abjad/abjad | abjad/obgc.py | 1 | 30314 | import typing
from . import _iterate, mutate, typings
from .attach import attach, detach
from .bundle import LilyPondFormatBundle
from .duration import Duration
from .overrides import LilyPondLiteral, tweak
from .parentage import Parentage
from .pitch.sets import PitchSet
from .score import Chord, Container, Note, Voice
from .select import Selection
from .spanners import beam, slur
from .tag import Tag
class OnBeatGraceContainer(Container):
r"""
On-beat grace container.
.. note:: On-beat grace containers must be included in a named voice.
.. container:: example
On-beat grace containers implement custom formatting not available in
LilyPond:
>>> music_voice = abjad.Voice("c'4 d'4 e'4 f'4", name="Music_Voice")
>>> string = "<d' g'>8 a' b' c'' d'' c'' b' a' b' c'' d''"
>>> container = abjad.on_beat_grace_container(
... string, music_voice[1:3], leaf_duration=(1, 24)
... )
>>> abjad.attach(abjad.Articulation(">"), container[0])
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
c'4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceOne
<
\tweak font-size 0
\tweak transparent ##t
d'
g'
>8 * 1/3
- \accent
[
(
a'8 * 1/3
b'8 * 1/3
c''8 * 1/3
d''8 * 1/3
c''8 * 1/3
b'8 * 1/3
a'8 * 1/3
b'8 * 1/3
c''8 * 1/3
d''8 * 1/3
)
]
}
\context Voice = "Music_Voice"
{
\voiceTwo
d'4
e'4
}
>>
\oneVoice
f'4
}
}
"""
### CLASS VARIABLES ###
__slots__ = ("_leaf_duration",)
### INITIALIZER ###
def __init__(
self,
components=None,
identifier: str = None,
leaf_duration: typings.DurationTyping = None,
name: str = None,
tag: Tag = None,
) -> None:
super().__init__(components, identifier=identifier, name=name, tag=tag)
if leaf_duration is not None:
leaf_duration = Duration(leaf_duration)
self._leaf_duration = leaf_duration
### SPECIAL METHODS ###
def __getnewargs__(self):
"""
Gets new after grace container arguments.
Returns tuple of single empty list.
"""
return ([],)
### PRIVATE METHODS ###
# NOTE: format="absolute_before" for \oneVoice so that this works:
#
# \oneVoice
# \override Stem.direction = #down
#
# ... because this ...
#
# \override Stem.direction = #down
# \oneVoice
#
# ... doesn't work.
#
# This is hackish, and some sort of longer term solution should
# happen later.
def _attach_lilypond_one_voice(self):
anchor_leaf = self._get_on_beat_anchor_leaf()
anchor_voice = Parentage(anchor_leaf).get(Voice)
final_anchor_leaf = _iterate._get_leaf(anchor_voice, -1)
next_leaf = _iterate._get_leaf(final_anchor_leaf, 1)
literal = LilyPondLiteral(r"\oneVoice", format_slot="absolute_before")
if next_leaf._has_indicator(literal):
return
if isinstance(next_leaf._parent, OnBeatGraceContainer):
return
if self._is_on_beat_anchor_voice(next_leaf._parent):
return
site = "abjad.OnBeatGraceContainer._attach_lilypond_one_voice()"
tag = Tag(site)
tag = tag.append(Tag("ONE_VOICE_COMMAND"))
attach(literal, next_leaf, tag=tag)
def _format_invocation(self):
return r'\context Voice = "On_Beat_Grace_Container"'
def _format_open_brackets_slot(self, bundle):
indent = LilyPondFormatBundle.indent
result = []
if self.identifier:
open_bracket = f"{{ {self.identifier}"
else:
open_bracket = "{"
overrides = bundle.grob_overrides
settings = bundle.context_settings
if overrides or settings:
contributions = [self._format_invocation(), r"\with", "{"]
contributions = self._tag_strings(contributions)
contributions = tuple(contributions)
identifier_pair = ("context_brackets", "open")
result.append((identifier_pair, contributions))
contributions = [indent + _ for _ in overrides]
contributions = self._tag_strings(contributions)
contributions = tuple(contributions)
identifier_pair = ("overrides", "overrides")
result.append((identifier_pair, contributions))
contributions = [indent + _ for _ in settings]
contributions = self._tag_strings(contributions)
contributions = tuple(contributions)
identifier_pair = ("settings", "settings")
result.append((identifier_pair, contributions))
contributions = [f"}} {brackets_open[0]}"]
contributions = ["}", open_bracket]
contributions = self._tag_strings(contributions)
contributions = tuple(contributions)
identifier_pair = ("context_brackets", "open")
result.append((identifier_pair, contributions))
else:
            contributions = [self._format_invocation(), open_bracket]
contributions = self._tag_strings(contributions)
contributions = tuple(contributions)
identifier_pair = ("context_brackets", "open")
result.append((identifier_pair, contributions))
return tuple(result)
def _get_on_beat_anchor_leaf(self):
container = self._parent
if container is None:
return None
if len(container) != 2:
raise Exception("Combine on-beat grace container with one other voice.")
if container.index(self) == 0:
anchor_voice = container[-1]
else:
assert container.index(self) == 1
anchor_voice = container[0]
anchor_leaf = Selection(anchor_voice).leaf(0, grace=False)
return anchor_leaf
@staticmethod
def _is_on_beat_anchor_voice(CONTAINER):
wrapper = CONTAINER._parent
if wrapper is None:
return False
if not isinstance(CONTAINER, Voice):
return False
return OnBeatGraceContainer._is_on_beat_wrapper(wrapper)
@staticmethod
def _is_on_beat_wrapper(CONTAINER):
if not CONTAINER.simultaneous:
return False
if len(CONTAINER) != 2:
return False
if isinstance(CONTAINER[0], OnBeatGraceContainer) and isinstance(
CONTAINER[1], Voice
):
return True
if isinstance(CONTAINER[0], Voice) and isinstance(
CONTAINER[1], OnBeatGraceContainer
):
return True
return False
def _match_anchor_leaf(self):
first_grace = _iterate._get_leaf(self, 0)
if not isinstance(first_grace, (Note, Chord)):
message = "must start with note or chord:\n"
message += f" {repr(self)}"
raise Exception(message)
anchor_leaf = self._get_on_beat_anchor_leaf()
if isinstance(anchor_leaf, (Note, Chord)) and isinstance(
first_grace, (Note, Chord)
):
if isinstance(first_grace, Note):
chord = Chord(first_grace)
mutate.replace(first_grace, chord)
first_grace = chord
selection = Selection(anchor_leaf)
anchor_pitches = PitchSet.from_selection(selection)
highest_pitch = list(sorted(anchor_pitches))[-1]
if highest_pitch not in first_grace.note_heads:
first_grace.note_heads.append(highest_pitch)
grace_mate_head = first_grace.note_heads.get(highest_pitch)
tweak(grace_mate_head).font_size = 0
tweak(grace_mate_head).transparent = True
def _set_leaf_durations(self):
if self.leaf_duration is None:
return
for leaf in Selection(self).leaves():
duration = leaf._get_duration()
if duration != self.leaf_duration:
multiplier = self.leaf_duration / duration
leaf.multiplier = multiplier
### PUBLIC PROPERTIES ###
@property
def leaf_duration(self) -> typing.Optional[Duration]:
"""
Gets leaf duration.
"""
return self._leaf_duration
### FACTORY FUNCTIONS ###
def on_beat_grace_container(
contents,
anchor_voice_selection,
*,
anchor_voice_number=2,
do_not_beam=None,
do_not_slash=None,
do_not_slur=None,
do_not_stop_polyphony=None,
font_size=-3,
grace_voice_number=1,
leaf_duration=None,
):
r"""
Makes on-beat grace container and wraps around ``selection``.
.. container:: example
GRACE NOTES ABOVE.
Note-to-note anchor:
>>> music_voice = abjad.Voice("c'4 d' e' f'", name="Music_Voice")
>>> string = "g'8 a' b' c'' d'' c'' b' a' b' c'' d''"
>>> result = abjad.on_beat_grace_container(
... string, music_voice[1:3], leaf_duration=(1, 30)
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
c'4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceOne
<
\tweak font-size 0
\tweak transparent ##t
d'
g'
>8 * 4/15
[
(
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
c''8 * 4/15
b'8 * 4/15
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceTwo
d'4
e'4
}
>>
\oneVoice
f'4
}
}
Note-to-chord anchor:
>>> music_voice = abjad.Voice(
... "<a c'>4 <b d'> <c' e'> <d' f'>", name="Music_Voice"
... )
>>> string = "g'8 a' b' c'' d'' c'' b' a' b' c'' d''"
>>> result = abjad.on_beat_grace_container(
... string, music_voice[1:3], leaf_duration=(1, 30)
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
<a c'>4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceOne
<
\tweak font-size 0
\tweak transparent ##t
d'
g'
>8 * 4/15
[
(
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
c''8 * 4/15
b'8 * 4/15
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceTwo
<b d'>4
<c' e'>4
}
>>
\oneVoice
<d' f'>4
}
}
Chord-to-note anchor:
>>> music_voice = abjad.Voice("c'4 d' e' f'", name="Music_Voice")
>>> string = "<g' b'>8 a' b' c'' d'' c'' b' a' b' c'' d''"
>>> result = abjad.on_beat_grace_container(
... string, music_voice[1:3], leaf_duration=(1, 30)
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
c'4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceOne
<
\tweak font-size 0
\tweak transparent ##t
d'
g'
b'
>8 * 4/15
[
(
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
c''8 * 4/15
b'8 * 4/15
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceTwo
d'4
e'4
}
>>
\oneVoice
f'4
}
}
Chord-to-chord anchor:
>>> music_voice = abjad.Voice(
... "<a c'>4 <b d'> <c' e'> <d' f'>", name="Music_Voice"
... )
>>> string = "<g' b'>8 a' b' c'' d'' c'' b' a' b' c'' d''"
>>> result = abjad.on_beat_grace_container(
... string, music_voice[1:3], leaf_duration=(1, 30)
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
<a c'>4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceOne
<
\tweak font-size 0
\tweak transparent ##t
d'
g'
b'
>8 * 4/15
[
(
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
c''8 * 4/15
b'8 * 4/15
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceTwo
<b d'>4
<c' e'>4
}
>>
\oneVoice
<d' f'>4
}
}
.. container:: example
GRACE NOTES BELOW.
Note-to-note anchor:
>>> music_voice = abjad.Voice("c'4 d' e' f'", name="Music_Voice")
>>> string = "g8 a b c' d' c' b a b c' d'"
>>> result = abjad.on_beat_grace_container(
... string,
... music_voice[1:3],
... anchor_voice_number=1,
... grace_voice_number=2,
... leaf_duration=(1, 30),
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
c'4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceTwo
<
g
\tweak font-size 0
\tweak transparent ##t
d'
>8 * 4/15
[
(
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
c'8 * 4/15
b8 * 4/15
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceOne
d'4
e'4
}
>>
\oneVoice
f'4
}
}
Note-to-chord anchor:
>>> music_voice = abjad.Voice(
... "<c' e'>4 <d' f'> <e' g'> <f' a'>", name="Music_Voice"
... )
>>> string = "g8 a b c' d' c' b a b c' d'"
>>> result = abjad.on_beat_grace_container(
... string,
... music_voice[1:3],
... anchor_voice_number=1,
... grace_voice_number=2,
... leaf_duration=(1, 30),
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
<c' e'>4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceTwo
<
g
\tweak font-size 0
\tweak transparent ##t
f'
>8 * 4/15
[
(
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
c'8 * 4/15
b8 * 4/15
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceOne
<d' f'>4
<e' g'>4
}
>>
\oneVoice
<f' a'>4
}
}
Chord-to-note anchor:
>>> music_voice = abjad.Voice("c'4 d' e' f'", name="Music_Voice")
>>> string = "<e g>8 a b c' d' c' b a b c' d'"
>>> result = abjad.on_beat_grace_container(
... string,
... music_voice[1:3],
... anchor_voice_number=1,
... grace_voice_number=2,
... leaf_duration=(1, 30),
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
c'4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceTwo
<
e
g
\tweak font-size 0
\tweak transparent ##t
d'
>8 * 4/15
[
(
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
c'8 * 4/15
b8 * 4/15
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceOne
d'4
e'4
}
>>
\oneVoice
f'4
}
}
Chord-to-chord anchor:
>>> music_voice = abjad.Voice(
... "<c' e'>4 <d' f'> <e' g'> <f' a'>", name="Music_Voice"
... )
>>> string = "<e g>8 a b c' d' c' b a b c' d'"
>>> result = abjad.on_beat_grace_container(
... string,
... music_voice[1:3],
... anchor_voice_number=1,
... grace_voice_number=2,
... leaf_duration=(1, 30),
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
<c' e'>4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceTwo
<
e
g
\tweak font-size 0
\tweak transparent ##t
f'
>8 * 4/15
[
(
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
c'8 * 4/15
b8 * 4/15
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceOne
<d' f'>4
<e' g'>4
}
>>
\oneVoice
<f' a'>4
}
}
.. container:: example
Raises exception when duration of on-beat grace container exceeds
duration of anchor container:
>>> music_voice = abjad.Voice("c'4 d' e' f'", name="Music_Voice")
>>> string = "g'8 a' b' c'' d'' c'' b' a' b' c'' d''"
>>> result = abjad.on_beat_grace_container(
... string, music_voice[1:2], leaf_duration=(1, 8)
... )
Traceback (most recent call last):
...
Exception: grace Duration(11, 8) exceeds anchor Duration(1, 4).
"""
def _site(n):
return Tag(f"abjad.on_beat_grace_container({n})")
assert isinstance(anchor_voice_selection, Selection)
if not anchor_voice_selection.are_contiguous_same_parent(
ignore_before_after_grace=True
):
message = "selection must be contiguous in same parent:\n"
message += f" {repr(anchor_voice_selection)}"
raise Exception(message)
on_beat_grace_container = OnBeatGraceContainer(
contents, leaf_duration=leaf_duration
)
if not isinstance(anchor_voice_selection, Selection):
raise Exception(f"must be selection:\n {repr(anchor_voice_selection)}")
anchor_leaf = _iterate._get_leaf(anchor_voice_selection, 0)
anchor_voice = Parentage(anchor_leaf).get(Voice)
if anchor_voice.name is None:
raise Exception(f"anchor voice must be named:\n {repr(anchor_voice)}")
anchor_voice_insert = Voice(name=anchor_voice.name)
mutate.wrap(anchor_voice_selection, anchor_voice_insert)
container = Container(simultaneous=True)
mutate.wrap(anchor_voice_insert, container)
container.insert(0, on_beat_grace_container)
on_beat_grace_container._match_anchor_leaf()
on_beat_grace_container._set_leaf_durations()
insert_duration = anchor_voice_insert._get_duration()
grace_container_duration = on_beat_grace_container._get_duration()
if insert_duration < grace_container_duration:
message = f"grace {repr(grace_container_duration)}"
message += f" exceeds anchor {repr(insert_duration)}."
raise Exception(message)
if font_size is not None:
string = rf"\set fontSize = #{font_size}"
literal = LilyPondLiteral(string)
attach(literal, on_beat_grace_container, tag=_site(1))
if not do_not_beam:
beam(on_beat_grace_container[:])
if not do_not_slash:
literal = LilyPondLiteral(r"\slash")
attach(literal, on_beat_grace_container[0], tag=_site(2))
if not do_not_slur:
slur(on_beat_grace_container[:])
voice_number_to_string = {
1: r"\voiceOne",
2: r"\voiceTwo",
3: r"\voiceThree",
4: r"\voiceFour",
}
first_grace = _iterate._get_leaf(on_beat_grace_container, 0)
one_voice_literal = LilyPondLiteral(r"\oneVoice", format_slot="absolute_before")
string = voice_number_to_string.get(grace_voice_number, None)
if string is not None:
detach(one_voice_literal, anchor_leaf)
attach(LilyPondLiteral(string), first_grace, tag=_site(3))
string = voice_number_to_string.get(anchor_voice_number, None)
if string is not None:
detach(one_voice_literal, anchor_leaf)
attach(LilyPondLiteral(string), anchor_leaf, tag=_site(4))
if not do_not_stop_polyphony:
last_anchor_leaf = _iterate._get_leaf(anchor_voice_selection, -1)
next_leaf = _iterate._get_leaf(last_anchor_leaf, 1)
if next_leaf is not None:
literal = LilyPondLiteral(r"\oneVoice", format_slot="absolute_before")
attach(literal, next_leaf, tag=_site(5))
return on_beat_grace_container
| gpl-3.0 | 6,486,861,756,668,666,000 | 33.565564 | 84 | 0.377911 | false |
UB-info/estructura-datos | RafaelArqueroGimeno_S6/ABB_Rafael_Arquero_Gimeno.py | 1 | 8132 | import copy
__author__ = "Rafael Arquero Gimeno"
class Node(object):
def __init__(self):
self.data = []
self.left = None
self.right = None
def clear(self):
"""Empty Node"""
self.data = []
self.left = None
self.right = None
def clearData(self):
"""Empty stored values"""
self.data = []
def append(self, data):
"""Appends given value"""
self.data.append(data)
return self # allow method chaining
def delete(self, data):
"""Deletes the given value from Node"""
self.data.remove(data)
@property
def key(self):
return self.data[0] if self else None
@property
def leftmost(self):
return self.left.leftmost if self.left else self.key
@property
def rightmost(self):
return self.right.rightmost if self.right else self.key
@property
def depth(self):
if self:
left_depth = self.left.depth if self.left else 0
right_depth = self.right.depth if self.right else 0
return 1 + max(left_depth, right_depth)
else:
return 0
def __copy__(self):
"""Returns a copy of self
:rtype : Node
"""
result = Node()
result.data = copy.copy(self.data)
if self.left:
result.left = copy.copy(self.left)
if self.right:
result.right = copy.copy(self.right)
return result
def __nonzero__(self):
return bool(self.data)
def __cmp__(self, other):
return cmp(self.key, other.key) if isinstance(other, Node) else cmp(self.key, other)
def __str__(self):
return reduce(lambda x, y: x + str(y) + "\n", self.data, "")
class ABB(object):
def __init__(self):
self.root = Node()
def clear(self):
"""Empty the tree"""
self.root.clear()
def insert(self, data):
"""Insert a value in tree
:param data: value to be inserted
:return: self to allow method chaining
"""
if not self:
self.root.append(data)
return self
parent, current = self._lookup(data)
if current: # data equivalent node found!
current.append(data)
else: # equivalent node not found!
setattr(parent, "right" if parent < data else "left", Node().append(data))
return self
def delete(self, data, wholeNode=False):
"""Deletes the given Node or Value if it is contained, Therefore do nothing
:type data: Node or ValueType (e.g. User)
:type wholeNode: bool
:param data: The node or value to delete
:param wholeNode: if whole matched node should be deleted or only the matched value
"""
parent, current = self._lookup(data)
if current: # data was found
current.clearData() if wholeNode else current.delete(data)
if not current: # we have deleted the last element from current node!
if current.left and current.right: # 2 children
                    newData = current.right.leftmost  # property, not a method
current.clearData()
current.append(newData)
self.delete(newData)
elif current.left: # only left child
current.data = current.left.data
current.right = current.left.right
current.left = current.left.left
# TODO
elif current.right: # only right child
current.data = current.right.data
current.left = current.right.left
current.right = current.right.right
# TODO
else: # no children
if not parent:
parent = self.root
setattr(parent, "right" if parent < data else "left", None)
def deleteLower(self, threshold, current=None, parent=None):
"""Deletes all values below threshold
:param threshold: All values below that will be deleted
:param current: The current inspected node (default root)
:param parent: The parent of current node
:return: self, allows method chaining
"""
if current is None:
if self:
current = self.root
else:
return self # break
if current > threshold:
if current.left:
self.deleteLower(threshold, current.left, current)
elif current < threshold:
if current.right:
current.data = current.right.data
current.left = current.right.left
current.right = current.right.right
self.deleteLower(threshold, current, parent)
else:
if parent:
parent.left = None # restart current
else:
self.clear() # restart root
else: # equals
current.left = None
return self
def deleteHigher(self, threshold, current=None, parent=None):
"""Deletes all values above threshold
:param threshold: All values above that will be deleted
        :param current: The currently inspected node (default root)
:param parent: The parent of current node
:return: self, allows method chaining
"""
if current is None:
if self:
current = self.root
else:
return self # break
if current < threshold:
if current.right:
self.deleteHigher(threshold, current.right, current)
elif current > threshold:
if current.left:
current.data = current.left.data
current.right = current.left.right
current.left = current.left.left
self.deleteHigher(threshold, current, parent)
else:
if parent:
parent.right = None # restart current
else:
self.clear() # restart root
else: # equals
current.right = None
return self
def _lookup(self, data):
"""Internal method. Finds the given value and return the node where it IS or where it SHOULD BE (i.e. None) and
also his parent node.
:rtype: Node, Node
"""
parent, current = None, self.root
while current:
if current < data: # data should be in right
parent, current = current, current.right
elif current > data: # data should be in left
parent, current = current, current.left
else: # equals
return parent, current
return parent, current
@property
def min(self):
"""Returns the minimum value of the tree"""
return self.root.leftmost
@property
def max(self):
"""Returns the maximum value of the tree"""
return self.root.rightmost
@property
def depth(self):
return self.root.depth
def __copy__(self):
"""Returns a copy of self
:rtype : ABB
"""
result = ABB()
result.root = copy.copy(self.root)
return result
def __nonzero__(self):
"""Returns false if the tree is empty, therefore returns true"""
return self.root.__nonzero__()
def __iter__(self, current=None):
"""Creates a generator that walks through the tree in descending order
:param current: The current node
:type current: Node
"""
if current is None: # first call
current = self.root
if current.right:
for x in self.__iter__(current.right):
yield x
for x in current.data:
yield x
if current.left:
for x in self.__iter__(current.left):
yield x
def __str__(self):
return reduce(lambda x, y: x + str(y) + "\n", self, "") | mit | -3,202,886,532,054,247,400 | 30.280769 | 119 | 0.539843 | false |
msaadat/paper | password_dlg.py | 1 | 2143 |
from PyQt5.QtWidgets import (QApplication, QMessageBox, QDialog, QGridLayout,
QHBoxLayout, QLabel, QPushButton, QLineEdit)
class PasswordDialog(QDialog):
def __init__(self, parent=None):
super(PasswordDialog, self).__init__(parent)
self.password = None
okButton = QPushButton("&Ok")
okButton.clicked.connect(self.ok_pressed)
self.pass1_edit = QLineEdit()
self.pass1_edit.setEchoMode(QLineEdit.Password)
self.pass2_edit = QLineEdit()
self.pass2_edit.setEchoMode(QLineEdit.Password)
lable1 = QLabel("Password:")
lable2 = QLabel("Repeat password:")
buttonsLayout = QHBoxLayout()
buttonsLayout.addStretch()
buttonsLayout.addWidget(okButton)
mainLayout = QGridLayout()
mainLayout.addWidget(lable1, 0, 0)
mainLayout.addWidget(self.pass1_edit, 0, 1)
mainLayout.addWidget(lable2, 1, 0)
mainLayout.addWidget(self.pass2_edit, 1, 1)
mainLayout.addLayout(buttonsLayout, 2, 1)
self.setLayout(mainLayout)
self.setWindowTitle("Set Password")
def ok_pressed(self):
pass1 = self.pass1_edit.text()
pass2 = self.pass2_edit.text()
if pass1 != pass2:
QMessageBox.warning(self, "Password",
"Passwords do not match.")
self.pass1_edit.setFocus()
self.pass1_edit.selectAll()
elif pass1 == '':
QMessageBox.information(self, "Password",
"Passwords cannot be empty.")
self.pass1_edit.setFocus()
self.pass1_edit.selectAll()
else:
self.password = pass1
self.accept()
@staticmethod
def getPassword(parent):
dialog = PasswordDialog(parent)
result = dialog.exec_()
return dialog.password, result
if __name__ == '__main__':
import sys
app = QApplication(sys.argv)
window = PasswordDialog()
window.show()
sys.exit(app.exec_())
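
# Note (added comment, not in the original file): the usual entry point is the
# static helper, called from a parent widget, e.g.:
#   password, ok = PasswordDialog.getPassword(parent_widget)
#   if ok:
#       ...  # use password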
| gpl-3.0 | -6,078,596,863,458,804,000 | 29.057971 | 77 | 0.569295 | false |
q2apro/graph-padowan | Install/MakeBuild.py | 1 | 2210 | # -*- coding: latin-1 -*-
from ftplib import FTP
from getpass import getpass
from VersionInfo import CalcVersionInfo
import os
import sys
import traceback
import datetime
import subprocess
try:
# Test that Graph can be started without some obscure dll files
os.system("Graph.exe /regserver")
# Sign Graph.exe
if "/sign" in sys.argv:
subprocess.check_call(['signtool.exe', 'sign', '/f', 'IvanMøllerJohansen.crt', '/t', 'http://timestamp.comodoca.com/authenticode', '/d', '"Graph"', 'Graph.exe'])
# Compile SetupGraphBeta-4.2.0.x.exe
print("Compiling...")
subprocess.check_call(["c:\\program files\\Inno Setup 5\\iscc.exe", "/Q", "Graph.iss"])
VersionInfo = CalcVersionInfo("Graph.exe")
FileName = "SetupGraphBeta-" + VersionInfo + ".exe"
# Sign SetupGraphBeta-x.x.x.exe
if "/sign" in sys.argv:
subprocess.check_call(['signtool.exe', 'sign', '/f', 'IvanMøllerJohansen.crt', '/t', 'http://timestamp.comodoca.com/authenticode', '/d', '"Graph"', FileName])
#Creating GraphBeta.inf
print("Writing GraphBeta.inf ...")
File = open("GraphBeta.inf", "w")
File.write("[Graph]\n")
File.write("Major = " + VersionInfo[0] + "\n")
File.write("Minor = " + VersionInfo[2] + "\n")
File.write("Release = " + VersionInfo[4] + "\n")
File.write("Build = " + VersionInfo[6:] + "\n")
File.write("Date = " + datetime.date.today().strftime("%d-%m-%Y\n"))
File.write("DownloadFile = http://www.padowan.dk/bin/" + FileName + '\n')
File.write("DownloadPage = http://www.padowan.dk/beta\n")
# Upload SetupGraphBeta.exe to the server
Password = getpass()
ftp = FTP('ftp.padowan.dk') # connect to host, default port
ftp.login('padowan.dk', Password)
ftp.cwd('bin')
print("Uploading", FileName, "...")
File = open(FileName, 'rb')
ftp.storbinary('STOR ' + FileName, File)
if ftp.size(FileName) != os.stat(FileName).st_size:
raise Exception("Wrong file size on server")
print("Uploading GraphBeta.inf ...")
ftp.cwd('../graph')
File = open("GraphBeta.inf", 'rb')
ftp.storbinary('STOR GraphBeta.inf', File)
ftp.quit()
print("Upload complete!")
except Exception:
traceback.print_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
| gpl-2.0 | -7,936,042,453,462,303,000 | 34.645161 | 166 | 0.660633 | false |
ircah/cah-js | util/convert-csv.py | 1 | 1612 | #!/usr/bin/env python3
import re
import json # turns out the dump function of the json5 module just calls the normal json module (╯°□°)╯︵ ┻━┻
INPUT = "cards-DevOpsAgainstHumanity.csv"
META_NAME = "DevOps Against Humanity"
DELIM = ","
QUOTE = "\""
SKIPLINES = 2
def parse_csv(line):
a = []
tmp = ""
at_elem_start = True
in_quotes = False
in_escape = False
for c in line:
if at_elem_start:
if c == DELIM: # empty element
a.append("")
continue
in_quotes = (c == QUOTE)
if not in_quotes:
tmp += c
at_elem_start = False
continue
if c == QUOTE and in_quotes and not in_escape:
in_escape = True
elif c == QUOTE and in_quotes and in_escape:
tmp += QUOTE
in_escape = False
elif (c == DELIM and in_quotes and in_escape) or (c == DELIM and not in_quotes):
a.append(tmp)
tmp = ""
in_escape = False
at_elem_start = True
else:
tmp += c
a.append(tmp)
return a
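# Quick illustration (added comment, not part of the original script):
# parse_csv honours the QUOTE/DELIM settings above, so
#   parse_csv('"Answer, with comma","He said ""hi"""')
# returns
#   ['Answer, with comma', 'He said "hi"']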
r_blank = re.compile(r"_+")
odict = {}
odict["questions"] = []
odict["answers"] = []
odict["meta"] = {}
odict["meta"]["name"] = META_NAME
ifd = open(INPUT, "r")
for i in range(SKIPLINES):
ifd.readline()
n = 0
while True:
l = ifd.readline()
if not l:
break
l = l.rstrip("\r\n")
l = parse_csv(l)
if l[0] != "":
odict["answers"].append(l[0])
n += 1
if l[1] != "":
tmp = {}
tmp["text"] = re.sub(r_blank, "%s", l[1])
# pick is inferred from number of %s
odict["questions"].append(tmp)
n += 1
ifd.close()
ofd = open(INPUT.replace(".csv", ".json5"), "w")
json.dump(odict, ofd, indent=2, sort_keys=True)
ofd.close()
print("Processed %d cards." % (n, ))
| mit | 5,042,605,997,194,050,000 | 19.410256 | 108 | 0.600503 | false |
divio/askbot-devel | askbot/migrations/0041_add_field_comment_offensive_flag_count.py | 1 | 26899 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Comment.offensive_flag_count'
db.add_column(u'comment', 'offensive_flag_count', self.gf('django.db.models.fields.IntegerField')(default=0), keep_default=False)
def backwards(self, orm):
# Deleting field 'Comment.offensive_flag_count'
db.delete_column(u'comment', 'offensive_flag_count')
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True'}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'symmetrical': 'False', 'through': "orm['askbot.ActivityAuditStatus']", 'to': "orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.activityauditstatus': {
'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Question']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['askbot.Question']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.answerrevision': {
'Meta': {'ordering': "('-revision',)", 'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badgedata': {
'Meta': {'ordering': "('slug',)", 'object_name': 'BadgeData'},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "orm['askbot.Award']", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'askbot.comment': {
'Meta': {'ordering': "('-added_at',)", 'object_name': 'Comment', 'db_table': "u'comment'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'offensive_flag_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
},
'askbot.emailfeedsetting': {
'Meta': {'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.question': {
'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'symmetrical': 'False', 'through': "orm['askbot.FavoriteQuestion']", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.questionrevision': {
'Meta': {'ordering': "('-revision',)", 'object_name': 'QuestionRevision', 'db_table': "u'question_revision'"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questionrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Question']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Question']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True', 'blank': 'True'}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'ordering': "('-used_count', 'name')", 'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.vote': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'has_custom_avatar': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['askbot']
| gpl-3.0 | -8,386,394,080,592,694,000 | 85.770968 | 221 | 0.558868 | false |
dyomas/pyhrol | examples/example_0040.py | 1 | 1889 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2013, 2014, Pyhrol, [email protected]
# GEO: N55.703431,E37.623324 .. N48.742359,E44.536997
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 4. Neither the name of the Pyhrol nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
import example_0040
example_0040.function_with_default_args()
example_0040.function_with_default_args(1)
example_0040.function_with_default_args(2, 'Two')
try:
example_0040.function_with_default_args(2.1)
except TypeError as ex:
print '***', ex
| bsd-3-clause | 980,894,107,989,928,400 | 46.225 | 77 | 0.758602 | false |
miguelgrinberg/python-socketio | tests/common/test_pubsub_manager.py | 1 | 13274 | import functools
import logging
import unittest
from unittest import mock
import pytest
from socketio import base_manager
from socketio import pubsub_manager
class TestPubSubManager(unittest.TestCase):
def setUp(self):
id = 0
def generate_id():
nonlocal id
id += 1
return str(id)
mock_server = mock.MagicMock()
mock_server.eio.generate_id = generate_id
self.pm = pubsub_manager.PubSubManager()
self.pm._publish = mock.MagicMock()
self.pm.set_server(mock_server)
self.pm.host_id = '123456'
self.pm.initialize()
def test_default_init(self):
assert self.pm.channel == 'socketio'
self.pm.server.start_background_task.assert_called_once_with(
self.pm._thread
)
def test_custom_init(self):
pubsub = pubsub_manager.PubSubManager(channel='foo')
assert pubsub.channel == 'foo'
assert len(pubsub.host_id) == 32
def test_write_only_init(self):
mock_server = mock.MagicMock()
pm = pubsub_manager.PubSubManager(write_only=True)
pm.set_server(mock_server)
pm.initialize()
assert pm.channel == 'socketio'
assert len(pm.host_id) == 32
assert pm.server.start_background_task.call_count == 0
def test_write_only_default_logger(self):
pm = pubsub_manager.PubSubManager(write_only=True)
pm.initialize()
assert pm.channel == 'socketio'
assert len(pm.host_id) == 32
assert pm._get_logger() == logging.getLogger('socketio')
def test_write_only_with_provided_logger(self):
test_logger = logging.getLogger('new_logger')
pm = pubsub_manager.PubSubManager(write_only=True, logger=test_logger)
pm.initialize()
assert pm.channel == 'socketio'
assert len(pm.host_id) == 32
assert pm._get_logger() == test_logger
def test_emit(self):
self.pm.emit('foo', 'bar')
self.pm._publish.assert_called_once_with(
{
'method': 'emit',
'event': 'foo',
'data': 'bar',
'namespace': '/',
'room': None,
'skip_sid': None,
'callback': None,
'host_id': '123456',
}
)
def test_emit_with_namespace(self):
self.pm.emit('foo', 'bar', namespace='/baz')
self.pm._publish.assert_called_once_with(
{
'method': 'emit',
'event': 'foo',
'data': 'bar',
'namespace': '/baz',
'room': None,
'skip_sid': None,
'callback': None,
'host_id': '123456',
}
)
def test_emit_with_room(self):
self.pm.emit('foo', 'bar', room='baz')
self.pm._publish.assert_called_once_with(
{
'method': 'emit',
'event': 'foo',
'data': 'bar',
'namespace': '/',
'room': 'baz',
'skip_sid': None,
'callback': None,
'host_id': '123456',
}
)
def test_emit_with_skip_sid(self):
self.pm.emit('foo', 'bar', skip_sid='baz')
self.pm._publish.assert_called_once_with(
{
'method': 'emit',
'event': 'foo',
'data': 'bar',
'namespace': '/',
'room': None,
'skip_sid': 'baz',
'callback': None,
'host_id': '123456',
}
)
def test_emit_with_callback(self):
with mock.patch.object(
self.pm, '_generate_ack_id', return_value='123'
):
self.pm.emit('foo', 'bar', room='baz', callback='cb')
self.pm._publish.assert_called_once_with(
{
'method': 'emit',
'event': 'foo',
'data': 'bar',
'namespace': '/',
'room': 'baz',
'skip_sid': None,
'callback': ('baz', '/', '123'),
'host_id': '123456',
}
)
def test_emit_with_callback_without_server(self):
standalone_pm = pubsub_manager.PubSubManager()
with pytest.raises(RuntimeError):
standalone_pm.emit('foo', 'bar', callback='cb')
def test_emit_with_callback_missing_room(self):
with mock.patch.object(
self.pm, '_generate_ack_id', return_value='123'
):
with pytest.raises(ValueError):
self.pm.emit('foo', 'bar', callback='cb')
def test_emit_with_ignore_queue(self):
sid = self.pm.connect('123', '/')
self.pm.emit(
'foo', 'bar', room=sid, namespace='/', ignore_queue=True
)
self.pm._publish.assert_not_called()
self.pm.server._emit_internal.assert_called_once_with(
'123', 'foo', 'bar', '/', None
)
def test_can_disconnect(self):
sid = self.pm.connect('123', '/')
assert self.pm.can_disconnect(sid, '/')
self.pm.can_disconnect(sid, '/foo')
self.pm._publish.assert_called_once_with(
{'method': 'disconnect', 'sid': sid, 'namespace': '/foo'}
)
def test_close_room(self):
self.pm.close_room('foo')
self.pm._publish.assert_called_once_with(
{'method': 'close_room', 'room': 'foo', 'namespace': '/'}
)
def test_close_room_with_namespace(self):
self.pm.close_room('foo', '/bar')
self.pm._publish.assert_called_once_with(
{'method': 'close_room', 'room': 'foo', 'namespace': '/bar'}
)
def test_handle_emit(self):
with mock.patch.object(base_manager.BaseManager, 'emit') as super_emit:
self.pm._handle_emit({'event': 'foo', 'data': 'bar'})
super_emit.assert_called_once_with(
'foo',
'bar',
namespace=None,
room=None,
skip_sid=None,
callback=None,
)
def test_handle_emit_with_namespace(self):
with mock.patch.object(base_manager.BaseManager, 'emit') as super_emit:
self.pm._handle_emit(
{'event': 'foo', 'data': 'bar', 'namespace': '/baz'}
)
super_emit.assert_called_once_with(
'foo',
'bar',
namespace='/baz',
room=None,
skip_sid=None,
callback=None,
)
def test_handle_emit_with_room(self):
with mock.patch.object(base_manager.BaseManager, 'emit') as super_emit:
self.pm._handle_emit(
{'event': 'foo', 'data': 'bar', 'room': 'baz'}
)
super_emit.assert_called_once_with(
'foo',
'bar',
namespace=None,
room='baz',
skip_sid=None,
callback=None,
)
def test_handle_emit_with_skip_sid(self):
with mock.patch.object(base_manager.BaseManager, 'emit') as super_emit:
self.pm._handle_emit(
{'event': 'foo', 'data': 'bar', 'skip_sid': '123'}
)
super_emit.assert_called_once_with(
'foo',
'bar',
namespace=None,
room=None,
skip_sid='123',
callback=None,
)
def test_handle_emit_with_callback(self):
host_id = self.pm.host_id
with mock.patch.object(base_manager.BaseManager, 'emit') as super_emit:
self.pm._handle_emit(
{
'event': 'foo',
'data': 'bar',
'namespace': '/baz',
'callback': ('sid', '/baz', 123),
'host_id': host_id,
}
)
assert super_emit.call_count == 1
assert super_emit.call_args[0] == ('foo', 'bar')
assert super_emit.call_args[1]['namespace'] == '/baz'
assert super_emit.call_args[1]['room'] is None
assert super_emit.call_args[1]['skip_sid'] is None
assert isinstance(
super_emit.call_args[1]['callback'], functools.partial
)
super_emit.call_args[1]['callback']('one', 2, 'three')
self.pm._publish.assert_called_once_with(
{
'method': 'callback',
'host_id': host_id,
'sid': 'sid',
'namespace': '/baz',
'id': 123,
'args': ('one', 2, 'three'),
}
)
def test_handle_callback(self):
host_id = self.pm.host_id
with mock.patch.object(self.pm, 'trigger_callback') as trigger:
self.pm._handle_callback(
{
'method': 'callback',
'host_id': host_id,
'sid': 'sid',
'namespace': '/',
'id': 123,
'args': ('one', 2),
}
)
trigger.assert_called_once_with('sid', 123, ('one', 2))
def test_handle_callback_bad_host_id(self):
with mock.patch.object(self.pm, 'trigger_callback') as trigger:
self.pm._handle_callback(
{
'method': 'callback',
'host_id': 'bad',
'sid': 'sid',
'namespace': '/',
'id': 123,
'args': ('one', 2),
}
)
assert trigger.call_count == 0
def test_handle_callback_missing_args(self):
host_id = self.pm.host_id
with mock.patch.object(self.pm, 'trigger_callback') as trigger:
self.pm._handle_callback(
{
'method': 'callback',
'host_id': host_id,
'sid': 'sid',
'namespace': '/',
'id': 123,
}
)
self.pm._handle_callback(
{
'method': 'callback',
'host_id': host_id,
'sid': 'sid',
'namespace': '/',
}
)
self.pm._handle_callback(
{'method': 'callback', 'host_id': host_id, 'sid': 'sid'}
)
self.pm._handle_callback(
{'method': 'callback', 'host_id': host_id}
)
assert trigger.call_count == 0
def test_handle_disconnect(self):
self.pm._handle_disconnect(
{'method': 'disconnect', 'sid': '123', 'namespace': '/foo'}
)
self.pm.server.disconnect.assert_called_once_with(
sid='123', namespace='/foo', ignore_queue=True
)
def test_handle_close_room(self):
with mock.patch.object(
base_manager.BaseManager, 'close_room'
) as super_close_room:
self.pm._handle_close_room({'method': 'close_room', 'room': 'foo'})
super_close_room.assert_called_once_with(
room='foo', namespace=None
)
def test_handle_close_room_with_namespace(self):
with mock.patch.object(
base_manager.BaseManager, 'close_room'
) as super_close_room:
self.pm._handle_close_room(
{'method': 'close_room', 'room': 'foo', 'namespace': '/bar'}
)
super_close_room.assert_called_once_with(
room='foo', namespace='/bar'
)
def test_background_thread(self):
self.pm._handle_emit = mock.MagicMock()
self.pm._handle_callback = mock.MagicMock()
self.pm._handle_disconnect = mock.MagicMock()
self.pm._handle_close_room = mock.MagicMock()
def messages():
import pickle
yield {'method': 'emit', 'value': 'foo'}
yield {'missing': 'method'}
yield '{"method": "callback", "value": "bar"}'
yield {'method': 'disconnect', 'sid': '123', 'namespace': '/foo'}
yield {'method': 'bogus'}
yield pickle.dumps({'method': 'close_room', 'value': 'baz'})
yield 'bad json'
yield b'bad pickled'
self.pm._listen = mock.MagicMock(side_effect=messages)
try:
self.pm._thread()
except StopIteration:
pass
self.pm._handle_emit.assert_called_once_with(
{'method': 'emit', 'value': 'foo'}
)
self.pm._handle_callback.assert_called_once_with(
{'method': 'callback', 'value': 'bar'}
)
self.pm._handle_disconnect.assert_called_once_with(
{'method': 'disconnect', 'sid': '123', 'namespace': '/foo'}
)
self.pm._handle_close_room.assert_called_once_with(
{'method': 'close_room', 'value': 'baz'}
)
| mit | 408,921,371,623,106,050 | 33.035897 | 79 | 0.469715 | false |
enitram/mogli | tests/discover_python_tests.py | 1 | 2430 | ########################################################################################################################
# mogli - molecular graph library #
# #
# Copyright (C) 2016-2019 Martin S. Engler #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Lesser General Public License as published #
# by the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
########################################################################################################################
import unittest
def print_suite(suite):
if hasattr(suite, '__iter__'):
for x in suite:
print_suite(x)
else:
print(suite.id())
if __name__ == '__main__':
print_suite(unittest.defaultTestLoader.discover('.'))
| lgpl-3.0 | -3,808,304,955,220,517,400 | 74.9375 | 120 | 0.286831 | false |
rschnapka/account-closing | account_cutoff_prepaid/account.py | 1 | 6258 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Account Cut-off Prepaid module for OpenERP
# Copyright (C) 2013 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
from openerp.tools.translate import _
class account_invoice_line(orm.Model):
_inherit = 'account.invoice.line'
_columns = {
'start_date': fields.date('Start Date'),
'end_date': fields.date('End Date'),
}
def _check_start_end_dates(self, cr, uid, ids):
for invline in self.browse(cr, uid, ids):
if invline.start_date and not invline.end_date:
raise orm.except_orm(
_('Error:'),
_("Missing End Date for invoice line with "
"Description '%s'.")
% (invline.name))
if invline.end_date and not invline.start_date:
raise orm.except_orm(
_('Error:'),
_("Missing Start Date for invoice line with "
"Description '%s'.")
% (invline.name))
if invline.end_date and invline.start_date and \
invline.start_date > invline.end_date:
raise orm.except_orm(
_('Error:'),
_("Start Date should be before or be the same as "
"End Date for invoice line with Description '%s'.")
% (invline.name))
# Note : we can't check invline.product_id.must_have_dates
# have start_date and end_date here, because it would
# block automatic invoice generation. So we do the check
# upon validation of the invoice (see below the function
# action_move_create)
return True
_constraints = [
(_check_start_end_dates, "Error msg in raise",
['start_date', 'end_date', 'product_id']),
]
def move_line_get_item(self, cr, uid, line, context=None):
res = super(account_invoice_line, self).move_line_get_item(
cr, uid, line, context=context)
res['start_date'] = line.start_date
res['end_date'] = line.end_date
return res
class account_move_line(orm.Model):
_inherit = "account.move.line"
_columns = {
'start_date': fields.date('Start Date'),
'end_date': fields.date('End Date'),
}
def _check_start_end_dates(self, cr, uid, ids):
for moveline in self.browse(cr, uid, ids):
if moveline.start_date and not moveline.end_date:
raise orm.except_orm(
_('Error:'),
_("Missing End Date for move line with Name '%s'.")
% (moveline.name))
if moveline.end_date and not moveline.start_date:
raise orm.except_orm(
_('Error:'),
_("Missing Start Date for move line with Name '%s'.")
% (moveline.name))
if moveline.end_date and moveline.start_date and \
moveline.start_date > moveline.end_date:
raise orm.except_orm(
_('Error:'),
_("Start Date should be before End Date for move line "
"with Name '%s'.")
% (moveline.name))
# should we check that it's related to an expense / revenue ?
# -> I don't think so
return True
_constraints = [(
_check_start_end_dates,
"Error msg in raise",
['start_date', 'end_date']
)]
class account_invoice(orm.Model):
_inherit = 'account.invoice'
def inv_line_characteristic_hashcode(self, invoice, invoice_line):
'''Add start and end dates to hashcode used when the option "Group
Invoice Lines" is active on the Account Journal'''
code = super(account_invoice, self).inv_line_characteristic_hashcode(
invoice, invoice_line)
hashcode = '%s-%s-%s' % (
code, invoice_line.get('start_date', 'False'),
invoice_line.get('end_date', 'False'),
)
return hashcode
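    # Illustrative example (hypothetical values): with "Group Invoice Lines" enabled,
    # a line covering 2014-01-01 .. 2014-03-31 yields a hashcode such as
    # '<base hashcode>-2014-01-01-2014-03-31', so invoice lines with different
    # service periods are never grouped into the same move line.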
def line_get_convert(self, cr, uid, x, part, date, context=None):
res = super(account_invoice, self).line_get_convert(
cr, uid, x, part, date, context=context)
res['start_date'] = x.get('start_date', False)
res['end_date'] = x.get('end_date', False)
return res
def action_move_create(self, cr, uid, ids, context=None):
'''Check that products with must_have_dates=True have
Start and End Dates'''
for invoice in self.browse(cr, uid, ids, context=context):
for invline in invoice.invoice_line:
if invline.product_id and invline.product_id.must_have_dates:
if not invline.start_date or not invline.end_date:
raise orm.except_orm(
_('Error:'),
_("Missing Start Date and End Date for invoice "
"line with Product '%s' which has the "
"property 'Must Have Start and End Dates'.")
% (invline.product_id.name))
return super(account_invoice, self).action_move_create(
cr, uid, ids, context=context)
| agpl-3.0 | 7,632,116,702,459,464,000 | 40.72 | 78 | 0.541067 | false |
botswana-harvard/getresults-receive | getresults_receive/tests/test_batch_item_form.py | 1 | 4314 | from django.utils import timezone
from django.test.testcases import TransactionTestCase
from getresults_patient.tests.factories import PatientFactory
from getresults_patient.models import Patient
from ..views import ReceiveView
from ..forms import BatchItemForm
from ..models import Batch, BatchItem
class TestBatchItemForm(TransactionTestCase):
def setUp(self):
self.data = {}
self.batch_items = []
self.batch = Batch.objects.create(item_count=3, specimen_type='WB')
self.patient = PatientFactory()
def test_batch_item_invalid_if_no_required(self):
batch_item_data = dict(
protocol_number='bhp066',
specimen_type='WB',
specimen_reference='AAA0023',
collection_datetime=timezone.now(),
)
batch_item_form = BatchItemForm(data=batch_item_data)
self.assertFalse(batch_item_form.is_valid())
def test_batch_item_valid_if_onlyrequired(self):
patient = PatientFactory()
batch = Batch.objects.create(item_count=3, specimen_type='WB', )
batch_items = dict(
tube_count=5,
patient=patient.id,
batch=batch.id,
collection_datetime=timezone.now(),
specimen_reference='ABCDSF',
specimen_type='PL'
)
batch_item_form = BatchItemForm(data=batch_items)
self.assertTrue(batch_item_form.is_valid())
def test_batch_item_valid_with_all(self):
patient = PatientFactory()
batch = Batch.objects.create(item_count=3, specimen_type='WB', )
batch_items = dict(
tube_count=5,
protocol_number='bhp066',
patient=patient.id,
batch=batch.id,
specimen_type='WB',
collection_datetime=timezone.now(),
specimen_reference='ABCDFS',
specimen_condition='PL',
clinician_initials='TS',
site_code='14'
)
batch_item_form = BatchItemForm(data=batch_items)
self.assertTrue(batch_item_form.is_valid())
def test_invalid_batch_has_errormsg(self):
patient = PatientFactory()
batch_items = dict(
patient=patient.id,
collection_datetime=timezone.now(),
specimen_type='WB',
protocol_number='bhp066')
batch_item_form = BatchItemForm(data=batch_items)
self.assertIn('batch', batch_item_form.errors)
def test_batch_form_data_validates_in_view(self):
batch = Batch.objects.create(item_count=3, specimen_type='WB')
patient = PatientFactory()
receive = ReceiveView()
data = dict(
tube_count=5,
patient=patient.id,
batch=batch.id,
collection_datetime=timezone.now(),
specimen_reference='ABCDSF',
specimen_type='PL'
)
batch_item_form = BatchItemForm(data=data)
batch_item_form_list = [batch_item_form.data]
self.assertTrue(receive.validate_batch_items(batch_item_form_list))
def test_batch(self):
batch = Batch.objects.create(item_count=3, specimen_type='WB')
receive = ReceiveView()
self.assertTrue(receive.batch(batch.batch_identifier))
def test_batch_items(self):
batch = Batch.objects.create(item_count=3, specimen_type='WB')
patient = PatientFactory()
receive = ReceiveView()
data = dict(
tube_count=5,
patient=patient.id,
batch=batch.id,
collection_datetime=timezone.now(),
specimen_reference='ABCDSF',
specimen_type='PL'
)
batch_item_form = BatchItemForm(data=data)
batch_items = [batch_item_form.data]
self.assertTrue(receive.batch_items(batch_items))
def test_create_batch_items(self):
batch = Batch.objects.create(item_count=3, specimen_type='WB')
receive = ReceiveView()
patient = PatientFactory()
data = [dict(
tube_count=5,
patient=patient.id,
batch=batch.id,
collection_datetime=timezone.now(),
specimen_reference='ABCDSF',
specimen_type='PL'
)]
self.assertEqual(patient.id, receive.batch_items(data)[0].patient.id)
| gpl-2.0 | 3,324,138,970,762,723,300 | 34.360656 | 77 | 0.602689 | false |
PaddlePaddle/Paddle | python/paddle/fluid/tests/unittests/hybrid_parallel_mp_clip_grad.py | 1 | 1391 | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from __future__ import print_function
import paddle
import numpy as np
from hybrid_parallel_mp_model import TestDistMPTraning
import unittest
import logging
#log = logging.getLogger("HybridParallel")
#log.setLevel(logging.WARNING)
class TestMPClipGrad(TestDistMPTraning):
def build_optimizer(self, model):
grad_clip = paddle.nn.ClipGradByGlobalNorm(2.0)
scheduler = paddle.optimizer.lr.ExponentialDecay(
learning_rate=0.001, gamma=0.999, verbose=True)
optimizer = paddle.optimizer.SGD(scheduler,
grad_clip=grad_clip,
parameters=model.parameters())
return optimizer
if __name__ == "__main__":
unittest.main()
| apache-2.0 | -7,350,561,299,220,626,000 | 33.775 | 74 | 0.698059 | false |
zsiciarz/django-pgallery | pgallery/admin.py | 1 | 2105 | """
Administration for photos and galleries.
"""
from django.contrib import admin
from django.db.models import Count
from django.utils.translation import ugettext_lazy as _
from .forms import PhotoForm
from .models import Gallery, Photo
class PhotoInline(admin.TabularInline):
"""
Administration for photos.
"""
model = Photo
form = PhotoForm
ordering = ["created"]
def get_extra(self, request, obj=None, **kwargs):
return 0 if obj else 3
class GalleryAdmin(admin.ModelAdmin):
"""
Administration for galleries.
"""
list_display = (
"author",
"title",
"status",
# Having "description" here raises SystemCheckError (admin.E108).
# We need to remove description from list_display for Django 2.1-2.2
# See https://code.djangoproject.com/ticket/30543
# "description",
"shot_date",
"modified",
"photo_count",
)
list_display_links = ("title",)
list_editable = ("status",)
list_filter = ("status",)
date_hierarchy = "shot_date"
prepopulated_fields = {"slug": ("title",)}
inlines = [PhotoInline]
def photo_count(self, obj):
return obj.photo_count
photo_count.short_description = _("Photo count")
def get_queryset(self, request):
"""
Add number of photos to each gallery.
"""
qs = super(GalleryAdmin, self).get_queryset(request)
return qs.annotate(photo_count=Count("photos"))
def save_model(self, request, obj, form, change):
"""
Set currently authenticated user as the author of the gallery.
"""
obj.author = request.user
obj.save()
def save_formset(self, request, form, formset, change):
"""
For each photo set it's author to currently authenticated user.
"""
instances = formset.save(commit=False)
for instance in instances:
if isinstance(instance, Photo):
instance.author = request.user
instance.save()
admin.site.register(Gallery, GalleryAdmin)
| mit | 6,763,024,374,916,542,000 | 25.3125 | 76 | 0.614252 | false |
dgketchum/satellite_image | sat_image/image.py | 1 | 27285 | # =============================================================================================
# Copyright 2017 dgketchum
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================================
import os
import shutil
from rasterio import open as rasopen
from numpy import where, pi, cos, nan, inf, true_divide, errstate, log
from numpy import float32, sin, deg2rad, array, isnan
from shapely.geometry import Polygon, mapping
from fiona import open as fiopen
from fiona.crs import from_epsg
from tempfile import mkdtemp
from datetime import datetime
from bounds import RasterBounds
from sat_image import mtl
class UnmatchedStackGeoError(ValueError):
pass
class InvalidObjectError(TypeError):
pass
class LandsatImage(object):
'''
Object to process landsat images. The parent class: LandsatImage takes a directory
containing untarred files, for now this ingests images that have been downloaded
from USGS earth explorer, using our Landsat578 package.
'''
def __init__(self, obj):
'''
:param obj: Directory containing an unzipped Landsat 5, 7, or 8 image. This should include at least
a tif for each band, and a .mtl file.
'''
self.obj = obj
if os.path.isdir(obj):
self.isdir = True
self.date_acquired = None
self.file_list = os.listdir(obj)
self.tif_list = [x for x in os.listdir(obj) if x.endswith('.TIF')]
self.tif_list.sort()
# parse metadata file into attributes
# structure: {HEADER: {SUBHEADER: {key(attribute), val(attribute value)}}}
self.mtl = mtl.parsemeta(obj)
self.meta_header = list(self.mtl)[0]
self.super_dict = self.mtl[self.meta_header]
for key, val in self.super_dict.items():
for sub_key, sub_val in val.items():
# print(sub_key.lower(), sub_val)
setattr(self, sub_key.lower(), sub_val)
self.satellite = self.landsat_scene_id[:3]
# create numpy nd_array objects for each band
self.band_list = []
self.tif_dict = {}
for i, tif in enumerate(self.tif_list):
raster = os.path.join(self.obj, tif)
# set all lower case attributes
tif = tif.lower()
front_ind = tif.index('b')
end_ind = tif.index('.tif')
att_string = tif[front_ind: end_ind]
self.band_list.append(att_string)
self.tif_dict[att_string] = raster
self.band_count = i + 1
if i == 0:
with rasopen(raster) as src:
transform = src.transform
profile = src.profile
meta = src.meta.copy()
self.rasterio_geometry = meta
self.profile = profile
self.transform = transform
self.shape = (1, profile['height'], profile['width'])
bounds = RasterBounds(affine_transform=transform,
profile=profile,
latlon=False)
self.bounds = bounds
self.north, self.west, self.south, self.east = bounds.get_nwse_tuple()
self.coords = bounds.as_tuple('nsew')
self.solar_zenith = 90. - self.sun_elevation
self.solar_zenith_rad = self.solar_zenith * pi / 180
self.sun_elevation_rad = self.sun_elevation * pi / 180
self.earth_sun_dist = self.earth_sun_d(self.date_acquired)
dtime = datetime.strptime(str(self.date_acquired), '%Y-%m-%d')
julian_day = dtime.strftime('%j')
self.doy = int(julian_day)
self.scene_coords_deg = self._scene_centroid()
self.scene_coords_rad = deg2rad(self.scene_coords_deg[0]), deg2rad(self.scene_coords_deg[1])
def _get_band(self, band_str):
path = self.tif_dict[band_str]
with rasopen(path) as src:
arr = src.read(1)
arr = array(arr, dtype=float32)
arr[arr < 1.] = nan
return arr
def _scene_centroid(self):
""" Compute image center coordinates
:return: Tuple of image center in lat, lon
"""
ul_lat = self.corner_ul_lat_product
ll_lat = self.corner_ll_lat_product
ul_lon = self.corner_ul_lon_product
ur_lon = self.corner_ur_lon_product
lat = (ul_lat + ll_lat) / 2.
lon = (ul_lon + ur_lon) / 2.
return lat, lon
@staticmethod
def earth_sun_d(dtime):
""" Earth-sun distance in AU
        :param dtime: time, e.g. datetime.datetime(2007, 5, 1)
        :type dtime: datetime.datetime
        :return: float, distance from sun to earth in astronomical units
"""
doy = int(dtime.strftime('%j'))
rad_term = 0.9856 * (doy - 4) * pi / 180
distance_au = 1 - 0.01672 * cos(rad_term)
return distance_au
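    # Rough worked check (illustrative, not from the source): for 1 May (doy = 121),
    # rad_term is 0.9856 * 117 degrees converted to radians, whose cosine is about
    # -0.43, so the distance comes out near 1 - 0.01672 * (-0.43) ~= 1.007 AU.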
@staticmethod
def _divide_zero(a, b, replace=0):
with errstate(divide='ignore', invalid='ignore'):
c = true_divide(a, b)
c[c == inf] = replace
return c
def get_tile_geometry(self, output_filename=None, geographic_coords=False):
if not output_filename:
temp_dir = mkdtemp()
temp = os.path.join(temp_dir, 'shape.shp')
else:
temp = output_filename
# corners = {'ul': (self.corner_ul_projection_x_product,
# self.corner_ul_projection_y_product),
# 'll': (self.corner_ll_projection_x_product,
# self.corner_ll_projection_y_product),
# 'lr': (self.corner_lr_projection_x_product,
# self.corner_lr_projection_y_product),
# 'ur': (self.corner_ur_projection_x_product,
# self.corner_ur_projection_y_product)}
if geographic_coords:
points = [(self.north, self.west), (self.south, self.west),
(self.south, self.east), (self.north, self.east),
(self.north, self.west)]
else:
points = [(self.west, self.north), (self.west, self.south),
(self.east, self.south), (self.east, self.north),
(self.west, self.north)]
polygon = Polygon(points)
schema = {'geometry': 'Polygon',
'properties': {'id': 'int'}}
crs = from_epsg(int(self.rasterio_geometry['crs']['init'].split(':')[1]))
with fiopen(temp, 'w', 'ESRI Shapefile', schema=schema, crs=crs) as shp:
shp.write({
'geometry': mapping(polygon),
'properties': {'id': 1}})
if output_filename:
return None
with fiopen(temp, 'r') as src:
features = [f['geometry'] for f in src]
if not output_filename:
try:
shutil.rmtree(temp_dir)
except UnboundLocalError:
pass
return features
def save_array(self, arr, output_filename):
geometry = self.rasterio_geometry
arr = arr.reshape(1, arr.shape[0], arr.shape[1])
geometry['dtype'] = arr.dtype
with rasopen(output_filename, 'w', **geometry) as dst:
dst.write(arr)
return None
def mask_by_image(self, arr):
image = self._get_band('b1')
image = array(image, dtype=float32)
image[image < 1.] = nan
arr = where(isnan(image), nan, arr)
return arr
def mask(self):
image = self._get_band('b1')
image = array(image, dtype=float32)
image[image < 1.] = nan
arr = where(isnan(image), 0, 1)
return arr
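# Minimal usage sketch (hypothetical paths; assumes an untarred scene directory with
# its band TIFs and MTL file, as described in the LandsatImage docstring):
#
#     image = Landsat5('/data/LT50360292007146PAC01')
#     ndvi = image.ndvi()
#     image.save_array(ndvi, '/data/LT50360292007146PAC01_ndvi.tif')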
class Landsat5(LandsatImage):
def __init__(self, obj):
LandsatImage.__init__(self, obj)
if self.satellite != 'LT5':
raise ValueError('Must init Landsat5 object with Landsat5 data, not {}'.format(self.satellite))
# https://landsat.usgs.gov/esun
self.ex_atm_irrad = (1958.0, 1827.0, 1551.0,
1036.0, 214.9, nan, 80.65)
# old values from fmask.exe
# self.ex_atm_irrad = (1983.0, 1796.0, 1536.0, 1031.0, 220.0, nan, 83.44)
self.k1, self.k2 = 607.76, 1260.56
def radiance(self, band):
qcal_min = getattr(self, 'quantize_cal_min_band_{}'.format(band))
qcal_max = getattr(self, 'quantize_cal_max_band_{}'.format(band))
l_min = getattr(self, 'radiance_minimum_band_{}'.format(band))
l_max = getattr(self, 'radiance_maximum_band_{}'.format(band))
qcal = self._get_band('b{}'.format(band))
rad = ((l_max - l_min) / (qcal_max - qcal_min)) * (qcal - qcal_min) + l_min
return rad.astype(float32)
def brightness_temp(self, band, temp_scale='K'):
if band in [1, 2, 3, 4, 5, 7]:
raise ValueError('LT5 brightness must be band 6')
rad = self.radiance(band)
brightness = self.k2 / (log((self.k1 / rad) + 1))
if temp_scale == 'K':
return brightness
elif temp_scale == 'F':
return brightness * (9 / 5.0) - 459.67
elif temp_scale == 'C':
return brightness - 273.15
else:
raise ValueError('{} is not a valid temperature scale'.format(temp_scale))
def reflectance(self, band):
"""
:param band: An optical band, i.e. 1-5, 7
:return: At satellite reflectance, [-]
"""
if band == 6:
raise ValueError('LT5 reflectance must be other than band 6')
rad = self.radiance(band)
esun = self.ex_atm_irrad[band - 1]
toa_reflect = (pi * rad * self.earth_sun_dist ** 2) / (esun * cos(self.solar_zenith_rad))
return toa_reflect
def albedo(self, model='smith'):
"""Finds broad-band surface reflectance (albedo)
        Smith (2010), "The heat budget of the earth's surface deduced from space"
LT5 toa reflectance bands 1, 3, 4, 5, 7
# normalized i.e. 0.356 + 0.130 + 0.373 + 0.085 + 0.07 = 1.014
Should have option for Liang, 2000;
Tasumi (2008), "At-Surface Reflectance and Albedo from Satellite for
Operational Calculation of Land Surface Energy Balance"
:return albedo array of floats
"""
if model == 'smith':
blue, red, nir, swir1, swir2 = (self.reflectance(1), self.reflectance(3), self.reflectance(4),
self.reflectance(5), self.reflectance(7))
alb = (0.356 * blue + 0.130 * red + 0.373 * nir + 0.085 * swir1 + 0.072 * swir2 - 0.0018) / 1.014
elif model == 'tasumi':
            # The Tasumi (2008) weighting is not implemented yet; raise instead of
            # falling through to return an undefined variable.
            raise NotImplementedError('Tasumi albedo model is not implemented')
return alb
def saturation_mask(self, band, value=255):
""" Mask saturated pixels, 1 (True) is saturated.
:param band: Image band with dn values, type: array
:param value: Maximum (saturated) value, i.e. 255 for 8-bit data, type: int
:return: boolean array
"""
dn = self._get_band('b{}'.format(band))
mask = self.mask()
mask = where((dn == value) & (mask > 0), True, False)
return mask
def ndvi(self):
""" Normalized difference vegetation index.
:return: NDVI
"""
red, nir = self.reflectance(3), self.reflectance(4)
ndvi = self._divide_zero((nir - red), (nir + red), nan)
return ndvi
def lai(self):
"""
Leaf area index (LAI), or the surface area of leaves to surface area ground.
Trezza and Allen, 2014
        Uses the scene NDVI (bands 3 and 4); takes no arguments.
:return: LAI [-]
"""
ndvi = self.ndvi()
lai = 7.0 * (ndvi ** 3)
lai = where(lai > 6., 6., lai)
return lai
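    # Worked example (illustrative): an NDVI of 0.8 gives LAI = 7.0 * 0.8 ** 3 = 3.584;
    # any value that would exceed 6 is clamped to 6.0 by the where() call above.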
def emissivity(self, approach='tasumi'):
ndvi = self.ndvi()
if approach == 'tasumi':
lai = self.lai()
# Tasumi et al., 2003
# narrow-band emissivity
nb_epsilon = where((ndvi > 0) & (lai <= 3), 0.97 + 0.0033 * lai, nan)
nb_epsilon = where((ndvi > 0) & (lai > 3), 0.98, nb_epsilon)
nb_epsilon = where(ndvi <= 0, 0.99, nb_epsilon)
return nb_epsilon
if approach == 'sobrino':
# Sobrino et el., 2004
red = self.reflectance(3)
bound_ndvi = where(ndvi > 0.5, ndvi, 0.99)
bound_ndvi = where(ndvi < 0.2, red, bound_ndvi)
pv = ((ndvi - 0.2) / (0.5 - 0.2)) ** 2
pv_emiss = 0.004 * pv + 0.986
emissivity = where((ndvi >= 0.2) & (ndvi <= 0.5), pv_emiss, bound_ndvi)
return emissivity
def land_surface_temp(self):
"""
Mean values from Allen (2007)
        :return: land surface temperature [K]
"""
rp = 0.91
tau = 0.866
rsky = 1.32
epsilon = self.emissivity(approach='tasumi')
radiance = self.radiance(6)
rc = ((radiance - rp) / tau) - ((1 - epsilon) * rsky)
lst = self.k2 / (log((epsilon * self.k1 / rc) + 1))
return lst
def ndsi(self):
""" Normalized difference snow index.
:return: NDSI
"""
green, swir1 = self.reflectance(2), self.reflectance(5)
ndsi = self._divide_zero((green - swir1), (green + swir1), nan)
return ndsi
class Landsat7(LandsatImage):
def __init__(self, obj):
LandsatImage.__init__(self, obj)
if self.satellite != 'LE7':
            raise ValueError('Must init Landsat7 object with Landsat7 data, not {}'.format(self.satellite))
# https://landsat.usgs.gov/esun; Landsat 7 Handbook
self.ex_atm_irrad = (1970.0, 1842.0, 1547.0, 1044.0,
255.700, nan, 82.06, 1369.00)
self.k1, self.k2 = 666.09, 1282.71
def radiance(self, band):
if band == 6:
band = '6_vcid_1'
qcal_min = getattr(self, 'quantize_cal_min_band_{}'.format(band))
qcal_max = getattr(self, 'quantize_cal_max_band_{}'.format(band))
l_min = getattr(self, 'radiance_minimum_band_{}'.format(band))
l_max = getattr(self, 'radiance_maximum_band_{}'.format(band))
qcal = self._get_band('b{}'.format(band))
rad = ((l_max - l_min) / (qcal_max - qcal_min)) * (qcal - qcal_min) + l_min
return rad
def brightness_temp(self, band=6, gain='low', temp_scale='K'):
if band in [1, 2, 3, 4, 5, 7, 8]:
raise ValueError('LE7 brightness must be either vcid_1 or vcid_2')
if gain == 'low':
# low gain : b6_vcid_1
band_gain = '6_vcid_1'
else:
band_gain = '6_vcid_2'
rad = self.radiance(band_gain)
brightness = self.k2 / (log((self.k1 / rad) + 1))
if temp_scale == 'K':
return brightness
elif temp_scale == 'F':
return brightness * (9 / 5.0) - 459.67
elif temp_scale == 'C':
return brightness - 273.15
else:
raise ValueError('{} is not a valid temperature scale'.format(temp_scale))
def reflectance(self, band):
"""
:param band: An optical band, i.e. 1-5, 7
:return: At satellite reflectance, [-]
"""
if band in ['b6_vcid_1', 'b6_vcid_2']:
raise ValueError('LE7 reflectance must not be b6_vcid_1 or b6_vcid_2')
rad = self.radiance(band)
esun = self.ex_atm_irrad[band - 1]
toa_reflect = (pi * rad * self.earth_sun_dist ** 2) / (esun * cos(self.solar_zenith_rad))
return toa_reflect
def albedo(self):
"""Finds broad-band surface reflectance (albedo)
        Smith (2010), "The heat budget of the earth's surface deduced from space"
Should have option for Liang, 2000;
LE7 toa reflectance bands 1, 3, 4, 5, 7
# normalized i.e. 0.356 + 0.130 + 0.373 + 0.085 + 0.07 = 1.014
:return albedo array of floats
"""
blue, red, nir, swir1, swir2 = (self.reflectance(1), self.reflectance(3), self.reflectance(4),
self.reflectance(5), self.reflectance(7))
alb = (0.356 * blue + 0.130 * red + 0.373 * nir + 0.085 * swir1 + 0.072 * swir2 - 0.0018) / 1.014
return alb
def saturation_mask(self, band, value=255):
""" Mask saturated pixels, 1 (True) is saturated.
:param band: Image band with dn values, type: array
:param value: Maximum (saturated) value, i.e. 255 for 8-bit data, type: int
:return: boolean array
"""
dn = self._get_band('b{}'.format(band))
mask = where((dn == value) & (self.mask() > 0), True, False)
return mask
def ndvi(self):
""" Normalized difference vegetation index.
:return: NDVI
"""
red, nir = self.reflectance(3), self.reflectance(4)
ndvi = self._divide_zero((nir - red), (nir + red), nan)
return ndvi
def lai(self):
"""
Leaf area index (LAI), or the surface area of leaves to surface area ground.
Trezza and Allen, 2014
        Uses the scene NDVI (bands 3 and 4); takes no arguments.
:return: LAI [-]
"""
ndvi = self.ndvi()
lai = 7.0 * (ndvi ** 3)
lai = where(lai > 6., 6., lai)
return lai
def emissivity(self, approach='tasumi'):
ndvi = self.ndvi()
if approach == 'tasumi':
lai = self.lai()
# Tasumi et al., 2003
# narrow-band emissivity
nb_epsilon = where((ndvi > 0) & (lai <= 3), 0.97 + 0.0033 * lai, nan)
nb_epsilon = where((ndvi > 0) & (lai > 3), 0.98, nb_epsilon)
nb_epsilon = where(ndvi <= 0, 0.99, nb_epsilon)
return nb_epsilon
if approach == 'sobrino':
# Sobrino et el., 2004
red = self.reflectance(3)
bound_ndvi = where(ndvi > 0.5, ndvi, 0.99)
bound_ndvi = where(ndvi < 0.2, red, bound_ndvi)
pv = ((ndvi - 0.2) / (0.5 - 0.2)) ** 2
pv_emiss = 0.004 * pv + 0.986
emissivity = where((ndvi >= 0.2) & (ndvi <= 0.5), pv_emiss, bound_ndvi)
return emissivity
def land_surface_temp(self):
rp = 0.91
tau = 0.866
rsky = 1.32
epsilon = self.emissivity()
rc = ((self.radiance(6) - rp) / tau) - ((1 - epsilon) * rsky)
lst = self.k2 / (log((epsilon * self.k1 / rc) + 1))
return lst
def ndsi(self):
""" Normalized difference snow index.
:return NDSI
"""
green, swir1 = self.reflectance(2), self.reflectance(5)
ndsi = self._divide_zero((green - swir1), (green + swir1), nan)
return ndsi
class Landsat8(LandsatImage):
def __init__(self, obj):
LandsatImage.__init__(self, obj)
self.oli_bands = [1, 2, 3, 4, 5, 6, 7, 8, 9]
def brightness_temp(self, band, temp_scale='K'):
"""Calculate brightness temperature of Landsat 8
as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php
T = K2 / log((K1 / L) + 1)
and
L = ML * Q + AL
where:
T = At-satellite brightness temperature (degrees kelvin)
            L = TOA spectral radiance (Watts / (m2 * srad * micrometer))
ML = Band-specific multiplicative rescaling factor from the metadata
(RADIANCE_MULT_BAND_x, where x is the band number)
AL = Band-specific additive rescaling factor from the metadata
(RADIANCE_ADD_BAND_x, where x is the band number)
Q = Quantized and calibrated standard product pixel values (DN)
(ndarray img)
K1 = Band-specific thermal conversion constant from the metadata
(K1_CONSTANT_BAND_x, where x is the thermal band number)
K2 = Band-specific thermal conversion constant from the metadata
            (K2_CONSTANT_BAND_x, where x is the thermal band number)
Returns
--------
ndarray:
float32 ndarray with shape == input shape
"""
if band in self.oli_bands:
            raise ValueError('Landsat 8 brightness should be a TIRS band (i.e. 10 or 11)')
k1 = getattr(self, 'k1_constant_band_{}'.format(band))
k2 = getattr(self, 'k2_constant_band_{}'.format(band))
rad = self.radiance(band)
brightness = k2 / log((k1 / rad) + 1)
if temp_scale == 'K':
return brightness
elif temp_scale == 'F':
return brightness * (9 / 5.0) - 459.67
elif temp_scale == 'C':
return brightness - 273.15
else:
raise ValueError('{} is not a valid temperature scale'.format(temp_scale))
def reflectance(self, band):
"""Calculate top of atmosphere reflectance of Landsat 8
as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php
R_raw = MR * Q + AR
R = R_raw / cos(Z) = R_raw / sin(E)
Z = 90 - E (in degrees)
where:
R_raw = TOA planetary reflectance, without correction for solar angle.
R = TOA reflectance with a correction for the sun angle.
MR = Band-specific multiplicative rescaling factor from the metadata
(REFLECTANCE_MULT_BAND_x, where x is the band number)
AR = Band-specific additive rescaling factor from the metadata
(REFLECTANCE_ADD_BAND_x, where x is the band number)
Q = Quantized and calibrated standard product pixel values (DN)
E = Local sun elevation angle. The scene center sun elevation angle
in degrees is provided in the metadata (SUN_ELEVATION).
Z = Local solar zenith angle (same angle as E, but measured from the
zenith instead of from the horizon).
Returns
--------
ndarray:
float32 ndarray with shape == input shape
"""
if band not in self.oli_bands:
            raise ValueError('Landsat 8 reflectance should be an OLI band (i.e. bands 1-9)')
elev = getattr(self, 'sun_elevation')
dn = self._get_band('b{}'.format(band))
mr = getattr(self, 'reflectance_mult_band_{}'.format(band))
ar = getattr(self, 'reflectance_add_band_{}'.format(band))
if elev < 0.0:
raise ValueError("Sun elevation must be non-negative "
"(sun must be above horizon for entire scene)")
rf = ((mr * dn.astype(float32)) + ar) / sin(deg2rad(elev))
return rf
def radiance(self, band):
"""Calculate top of atmosphere radiance of Landsat 8
as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php
L = ML * Q + AL
where:
L = TOA spectral radiance (Watts / (m2 * srad * mm))
ML = Band-specific multiplicative rescaling factor from the metadata
(RADIANCE_MULT_BAND_x, where x is the band number)
AL = Band-specific additive rescaling factor from the metadata
(RADIANCE_ADD_BAND_x, where x is the band number)
Q = Quantized and calibrated standard product pixel values (DN)
(ndarray img)
Returns
--------
ndarray:
float32 ndarray with shape == input shape
"""
ml = getattr(self, 'radiance_mult_band_{}'.format(band))
al = getattr(self, 'radiance_add_band_{}'.format(band))
dn = self._get_band('b{}'.format(band))
rad = ml * dn.astype(float32) + al
return rad
def albedo(self):
"""Smith (2010), finds broad-band surface reflectance (albedo)
Should have option for Liang, 2000; Tasumi, 2008;
LC8 toa reflectance bands 2, 4, 5, 6, 7
# normalized i.e. 0.356 + 0.130 + 0.373 + 0.085 + 0.07 = 1.014
:return albedo array of floats
"""
blue, red, nir, swir1, swir2 = (self.reflectance(2), self.reflectance(4), self.reflectance(5),
self.reflectance(6), self.reflectance(7))
alb = (0.356 * blue + 0.130 * red + 0.373 * nir + 0.085 * swir1 + 0.072 * swir2 - 0.0018) / 1.014
return alb
def ndvi(self):
""" Normalized difference vegetation index.
:return: NDVI
"""
red, nir = self.reflectance(4), self.reflectance(5)
ndvi = self._divide_zero((nir - red), (nir + red), nan)
return ndvi
def lai(self):
"""
Leaf area index (LAI), or the surface area of leaves to surface area ground.
Trezza and Allen, 2014
        Uses the NDVI computed from this image (no parameters).
:return: LAI [-]
"""
ndvi = self.ndvi()
lai = 7.0 * (ndvi ** 3)
lai = where(lai > 6., 6., lai)
return lai
def emissivity(self, approach='tasumi'):
ndvi = self.ndvi()
if approach == 'tasumi':
lai = self.lai()
# Tasumi et al., 2003
# narrow-band emissivity
nb_epsilon = where((ndvi > 0) & (lai <= 3), 0.97 + 0.0033 * lai, nan)
nb_epsilon = where((ndvi > 0) & (lai > 3), 0.98, nb_epsilon)
nb_epsilon = where(ndvi <= 0, 0.99, nb_epsilon)
return nb_epsilon
if approach == 'sobrino':
            # Sobrino et al., 2004
            red = self.reflectance(4)  # band 4 is red on Landsat 8 OLI (band 3 is green)
bound_ndvi = where(ndvi > 0.5, ndvi, 0.99)
bound_ndvi = where(ndvi < 0.2, red, bound_ndvi)
pv = ((ndvi - 0.2) / (0.5 - 0.2)) ** 2
pv_emiss = 0.004 * pv + 0.986
emissivity = where((ndvi >= 0.2) & (ndvi <= 0.5), pv_emiss, bound_ndvi)
return emissivity
def land_surface_temp(self):
band = 10
k1 = getattr(self, 'k1_constant_band_{}'.format(band))
k2 = getattr(self, 'k2_constant_band_{}'.format(band))
rp = 0.91
tau = 0.866
rsky = 1.32
epsilon = self.emissivity()
rc = ((self.radiance(band) - rp) / tau) - ((1 - epsilon) * rsky)
lst = k2 / (log((epsilon * k1 / rc) + 1))
return lst
def ndsi(self):
""" Normalized difference snow index.
:return: NDSI
"""
green, swir1 = self.reflectance(3), self.reflectance(6)
ndsi = self._divide_zero((green - swir1), (green + swir1), nan)
return ndsi
# =============================================================================================
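# Minimal usage sketch (illustrative only). It assumes 'scene_dir' is a hypothetical path to
# an unpacked Landsat 8 scene directory whose MTL metadata and band rasters LandsatImage can
# read; the method names used below are defined in the classes above.
if __name__ == '__main__':
    scene_dir = '/path/to/LC08_L1TP_scene'  # placeholder path
    l8 = Landsat8(scene_dir)
    ndvi = l8.ndvi()                  # normalized difference vegetation index
    alb = l8.albedo()                 # broad-band albedo, Smith (2010) weighting
    lst = l8.land_surface_temp()      # land surface temperature from TIRS band 10 [K]
    print(ndvi.shape, float(alb.mean()), float(lst.mean()))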
| apache-2.0 | -1,773,811,191,002,346,000 | 34.419481 | 109 | 0.541011 | false |
crunchmail/munch-core | src/munch/apps/transactional/tests/test_policies.py | 1 | 24699 | from django.conf import settings
from django.test import TestCase
from django.utils import timezone
from slimta.envelope import Envelope
from slimta.relay import PermanentRelayError
from slimta.queue import QueueError
from faker import Factory as FakerFactory
from munch.core.models import Category
# from munch.core.utils.tests import temporary_settings
# from munch.core.tests.factories import CategoryFactory
from munch.apps.users.tests.factories import UserFactory
from munch.apps.users.tests.factories import SmtpApplicationFactory
from munch.apps.domains.tests.factories import SendingDomainFactory
from munch.apps.optouts.models import OptOut
from munch.apps.optouts.tests.factories import OptOutFactory
from ..models import Mail
from ..models import MailBatch
from ..policies.relay import headers as headers_policy
from ..policies.queue import bounces
from ..policies.queue import identifier
from ..policies.queue import store_mail
from ..policies.queue import sending_domain
faker = FakerFactory.create()
class PolicyCase(TestCase):
def mk_envelope(self, data, sender=None, recipients=None):
env = Envelope(sender=sender)
env.parse(data)
if not recipients:
recipients = ['root@localhost']
env.recipients = recipients
return env
class TestStoreHeaders(PolicyCase):
def setUp(self):
self.smtp_application = SmtpApplicationFactory()
self.domain = SendingDomainFactory(
name='example.com',
organization=self.smtp_application.author.organization)
self.user = UserFactory(
identifier='[email protected]', last_login=timezone.now())
def test_store_mail_empty(self):
env = self.mk_envelope(b'')
env.client = {'auth': (self.smtp_application.username, None)}
env.user = self.user
with self.assertRaises(PermanentRelayError):
store_mail.Store().apply(env)
def test_store_mail_minimal(self):
env = self.mk_envelope(
b"From: [email protected]\nSubject: foo\nTo: foo@bar",
sender="[email protected]", recipients=['[email protected]'])
env.client = {'auth': (self.smtp_application.username, None)}
env.user = self.user
identifier.Add().apply(env)
bounces.Check().apply(env)
sending_domain.Check().apply(env)
store_mail.Store().apply(env)
mail = Mail.objects.get(
identifier=env.headers.get(settings.TRANSACTIONAL[
'X_MESSAGE_ID_HEADER']))
self.assertEqual(
mail.headers, {
'Subject': 'foo',
'To': 'foo@bar',
'From': "[email protected]",
settings.TRANSACTIONAL[
'X_MESSAGE_ID_HEADER']: env.headers.get(
settings.TRANSACTIONAL['X_MESSAGE_ID_HEADER']),
settings.TRANSACTIONAL[
'X_USER_ID_HEADER']: str(self.user.pk)})
self.assertEqual(mail.sender, '[email protected]')
self.assertEqual(MailBatch.objects.count(), 0)
self.assertEqual(Category.objects.count(), 0)
def test_store_mail_with_batch(self):
headers = (
"From: [email protected]\n"
"Subject: foo\nTo: foo@bar\n{}: foo").format(
settings.TRANSACTIONAL['X_MAIL_BATCH_HEADER'])
env = self.mk_envelope(
headers.encode('utf-8'),
sender="[email protected]", recipients=['[email protected]'])
env.client = {'auth': (self.smtp_application.username, None)}
env.user = self.user
identifier.Add().apply(env)
bounces.Check().apply(env)
sending_domain.Check().apply(env)
store_mail.Store().apply(env)
mail = Mail.objects.get(
identifier=env.headers.get(settings.TRANSACTIONAL[
'X_MESSAGE_ID_HEADER']))
self.assertEqual(mail.batch.name, 'foo')
headers = (
"From: [email protected]\n"
"Subject: foo\nTo: foo@bar\n{}: foo").format(
settings.TRANSACTIONAL['X_MAIL_BATCH_HEADER'])
env = self.mk_envelope(
headers.encode('utf-8'),
sender="[email protected]", recipients=['[email protected]'])
env.client = {'auth': (self.smtp_application.username, None)}
env.user = self.user
identifier.Add().apply(env)
bounces.Check().apply(env)
sending_domain.Check().apply(env)
store_mail.Store().apply(env)
self.assertEqual(MailBatch.objects.count(), 1)
self.assertEqual(Category.objects.count(), 0)
def test_store_mail_another_with_category(self):
headers = (
"From: [email protected]\n"
"Subject: foo\nTo: foo@bar\n{}: foo").format(
settings.TRANSACTIONAL['X_MAIL_BATCH_HEADER'])
env = self.mk_envelope(
headers.encode('utf-8'),
sender="[email protected]", recipients=['[email protected]'])
env.client = {'auth': (self.smtp_application.username, None)}
env.user = self.user
identifier.Add().apply(env)
bounces.Check().apply(env)
sending_domain.Check().apply(env)
store_mail.Store().apply(env)
mail = Mail.objects.get(
identifier=env.headers.get(settings.TRANSACTIONAL[
'X_MESSAGE_ID_HEADER']))
self.assertEqual(mail.batch.name, 'foo')
headers = (
"From: [email protected]\n"
"Subject: foo\nTo: foo@bar\n{}: foo\n{}: foo-cat").format(
settings.TRANSACTIONAL['X_MAIL_BATCH_HEADER'],
settings.TRANSACTIONAL['X_MAIL_BATCH_CATEGORY_HEADER'])
env = self.mk_envelope(
headers.encode('utf-8'),
sender="[email protected]", recipients=['[email protected]'])
env.client = {'auth': (self.smtp_application.username, None)}
env.user = self.user
identifier.Add().apply(env)
bounces.Check().apply(env)
sending_domain.Check().apply(env)
store_mail.Store().apply(env)
self.assertEqual(MailBatch.objects.count(), 1)
self.assertEqual(Category.objects.count(), 1)
def test_store_mail_category_scope(self):
# Create first mail, batch and category
headers = (
"From: [email protected]\n"
"Subject: foo\nTo: foo@bar\n{}: foo\n{}: foo-cat").format(
settings.TRANSACTIONAL['X_MAIL_BATCH_HEADER'],
settings.TRANSACTIONAL['X_MAIL_BATCH_CATEGORY_HEADER'])
env = self.mk_envelope(
headers.encode('utf-8'),
sender="[email protected]", recipients=['[email protected]'])
env.client = {'auth': (self.smtp_application.username, None)}
env.user = self.user
identifier.Add().apply(env)
bounces.Check().apply(env)
sending_domain.Check().apply(env)
store_mail.Store().apply(env)
# mail = Mail.objects.get(
# identifier=env.headers.get(settings.TRANSACTIONAL[
# 'X_MESSAGE_ID_HEADER']))
# Same batch name and category with another user
another_smtp_application = SmtpApplicationFactory()
SendingDomainFactory(
name='example.com',
organization=another_smtp_application.author.organization)
headers = (
"From: [email protected]\n"
"Subject: foo\nTo: foo@bar\n{}: foo\n{}: foo-cat").format(
settings.TRANSACTIONAL['X_MAIL_BATCH_HEADER'],
settings.TRANSACTIONAL['X_MAIL_BATCH_CATEGORY_HEADER'])
env = self.mk_envelope(
headers.encode('utf-8'),
sender="[email protected]", recipients=['[email protected]'])
env.client = {'auth': (another_smtp_application.username, None)}
env.user = self.user
identifier.Add().apply(env)
bounces.Check().apply(env)
sending_domain.Check().apply(env)
store_mail.Store().apply(env)
# Mail.objects.get(
# identifier=env.headers.get(settings.TRANSACTIONAL[
# 'X_MESSAGE_ID_HEADER']))
self.assertEqual(Mail.objects.filter(
author=self.smtp_application.author).count(), 1)
self.assertEqual(Mail.objects.filter(
author=another_smtp_application.author).count(), 1)
self.assertEqual(Mail.objects.count(), 2)
self.assertEqual(MailBatch.objects.filter(
author=self.smtp_application.author).count(), 1)
self.assertEqual(MailBatch.objects.filter(
author=another_smtp_application.author).count(), 1)
self.assertEqual(MailBatch.objects.count(), 2)
self.assertEqual(Category.objects.filter(
author=self.smtp_application.author).count(), 1)
self.assertEqual(Category.objects.filter(
author=another_smtp_application.author).count(), 1)
self.assertEqual(Category.objects.count(), 2)
def test_track_open_no_html(self):
headers = (
"From: [email protected]\n"
"Subject: foo\nTo: foo@bar\n{}: true").format(
settings.TRANSACTIONAL['X_MAIL_TRACK_OPEN_HEADER'])
env = self.mk_envelope(
headers.encode('utf-8') + '\n\nMy Message'.encode('utf-8'),
sender="[email protected]", recipients=['[email protected]'])
env.client = {'auth': (self.smtp_application.username, None)}
env.user = self.user
identifier.Add().apply(env)
bounces.Check().apply(env)
sending_domain.Check().apply(env)
store_mail.Store().apply(env)
headers, body = env.flatten()
self.assertNotIn(
'alt="" height="1" width="1" border="0" />', body.decode('utf-8'))
self.assertNotIn(
'/t/open/{}'.format(env.headers.get(settings.TRANSACTIONAL[
'X_MESSAGE_ID_HEADER'])), body.decode('utf-8'))
def test_track_open(self):
headers = (
'From: [email protected]\n'
'Content-Type: multipart/alternative;\n'
' boundary="===============0445577956452755870=="\n'
'Subject: foo\n'
'To: foo@bar\n'
'{}: true').format(
settings.TRANSACTIONAL['X_MAIL_TRACK_OPEN_HEADER'])
body = (
'--===============0445577956452755870==\n'
'Content-Type: text/plain; charset="us-ascii"\n'
'MIME-Version: 1.0\n'
'Content-Transfer-Encoding: 7bit\n'
'\n'
'My template in text to [1]\n'
'\n'
'[1]: http://google.it\n'
'--===============0445577956452755870==\n'
'Content-Type: text/html; charset="us-ascii"\n'
'MIME-Version: 1.0\n'
'Content-Transfer-Encoding: 7bit\n'
'\n'
'<a href="http://google.fr">Google!</a>\n'
'<a href="http://google.com">Google COM</a>\n'
'<strong>Strong jambon is strong</strong>\n'
'--===============0445577956452755870==--\n')
env = self.mk_envelope(
headers.encode('utf-8') + body.encode('utf-8'),
sender="[email protected]", recipients=['[email protected]'])
env.client = {'auth': (self.smtp_application.username, None)}
env.user = self.user
identifier.Add().apply(env)
bounces.Check().apply(env)
sending_domain.Check().apply(env)
store_mail.Store().apply(env)
headers, body = env.flatten()
self.assertIn(
'alt="" height="1" width="1" border="0"', body.decode('utf-8'))
self.assertIn(
'/t/open/{}'.format(env.headers.get(settings.TRANSACTIONAL[
'X_MESSAGE_ID_HEADER'])), body.decode('utf-8'))
def test_track_clicks(self):
headers = (
'From: [email protected]\n'
'Content-Type: multipart/alternative;\n'
' boundary="===============0445577956452755870=="\n'
'Subject: foo\n'
'To: foo@bar\n'
'{}: true').format(
settings.TRANSACTIONAL['X_MAIL_TRACK_CLICKS_HEADER'])
body = (
'--===============0445577956452755870==\n'
'Content-Type: text/plain; charset="us-ascii"\n'
'MIME-Version: 1.0\n'
'Content-Transfer-Encoding: 7bit\n'
'\n'
'My template in text to [1]\n'
'\n'
'[1]: http://google.it\n'
'--===============0445577956452755870==\n'
'Content-Type: text/html; charset="us-ascii"\n'
'MIME-Version: 1.0\n'
'Content-Transfer-Encoding: 7bit\n'
'\n'
'<a href="http://google.fr">Google!</a>\n'
'<a href="http://google.com">Google COM</a>\n'
'<strong>Strong jambon is strong</strong>\n'
'--===============0445577956452755870==--\n')
env = self.mk_envelope(
headers.encode('utf-8') + body.encode('utf-8'),
sender="[email protected]", recipients=['[email protected]'])
env.client = {'auth': (self.smtp_application.username, None)}
env.user = self.user
identifier.Add().apply(env)
bounces.Check().apply(env)
sending_domain.Check().apply(env)
store_mail.Store().apply(env)
headers, body = env.flatten()
self.assertIn('/t/clicks/m/', body.decode('utf-8'))
self.assertIn('http://google.it', body.decode('utf-8'))
self.assertNotIn('http://google.fr', body.decode('utf-8'))
self.assertNotIn('http://google.com', body.decode('utf-8'))
def test_unsubscribe_link_no_placeholder(self):
headers = (
'From: [email protected]\n'
'Content-Type: multipart/alternative;\n'
' boundary="===============0445577956452755870=="\n'
'Subject: foo\n'
'To: foo@bar\n'
'{}: true').format(
settings.TRANSACTIONAL['X_MAIL_UNSUBSCRIBE_HEADER'])
body = (
'--===============0445577956452755870==\n'
'Content-Type: text/plain; charset="us-ascii"\n'
'MIME-Version: 1.0\n'
'Content-Transfer-Encoding: 7bit\n'
'\n'
'My template in text to [1]\n'
'\n'
'[1]: http://google.it\n'
'--===============0445577956452755870==\n'
'Content-Type: text/html; charset="us-ascii"\n'
'MIME-Version: 1.0\n'
'Content-Transfer-Encoding: 7bit\n'
'\n'
'<a href="http://google.fr">Google!</a>\n'
'<a href="http://google.com">Google COM</a>\n'
'<strong>Strong jambon is strong</strong>\n'
'--===============0445577956452755870==--\n')
env = self.mk_envelope(
headers.encode('utf-8') + body.encode('utf-8'),
sender="[email protected]", recipients=['[email protected]'])
env.client = {'auth': (self.smtp_application.username, None)}
env.user = self.user
identifier.Add().apply(env)
bounces.Check().apply(env)
sending_domain.Check().apply(env)
store_mail.Store().apply(env)
headers, body = env.flatten()
self.assertNotIn('/h/subscriptions/', body.decode('utf-8'))
def test_unsubscribe_link(self):
headers = (
'From: [email protected]\n'
'Content-Type: multipart/alternative;\n'
' boundary="===============0445577956452755870=="\n'
'Subject: foo\n'
'To: foo@bar\n'
'{}: true').format(
settings.TRANSACTIONAL['X_MAIL_UNSUBSCRIBE_HEADER'])
body = (
'--===============0445577956452755870==\n'
'Content-Type: text/plain; charset="us-ascii"\n'
'MIME-Version: 1.0\n'
'Content-Transfer-Encoding: 7bit\n'
'\n'
'My template in text to [1]\n'
'Unsub here: {}'
'\n'
'[1]: http://google.it\n'
'--===============0445577956452755870==\n'
'Content-Type: text/html; charset="us-ascii"\n'
'MIME-Version: 1.0\n'
'Content-Transfer-Encoding: 7bit\n'
'\n'
'<a href="http://google.fr">Google!</a>\n'
'<a href="http://google.com">Google COM</a>\n'
'<strong>Strong jambon is strong</strong>\n'
'<a href="{}">Unsubscribe here</a>\n'
'--===============0445577956452755870==--\n').format(
settings.OPTOUTS['UNSUBSCRIBE_PLACEHOLDER'],
settings.OPTOUTS['UNSUBSCRIBE_PLACEHOLDER'])
env = self.mk_envelope(
headers.encode('utf-8') + body.encode('utf-8'),
sender="[email protected]", recipients=['[email protected]'])
env.client = {'auth': (self.smtp_application.username, None)}
env.user = self.user
identifier.Add().apply(env)
bounces.Check().apply(env)
sending_domain.Check().apply(env)
store_mail.Store().apply(env)
headers, body = env.flatten()
self.assertIn('/h/subscriptions/', body.decode('utf-8'))
class TestBounces(PolicyCase):
def setUp(self):
self.smtp_application = SmtpApplicationFactory()
self.domain = SendingDomainFactory(
name='example.com',
organization=self.smtp_application.author.organization)
self.user = UserFactory(
identifier='[email protected]', last_login=timezone.now())
def test_clean_address(self):
optout = OptOutFactory(origin=OptOut.BY_WEB)
envelope = self.mk_envelope(b'', recipients=[optout.address])
envelope.client = {'auth': (self.smtp_application.username, None)}
envelope.user = self.user
identifier.Add().apply(envelope)
bounces.Check().apply(envelope)
def test_bounce(self):
optout = OptOutFactory(origin=OptOut.BY_BOUNCE)
envelope = self.mk_envelope(b'', recipients=[optout.address])
envelope.client = {'auth': (self.smtp_application.username, None)}
envelope.user = self.user
identifier.Add().apply(envelope)
with self.assertRaises(QueueError):
bounces.Check().apply(envelope)
def test_optout_batch_no_category(self):
headers = (
'From: [email protected]\n'
'Subject: foo\n'
'To: foo@bar\n'
'{}: test').format(
settings.TRANSACTIONAL['X_MAIL_BATCH_HEADER'])
recipient = faker.email()
envelope = self.mk_envelope(
headers.encode('utf-8'),
sender=faker.email(), recipients=[recipient])
envelope.client = {'auth': (self.smtp_application.username, None)}
envelope.user = self.user
identifier.Add().apply(envelope)
bounces.Check().apply(envelope)
sending_domain.Check().apply(envelope)
store_mail.Store().apply(envelope)
self.client.post(
'/h/subscriptions/{}/optout/'.format(
envelope.headers.get(
settings.TRANSACTIONAL['X_MESSAGE_ID_HEADER'])))
with self.assertRaises(QueueError):
identifier.Add().apply(envelope)
bounces.Check().apply(envelope)
def test_optout_batch_no_category_another_user(self):
headers = (
'From: [email protected]\n'
'Subject: foo\n'
'To: foo@bar\n'
'{}: test').format(
settings.TRANSACTIONAL['X_MAIL_BATCH_HEADER'])
recipient = faker.email()
envelope = self.mk_envelope(
headers.encode('utf-8'),
sender=faker.email(), recipients=[recipient])
envelope.client = {'auth': (self.smtp_application.username, None)}
envelope.user = self.user
identifier.Add().apply(envelope)
bounces.Check().apply(envelope)
sending_domain.Check().apply(envelope)
store_mail.Store().apply(envelope)
self.client.post(
'/h/subscriptions/{}/optout/'.format(
envelope.headers.get(
settings.TRANSACTIONAL['X_MESSAGE_ID_HEADER'])))
another_smtp_application = SmtpApplicationFactory()
SendingDomainFactory(
name='example.com',
organization=another_smtp_application.author.organization)
headers = (
'From: [email protected]\n'
'Subject: foo\n'
'To: foo@bar\n'
'{}: test').format(
settings.TRANSACTIONAL['X_MAIL_BATCH_HEADER'])
recipient = faker.email()
envelope = self.mk_envelope(
headers.encode('utf-8'),
sender=faker.email(), recipients=[recipient])
envelope.client = {'auth': (another_smtp_application.username, None)}
envelope.user = self.user
identifier.Add().apply(envelope)
bounces.Check().apply(envelope)
sending_domain.Check().apply(envelope)
store_mail.Store().apply(envelope)
def test_optout_batch_with_category(self):
# First Optout on mail without category
headers = (
'From: [email protected]\n'
'Subject: foo\n'
'To: foo@bar\n'
'{}: test').format(
settings.TRANSACTIONAL['X_MAIL_BATCH_HEADER'])
recipient = faker.email()
envelope = self.mk_envelope(
headers.encode('utf-8'),
sender=faker.email(), recipients=[recipient])
envelope.client = {'auth': (self.smtp_application.username, None)}
envelope.user = self.user
identifier.Add().apply(envelope)
bounces.Check().apply(envelope)
sending_domain.Check().apply(envelope)
store_mail.Store().apply(envelope)
self.client.post(
'/h/subscriptions/{}/optout/'.format(
envelope.headers.get(
settings.TRANSACTIONAL['X_MESSAGE_ID_HEADER'])))
# Second one with same recipient but with a category
headers = (
'From: [email protected]\n'
'Subject: foo\n'
'To: foo@bar\n'
'{}: test-category\n'
'{}: test').format(
settings.TRANSACTIONAL['X_MAIL_BATCH_CATEGORY_HEADER'],
settings.TRANSACTIONAL['X_MAIL_BATCH_HEADER'])
envelope = self.mk_envelope(
headers.encode('utf-8'),
sender=faker.email(), recipients=[recipient])
envelope.client = {'auth': (self.smtp_application.username, None)}
envelope.user = self.user
identifier.Add().apply(envelope)
bounces.Check().apply(envelope)
sending_domain.Check().apply(envelope)
store_mail.Store().apply(envelope)
identifier.Add().apply(envelope)
bounces.Check().apply(envelope)
class TestReturnPath(PolicyCase):
def test_no_prev_returnpath(self):
"""
X-Munch-HTTP-Return-Path: yes
Return-Path: no
Return-Path-Rewrite: yes
"""
env = self.mk_envelope(b'X-Munch-HTTP-Return-Path: http://unittest')
headers_policy.RewriteReturnPath().apply(env)
headers, _ = env.flatten()
self.assertIn('return-', env.sender)
def test_no_prev_returnpath_without_http_returnpath(self):
"""
X-Munch-HTTP-Return-Path: no
Return-Path: no
Return-Path: envelope.sender
"""
env = self.mk_envelope(b'', sender="[email protected]")
headers_policy.RewriteReturnPath().apply(env)
headers, _ = env.flatten()
self.assertIn('[email protected]', env.sender)
def test_replace_returnpath(self):
"""
X-Munch-HTTP-Return-Path: yes
Return-Path: yes
Return-Path-Rewrite: yes
"""
env = self.mk_envelope(
b"X-Munch-HTTP-Return-Path: http://unittest.example.com/ping",
sender='[email protected]')
headers_policy.RewriteReturnPath().apply(env)
headers, _ = env.flatten()
self.assertRegex(env.sender, 'return-.*test.munch.example.com.*')
def test_replace_returnpath_without_http_returnpath(self):
"""
X-Munch-HTTP-Return-Path: no
Return-Path: yes
Return-Path-Rewrite: no
"""
env = self.mk_envelope(b"Return-Path: [email protected]")
headers_policy.RewriteReturnPath().apply(env)
headers, _ = env.flatten()
self.assertIn(
'Return-Path: [email protected]', headers.decode('utf-8'))
| agpl-3.0 | 4,494,491,588,191,560,700 | 38.966019 | 78 | 0.570226 | false |
gchq/gaffer-tools | python-shell/src/example.py | 1 | 26676 | #
# Copyright 2016-2019 Crown Copyright
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from gafferpy import gaffer as g
from gafferpy import gaffer_connector
def run(host, verbose=False):
return run_with_connector(create_connector(host, verbose))
def run_with_connector(gc):
print()
print('Running operations')
print('--------------------------')
print()
get_schema(gc)
get_filter_functions(gc)
get_class_filter_functions(gc)
get_element_generators(gc)
get_object_generators(gc)
get_operations(gc)
get_serialised_fields(gc)
get_store_traits(gc)
is_operation_supported(gc)
add_elements(gc)
get_elements(gc)
get_adj_seeds(gc)
get_all_elements(gc)
get_walks(gc)
generate_elements(gc)
generate_domain_objs(gc)
generate_domain_objects_chain(gc)
get_element_group_counts(gc)
get_sub_graph(gc)
export_to_gaffer_result_cache(gc)
get_job_details(gc)
get_all_job_details(gc)
add_named_operation(gc)
get_all_named_operations(gc)
named_operation(gc)
delete_named_operation(gc)
add_named_view_summarise(gc)
add_named_view_date_range(gc)
get_all_named_views(gc)
named_view_summarise(gc)
named_view_date_range(gc)
named_views(gc)
delete_named_views(gc)
sort_elements(gc)
max_element(gc)
min_element(gc)
to_vertices_to_entity_seeds(gc)
complex_op_chain(gc)
op_chain_in_json(gc)
def create_connector(host, verbose=False):
return gaffer_connector.GafferConnector(host, verbose)
def get_schema(gc):
# Get Schema
result = gc.execute_get(
g.GetSchema()
)
print('Schema:')
print(result)
print()
def get_filter_functions(gc):
# Get filter functions
result = gc.execute_get(
g.GetFilterFunctions()
)
print('Filter Functions:')
print(result)
print()
def get_class_filter_functions(gc):
# Get class filter functions
class_name = 'uk.gov.gchq.koryphe.impl.predicate.IsMoreThan'
result = gc.execute_get(
g.GetClassFilterFunctions(class_name=class_name)
)
print('Class Filter Functions (IsMoreThan):')
print(result)
print()
def get_element_generators(gc):
# Get Element generators
result = gc.execute_get(
g.GetElementGenerators()
)
print('Element generators:')
print(result)
print()
def get_object_generators(gc):
# Get Object generators
result = gc.execute_get(
g.GetObjectGenerators()
)
print('Object generators:')
print(result)
print()
def get_operations(gc):
# Get operations
result = gc.execute_get(
g.GetOperations()
)
print('Operations:')
print(result)
print()
def get_serialised_fields(gc):
# Get serialised fields
class_name = 'uk.gov.gchq.koryphe.impl.predicate.IsMoreThan'
result = gc.execute_get(
g.GetSerialisedFields(class_name=class_name)
)
print('Serialised Fields (IsMoreThan):')
print(result)
print()
def get_store_traits(gc):
# Get Store Traits
result = gc.execute_get(
g.GetStoreTraits()
)
print('Store Traits:')
print(result)
print()
def is_operation_supported(gc):
# Is operation supported
operation = 'uk.gov.gchq.gaffer.operation.impl.add.AddElements'
result = gc.is_operation_supported(
g.IsOperationSupported(operation=operation)
)
print(
'\nOperation supported ("uk.gov.gchq.gaffer.operation.impl.add.AddElements"):')
print(result)
print()
def add_elements(gc):
# Add Elements
gc.execute_operation(
g.AddElements(
input=[
g.Entity(
group='JunctionUse',
vertex='M1:1',
properties={
'countByVehicleType': g.freq_map({
'BUS': 10,
'CAR': 50
}),
'endDate': g.date(1034319600000),
'count': g.long(60),
'startDate': g.date(1034316000000)
}
),
g.Edge(
group='RoadHasJunction',
source='M1',
destination='M1:1',
directed=True,
properties={}
)
]
)
)
print('Elements have been added')
print()
def get_elements(gc):
# Get Elements
input = gc.execute_operation(
g.GetElements(
input=[
g.EntitySeed('M5:10'),
# Edge input can be provided as follows
g.EdgeSeed('M5:10', 'M5:11', g.DirectedType.EITHER),
g.EdgeSeed('M5:10', 'M5:11', g.DirectedType.DIRECTED),
# Or you can use True or False for the direction
g.EdgeSeed('M5:10', 'M5:11', True)
],
view=g.View(
edges=[
g.ElementDefinition(
group='RoadUse',
group_by=[],
transient_properties=[
g.Property('description', 'java.lang.String')
],
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=['count'],
predicate=g.IsMoreThan(
value=g.long(1)
)
)
],
transform_functions=[
g.FunctionContext(
selection=['SOURCE', 'DESTINATION', 'count'],
function=g.Function(
class_name='uk.gov.gchq.gaffer.traffic.transform.DescriptionTransform'
),
projection=['description']
)
]
)
]
),
directed_type=g.DirectedType.EITHER
)
)
print('Related input')
print(input)
print()
def get_adj_seeds(gc):
# Adjacent Elements - chain 2 adjacent entities together
adj_seeds = gc.execute_operations(
[
g.GetAdjacentIds(
input=[
g.EntitySeed(
vertex='M5'
)
],
view=g.View(
edges=[
g.ElementDefinition(
'RoadHasJunction',
group_by=[]
)
]
),
include_incoming_out_going=g.InOutType.OUT
),
g.GetAdjacentIds(
view=g.View(
edges=[
g.ElementDefinition(
'RoadUse',
group_by=[]
)
]
),
include_incoming_out_going=g.InOutType.OUT
)
]
)
print('Adjacent entities - 2 hop')
print(adj_seeds)
print()
def get_all_elements(gc):
# Get all input, but limit the total results to 3
all_elements = gc.execute_operations(
operations=[
g.GetAllElements(),
g.Limit(result_limit=3)
]
)
print('All input (Limited to first 3)')
print(all_elements)
print()
def get_walks(gc):
# Get walks from M32 traversing down RoadHasJunction then JunctionLocatedAt
walks = gc.execute_operation(
g.GetWalks(
input=[
g.EntitySeed('M32'),
],
operations=[
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadHasJunction'
)
]
)
),
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
group='JunctionLocatedAt'
)
]
)
)
]
)
)
print(
'Walks from M32 traversing down RoadHasJunction then JunctionLocatedAt')
print(walks)
print()
def generate_elements(gc):
# Generate Elements
input = gc.execute_operation(
g.GenerateElements(
element_generator=g.ElementGenerator(
class_name='uk.gov.gchq.gaffer.traffic.generator.RoadTrafficStringElementGenerator'
),
input=[
'"South West","E06000054","Wiltshire","6016","389200","179080","M4","LA Boundary","381800","180030","17","391646","179560","TM","E","2000","2000-05-03 00:00:00","7","0","9","2243","15","426","127","21","20","37","106","56","367","3060"'
]
)
)
print('Generated input from provided domain input')
print(input)
print()
def generate_domain_objs(gc):
# Generate Domain Objects - single provided element
input = gc.execute_operation(
g.GenerateObjects(
element_generator=g.ElementGenerator(
class_name='uk.gov.gchq.gaffer.rest.example.ExampleDomainObjectGenerator'
),
input=[
g.Entity('entity', '1'),
g.Edge('edge', '1', '2', True)
]
)
)
print('Generated input from provided input')
print(input)
print()
def generate_domain_objects_chain(gc):
# Generate Domain Objects - chain of get input then generate input
input = gc.execute_operations(
[
g.GetElements(
input=[g.EntitySeed(vertex='M5')],
seed_matching_type=g.SeedMatchingType.RELATED,
view=g.View(
edges=[
g.ElementDefinition(
group='RoadHasJunction',
group_by=[]
)
]
)
),
g.GenerateObjects(
element_generator=g.ElementGenerator(
class_name='uk.gov.gchq.gaffer.rest.example.ExampleDomainObjectGenerator'
)
)
]
)
print('Generated input from get input by seed')
print(input)
print()
def get_element_group_counts(gc):
# Get Elements
group_counts = gc.execute_operations([
g.GetElements(
input=[g.EntitySeed('M5')]
),
g.CountGroups(limit=1000)
])
print('Groups counts (limited to 1000 input)')
print(group_counts)
print()
def get_sub_graph(gc):
# Export and Get to/from an in memory set
entity_seeds = gc.execute_operations(
[
g.GetAdjacentIds(
input=[g.EntitySeed('South West')],
include_incoming_out_going=g.InOutType.OUT
),
g.ExportToSet(),
g.GetAdjacentIds(include_incoming_out_going=g.InOutType.OUT),
g.ExportToSet(),
g.DiscardOutput(),
g.GetSetExport()
]
)
print('Export and Get to/from an in memory set')
print(entity_seeds)
print()
def export_to_gaffer_result_cache(gc):
# Export to Gaffer Result Cache and Get from Gaffer Result Cache
job_details = gc.execute_operations(
[
g.GetAdjacentIds(
input=[g.EntitySeed('South West')],
include_incoming_out_going=g.InOutType.OUT
),
g.ExportToGafferResultCache(),
g.DiscardOutput(),
g.GetJobDetails()
]
)
print('Export to Gaffer Result Cache. Job Details:')
print(job_details)
print()
job_id = job_details['jobId']
entity_seeds = gc.execute_operation(
g.GetGafferResultCacheExport(job_id=job_id),
)
print('Get Gaffer Result Cache Export.')
print(entity_seeds)
print()
def get_job_details(gc):
# Get all job details
job_details_initial = gc.execute_operations(
[
g.GetAdjacentIds(
input=[g.EntitySeed('1')],
),
g.ExportToGafferResultCache(),
g.DiscardOutput(),
g.GetJobDetails()
]
)
job_id = job_details_initial['jobId']
job_details = gc.execute_operation(
g.GetJobDetails(job_id=job_id),
)
print('Get job details')
print(job_details)
print()
def get_all_job_details(gc):
# Get all job details
all_job_details = gc.execute_operation(
g.GetAllJobDetails(),
)
print('Get all job details (just prints the first 3 results)')
print(all_job_details[:3])
print()
def delete_named_operation(gc):
gc.execute_operation(
g.DeleteNamedOperation('2-hop-with-limit')
)
print('Deleted named operation: 2-hop-with-limit')
print()
def add_named_operation(gc):
gc.execute_operation(
g.AddNamedOperation(
operation_chain={
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing": "OUTGOING"
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing": "OUTGOING"
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.Limit",
"resultLimit": "${param1}"
}]
},
operation_name='2-hop-with-limit',
description='2 hop query with limit',
overwrite_flag=True,
read_access_roles=["read-user"],
write_access_roles=["write-user"],
parameters=[
g.NamedOperationParameter(
name="param1",
description="Limit param",
default_value=1,
value_class="java.lang.Long",
required=False
)
]
)
)
print('Added named operation: 2-hop-with-limit')
print()
def get_all_named_operations(gc):
namedOperations = gc.execute_operation(
g.GetAllNamedOperations()
)
print('Named operations')
print(namedOperations)
print()
def named_operation(gc):
result = gc.execute_operation(
g.NamedOperation(
operation_name='2-hop-with-limit',
parameters={
'param1': 2
},
input=[
g.EntitySeed('M5')
]
)
)
print('Execute named operation')
print(result)
print()
def delete_named_views(gc):
gc.execute_operation(
g.DeleteNamedView(name='summarise')
)
print('Deleted named view: summarise')
gc.execute_operation(
g.DeleteNamedView(name='dateRange')
)
print('Deleted named view: dateRange')
print()
def add_named_view_summarise(gc):
gc.execute_operation(
g.AddNamedView(
view=g.View(
global_elements=[
g.GlobalElementDefinition(group_by=[])
]
),
name='summarise',
description='Summarises all results (overrides the groupBy to an empty array).',
overwrite_flag=True
)
)
print('Added named view: summarise')
print()
def add_named_view_date_range(gc):
gc.execute_operation(
g.AddNamedView(
view=g.View(
global_elements=g.GlobalElementDefinition(
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=['startDate'],
predicate=g.InDateRange(
start='${start}',
end='${end}'
)
)
]
)
),
name='dateRange',
description='Filters results to a provided date range.',
overwrite_flag=True,
parameters=[
g.NamedViewParameter(
name="start",
description="A date string for the start of date range.",
value_class="java.lang.String",
required=False
),
g.NamedViewParameter(
name="end",
description="A date string for the end of the date range.",
value_class="java.lang.String",
required=False
)
]
)
)
print('Added named view: dateRange')
print()
def get_all_named_views(gc):
namedViews = gc.execute_operation(
g.GetAllNamedViews()
)
print('Named views')
print(namedViews)
print()
def named_view_summarise(gc):
result = gc.execute_operation(
g.GetElements(
input=[
g.EntitySeed(
vertex='M32:1'
)
],
view=g.NamedView(
name="summarise"
)
)
)
print('Execute get elements with summarised named view')
print(result)
print()
def named_view_date_range(gc):
result = gc.execute_operation(
g.GetElements(
input=[
g.EntitySeed(
vertex='M32:1'
)
],
view=g.NamedView(
name="dateRange",
parameters={
'start': '2005/05/03 06:00',
'end': '2005/05/03 09:00'
}
)
)
)
print('Execute get elements with date range named view')
print(result)
print()
def named_views(gc):
result = gc.execute_operation(
g.GetElements(
input=[
g.EntitySeed(
vertex='M32:1'
)
],
view=[
g.NamedView(
name="summarise"
),
g.NamedView(
name="dateRange",
parameters={
'start': '2005/05/03 06:00',
'end': '2005/05/03 09:00'
}
)
]
)
)
print('Execute get elements with summarised and date range named views')
print(result)
print()
def sort_elements(gc):
# Get sorted Elements
input = gc.execute_operations([
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadUse',
group_by=[]
)
]
)
),
g.Sort(
comparators=[
g.ElementPropertyComparator(
groups=['RoadUse'],
property='count'
)
],
result_limit=5
)
])
print('Sorted input')
print(input)
print()
def max_element(gc):
# Get sorted Elements
input = gc.execute_operations([
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadUse',
group_by=[]
)
]
)
),
g.Max(
comparators=[
g.ElementPropertyComparator(
groups=['RoadUse'],
property='count'
)
]
)
])
print('Max element')
print(input)
print()
def min_element(gc):
# Get sorted Elements
input = gc.execute_operations([
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadUse',
group_by=[]
)
]
)
),
g.Min(
comparators=[
g.ElementPropertyComparator(
groups=['RoadUse'],
property='count'
)
]
)
])
print('Min element')
print(input)
print()
def to_vertices_to_entity_seeds(gc):
# Get sorted Elements
input = gc.execute_operations([
g.GetElements(
input=[
g.EntitySeed(
vertex='South West'
)
],
view=g.View(
edges=[
g.ElementDefinition(
'RegionContainsLocation',
group_by=[]
)
]
),
include_incoming_out_going=g.InOutType.OUT
),
g.ToVertices(
edge_vertices=g.EdgeVertices.DESTINATION,
use_matched_vertex=g.UseMatchedVertex.OPPOSITE
),
g.ToEntitySeeds(),
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
'LocationContainsRoad',
group_by=[]
)
]
),
include_incoming_out_going=g.InOutType.OUT
),
g.Limit(5)
])
print('ToVertices then ToEntitySeeds')
print(input)
print()
def complex_op_chain(gc):
# All road junctions in the South West that were heavily used by buses in year 2000.
junctions = gc.execute_operations(
operations=[
g.GetAdjacentIds(
input=[g.EntitySeed(vertex='South West')],
view=g.View(
edges=[
g.ElementDefinition(
group='RegionContainsLocation',
group_by=[]
)
]
)
),
g.GetAdjacentIds(
view=g.View(
edges=[
g.ElementDefinition(
group='LocationContainsRoad',
group_by=[]
)
]
)
),
g.ToSet(),
g.GetAdjacentIds(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadHasJunction',
group_by=[]
)
]
)
),
g.GetElements(
view=g.View(
entities=[
g.ElementDefinition(
group='JunctionUse',
group_by=[],
transient_properties=[
g.Property('busCount', 'java.lang.Long')
],
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=['startDate'],
predicate=g.InDateRange(
start='2000/01/01',
end='2001/01/01'
)
)
],
post_aggregation_filter_functions=[
g.PredicateContext(
selection=['countByVehicleType'],
predicate=g.PredicateMap(
predicate=g.IsMoreThan(
value={'java.lang.Long': 1000},
or_equal_to=False
),
key='BUS'
)
)
],
transform_functions=[
g.FunctionContext(
selection=['countByVehicleType'],
function=g.FreqMapExtractor(key='BUS'),
projection=['busCount']
)
]
)
]
),
include_incoming_out_going=g.InOutType.OUT
),
g.ToCsv(
element_generator=g.CsvGenerator(
fields={
'VERTEX': 'Junction',
'busCount': 'Bus Count'
},
quoted=False
),
include_header=True
)
]
)
print(
'All road junctions in the South West that were heavily used by buses in year 2000.')
print(junctions)
print()
def op_chain_in_json(gc):
# Operation chain defined in json
result = gc.execute_operation_chain(
{
"class": "uk.gov.gchq.gaffer.operation.OperationChain",
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.CountGroups"
}]
}
)
print('Operation chain defined in json')
print(result)
print()
if __name__ == "__main__":
run('http://localhost:8080/rest/latest', False)
| apache-2.0 | -6,482,966,961,602,001,000 | 26.700935 | 252 | 0.458315 | false |
menegazzo/travispy | setup.py | 2 | 1873 | from setuptools import setup
from setuptools.command.test import test as TestCommand
import sys
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', 'Arguments to pass to py.test')]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = []
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
args = ['travispy']
if self.pytest_args:
args.insert(0, self.pytest_args)
errno = pytest.main(args)
sys.exit(errno)
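# With this cmdclass the suite runs via "python setup.py test"; extra py.test flags can be
# forwarded through the option defined above, e.g. "python setup.py test -a '-x -q'".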
setup(
name='TravisPy',
version='0.3.5',
packages=['travispy', 'travispy.entities'],
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
install_requires=['requests'],
# metadata for upload to PyPI
author='Fabio Menegazzo',
author_email='[email protected]',
description='Python API for Travis CI.',
long_description=open('README.rst').read(),
license='GPL',
keywords='travis ci continuous integration travisci',
url='https://github.com/menegazzo/travispy',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
# tests
tests_require=['pytest'],
cmdclass={'test': PyTest},
)
| gpl-3.0 | -5,545,387,666,978,756,000 | 29.704918 | 75 | 0.608649 | false |
tulikavijay/vms | vms/administrator/tests/test_report.py | 1 | 16008 | # third party
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
# Django
from django.contrib.staticfiles.testing import LiveServerTestCase
from django.db import IntegrityError
# local Django
from pom.locators.administratorReportPageLocators import *
from pom.pages.administratorReportPage import AdministratorReportPage
from pom.pages.authenticationPage import AuthenticationPage
from shift.utils import (
create_admin,
create_volunteer,
create_organization_with_details,
create_event_with_details,
create_job_with_details,
create_shift_with_details,
log_hours_with_details,
register_volunteer_for_shift_utility
)
class Report(LiveServerTestCase):
'''
'''
@classmethod
def setUpClass(cls):
cls.driver = webdriver.Firefox()
cls.driver.implicitly_wait(5)
cls.driver.maximize_window()
cls.authentication_page = AuthenticationPage(cls.driver)
cls.report_page = AdministratorReportPage(cls.driver)
cls.elements = AdministratorReportPageLocators()
super(Report, cls).setUpClass()
def setUp(self):
create_admin()
self.login_admin()
self.report_page.go_to_admin_report()
def tearDown(self):
pass
@classmethod
def tearDownClass(cls):
cls.driver.quit()
super(Report, cls).tearDownClass()
def login_admin(self):
self.authentication_page.server_url = self.live_server_url
self.authentication_page.login({ 'username' : 'admin', 'password' : 'admin'})
def verify_shift_details(self, total_shifts, hours):
total_no_of_shifts = self.report_page.get_shift_summary().split(' ')[10].strip('\nTotal')
total_no_of_hours = self.report_page.get_shift_summary().split(' ')[-1].strip('\n')
self.assertEqual(total_no_of_shifts, total_shifts)
self.assertEqual(total_no_of_hours, hours)
#Failing test case which has been documented
#Test commented out to prevent travis build failure - bug #327
"""def test_null_values_with_dataset(self):
# register dataset
org = create_organization_with_details('organization-one')
volunteer = create_volunteer()
volunteer.organization = org
volunteer.save()
# create shift and log hours
# register event first to create job
event = ['Hackathon', '2017-08-21', '2017-09-28']
created_event = create_event_with_details(event)
# create job
job = ['Developer', '2017-08-21', '2017-08-30', '',created_event]
created_job = create_job_with_details(job)
# create shift
shift = ['2017-08-21', '09:00', '15:00', '10', created_job]
created_shift = create_shift_with_details(shift)
logged_shift = log_hours_with_details(volunteer, created_shift, "09:00", "12:00")
report_page = self.report_page
# check admin report with null fields, should return the above shift
report_page.fill_report_form(['','','','',''])
self.verify_shift_details('1','3.0')
self.assertEqual(report_page.element_by_xpath(
self.elements.NAME).text, created_event.name)
self.assertEqual(report_page.element_by_xpath(
self.elements.DATE).text, 'Aug. 21, 2016')
self.assertEqual(report_page.element_by_xpath(
self.elements.START_TIME).text, '9 a.m.')
self.assertEqual(report_page.element_by_xpath(
self.elements.END_TIME).text, '12 p.m.')
self.assertEqual(report_page.element_by_xpath(
self.elements.HOURS).text, '3.0')"""
def test_null_values_with_empty_dataset(self):
# should return no entries
report_page = self.report_page
report_page.fill_report_form(['','','','',''])
self.assertEqual(report_page.get_alert_box_text(),report_page.no_results_message)
def test_only_logged_shifts_are_reported(self):
# register dataset
org = create_organization_with_details('organization-one')
volunteer = create_volunteer()
volunteer.organization = org
volunteer.save()
# register event first to create job
event = ['Hackathon', '2017-08-21', '2017-09-28']
created_event = create_event_with_details(event)
# create job
job = ['Developer', '2017-08-21', '2017-08-30', '',created_event]
created_job = create_job_with_details(job)
# create shift
shift = ['2017-08-21', '09:00', '15:00', '10', created_job]
created_shift = create_shift_with_details(shift)
# shift is assigned to volunteer-one, but hours have not been logged
volunteer_shift = register_volunteer_for_shift_utility(created_shift, volunteer)
report_page = self.report_page
# check admin report with null fields, should not return the above shift
report_page.fill_report_form(['','','','',''])
self.assertEqual(report_page.get_alert_box_text(),report_page.no_results_message)
#Failing test case which has been documented - bug #327
#Test commented out to prevent travis build failure
"""def test_check_intersection_of_fields(self):
self.create_dataset()
report_page = self.report_page
search_parameters_1 = ['tom','','','','']
report_page.fill_report_form(search_parameters_1)
self.verify_shift_details('2','2.0')
search_parameters_2 = ['','','','','org-one']
report_page.fill_report_form(search_parameters_2)
self.verify_shift_details('3','3.0')
search_parameters_3 = ['','','event-four','Two','']
report_page.fill_report_form(search_parameters_3)
# 1 shift of 1:30 hrs
self.verify_shift_details('1','1.5')
search_parameters_4 = ['','','one','','']
report_page.fill_report_form(search_parameters_4)
# 3 shifts of 0:30 hrs, 1:00 hrs, 1:00 hrs
self.verify_shift_details('3','2.5')
# check case-insensitive
search_parameters_5 = ['','sherlock','two','','']
report_page.fill_report_form(search_parameters_5)
self.verify_shift_details('1','2.0')
def create_dataset(self):
parameters = {'org' : 'org-one',
'volunteer' : {
'username' : 'uname1',
'password' : 'uname1',
'email' : '[email protected]',
'first_name' : 'tom-fname',
'last_name' : 'tom-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-four',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobOneInEventFour',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '09:00',
'end_time' : '11:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '09:30',
'end_time' : '10:00',}}
self.register_dataset(parameters)
parameters = {'org' : 'org-one',
'volunteer' : {
'username' : 'uname2',
'password' : 'uname2',
'email' : '[email protected]',
'first_name' : 'peter-fname',
'last_name' : 'peter-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-one',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobOneInEventOne',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '18:00',
'end_time' : '23:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '19:00',
'end_time' : '20:00'}}
self.register_dataset(parameters)
parameters = {'org' : 'org-one',
'volunteer' : {
'username' : 'uname3',
'password' : 'uname3',
'email' : '[email protected]',
'first_name' : 'tom-fname',
'last_name' : 'tom-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-four',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobTwoInEventFour',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '09:00',
'end_time' : '15:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '10:00',
'end_time' : '11:30'}}
self.register_dataset(parameters)
parameters = {'org' : 'org-two',
'volunteer' : {
'username' : 'uname4',
'password' : 'uname4',
'email' : '[email protected]',
'first_name' : 'harry-fname',
'last_name' : 'harry-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-one',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobTwoInEventOne',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '09:00',
'end_time' : '11:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '09:00',
'end_time' : '10:00'}}
self.register_dataset(parameters)
parameters = {'org' : 'org-two',
'volunteer' : {
'username' : 'uname5',
'password' : 'uname5',
'email' : '[email protected]',
'first_name' : 'harry-fname',
'last_name' : 'harry-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-two',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobOneInEventTwo',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '09:00',
'end_time' : '18:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '12:00',
'end_time' : '15:00'}}
self.register_dataset(parameters)
parameters = {'org' : 'org-three',
'volunteer' : {
'username' : 'uname6',
'password' : 'uname6',
'email' : '[email protected]',
'first_name' : 'sherlock-fname',
'last_name' : 'sherlock-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-two',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobOneInEventTwo',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '09:00',
'end_time' : '16:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '12:00',
'end_time' : '14:00'}}
self.register_dataset(parameters)
parameters = {'org' : 'org-four',
'volunteer' : {
'username' : 'uname7',
'password' : 'uname7',
'email' : '[email protected]',
'first_name' : 'harvey-fname',
'last_name' : 'harvey-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-one',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobThreeInEventOne',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '09:00',
'end_time' : '13:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '12:00',
'end_time' : '12:30'}}
self.register_dataset(parameters)
parameters = {'org' : 'org-four',
'volunteer' : {
'username' : 'uname8',
'password' : 'uname8',
'email' : '[email protected]',
'first_name' : 'mike-fname',
'last_name' : 'mike-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-three',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobOneInEventThree',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '01:00',
'end_time' : '10:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '01:00',
'end_time' : '04:00'}}
self.register_dataset(parameters)"""
| gpl-2.0 | 6,484,563,254,342,688,000 | 37.760291 | 97 | 0.446777 | false |
uclouvain/osis_louvain | base/models/offer.py | 1 | 2121 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2018 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.db import models
from osis_common.models.serializable_model import SerializableModel, SerializableModelAdmin
class OfferAdmin(SerializableModelAdmin):
list_display = ('id', 'title', 'changed')
search_fields = ['title']
class Offer(SerializableModel):
external_id = models.CharField(max_length=100, blank=True, null=True, db_index=True)
changed = models.DateTimeField(null=True, auto_now=True)
title = models.CharField(max_length=255)
def __str__(self):
return "{} {}".format(self.id, self.title)
class Meta:
permissions = (
("can_access_offer", "Can access offer"),
("can_access_catalog", "Can access catalog"),
)
def find_by_id(offer_id):
try:
return Offer.objects.get(pk=offer_id)
except Offer.DoesNotExist:
return None
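# Example (illustrative): Offer.find_by_id(42) returns the matching Offer, or None if absent.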
| agpl-3.0 | 137,301,873,159,780,320 | 38.259259 | 91 | 0.649528 | false |
wakaru44/api_ab_test | abapi/main.py | 1 | 1161 |
from flask import Flask, url_for, render_template, request, \
redirect, abort, session, g, flash, Markup
import helpers
from abapi.helpers import *
from abapi import app
@app.route('/event', defaults={"whatever": ""})
@app.route('/event/<path:whatever>')
def event_listener(whatever):
app.logger.info(whatever)
"""
The format of the URL should be something like
- SessionID: GUID (or a string for the purpose of this demo)
- Timestamp: Int. The unix timestamp in the client
- Client Version: String. Something to identify the version/kind/variation of the client
- Item: String. The thing that the user has touched.
In this order, and validating a bit
"""
(sesid, stamp, client, item) = parse_request(whatever)
assert sessionid_check(sesid) is not False
assert stamp_check(stamp) is not False
assert client_check(client) is not False
assert item_check(item) is not False
return "{0} registered".format(whatever)
@app.route('/')
def index():
return render_template('base.html')
@app.errorhandler(404)
def page_not_found(error):
return render_template('page_not_found.html'), 404
| gpl-3.0 | 5,941,074,207,892,447,000 | 29.552632 | 92 | 0.699397 | false |
Lemma1/MAC-POSTS | doc_builder/sphinx-contrib/traclinks/setup.py | 2 | 1133 | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
long_desc = '''
This package contains the traclinks Sphinx extension.
.. add description here ..
'''
requires = ['Sphinx>=0.6']
setup(
name='traclinks',
version='0.1',
url='http://bitbucket.org/birkenfeld/sphinx-contrib',
download_url='http://pypi.python.org/pypi/traclinks',
license='MIT',
author='Kevin Horn',
author_email='[email protected]',
description='Sphinx extension traclinks',
long_description=long_desc,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Documentation',
'Topic :: Utilities',
],
platforms='any',
packages=find_packages(),
include_package_data=True,
install_requires=requires,
namespace_packages=['sphinxcontrib'],
)
| mit | -6,048,783,826,970,069,000 | 26.325 | 57 | 0.609003 | false |
Monsido/graphite-grafana | graphite/local_settings.py | 1 | 8922 | ## Graphite local_settings.py
# Edit this file to customize the default Graphite webapp settings
#
# Additional customizations to Django settings can be added to this file as well
#####################################
# General Configuration #
#####################################
# Set this to a long, random unique string to use as a secret key for this
# install. This key is used for salting of hashes used in auth tokens,
# CSRF middleware, cookie storage, etc. This should be set identically among
# instances if used behind a load balancer.
#SECRET_KEY = 'UNSAFE_DEFAULT'
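# One hedged way to produce such a value (run the command outside this file and
# paste its output in; the 48-byte length is an illustrative choice, not a
# Graphite requirement):
#
#   python -c "import secrets; print(secrets.token_urlsafe(48))"            # Python 3.6+
#   python -c "import os, base64; print(base64.b64encode(os.urandom(48)))"  # older Pythons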
# In Django 1.5+ set this to the list of hosts your graphite instance is
# accessible as. See:
# https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-ALLOWED_HOSTS
#ALLOWED_HOSTS = [ '*' ]
# Set your local timezone (Django's default is America/Chicago)
# If your graphs appear to be offset by a couple hours then this probably
# needs to be explicitly set to your local timezone.
#TIME_ZONE = 'America/Los_Angeles'
# Override this to provide documentation specific to your Graphite deployment
#DOCUMENTATION_URL = "http://graphite.readthedocs.org/"
# Logging
#LOG_RENDERING_PERFORMANCE = True
#LOG_CACHE_PERFORMANCE = True
#LOG_METRIC_ACCESS = True
# Enable full debug page display on exceptions (Internal Server Error pages)
#DEBUG = True
# If using RRD files and rrdcached, set to the address or socket of the daemon
#FLUSHRRDCACHED = 'unix:/var/run/rrdcached.sock'
# This lists the memcached servers that will be used by this webapp.
# If you have a cluster of webapps you should ensure all of them
# have the *exact* same value for this setting. That will maximize cache
# efficiency. Setting MEMCACHE_HOSTS to be empty will turn off use of
# memcached entirely.
#
# You should not use the loopback address (127.0.0.1) here if using clustering
# as every webapp in the cluster should use the exact same values to prevent
# unneeded cache misses. Set to [] to disable caching of images and fetched data
#MEMCACHE_HOSTS = ['10.10.10.10:11211', '10.10.10.11:11211', '10.10.10.12:11211']
#DEFAULT_CACHE_DURATION = 60 # Cache images and data for 1 minute
#####################################
# Filesystem Paths #
#####################################
# Change only GRAPHITE_ROOT if your install is merely shifted from /opt/graphite
# to somewhere else
#GRAPHITE_ROOT = '/opt/graphite'
# Most installs done outside of a separate tree such as /opt/graphite will only
# need to change these three settings. Note that the default settings for each
# of these is relative to GRAPHITE_ROOT
#CONF_DIR = '/opt/graphite/conf'
STORAGE_DIR = '/srv/data/graphite'
#CONTENT_DIR = '/opt/graphite/webapp/content'
# To further or fully customize the paths, modify the following. Note that the
# default settings for each of these are relative to CONF_DIR and STORAGE_DIR
#
## Webapp config files
#DASHBOARD_CONF = '/opt/graphite/conf/dashboard.conf'
#GRAPHTEMPLATES_CONF = '/opt/graphite/conf/graphTemplates.conf'
## Data directories
# NOTE: If any directory is unreadable in DATA_DIRS it will break metric browsing
#WHISPER_DIR = '/opt/graphite/storage/whisper'
#RRD_DIR = '/opt/graphite/storage/rrd'
#DATA_DIRS = [WHISPER_DIR, RRD_DIR] # Default: set from the above variables
#LOG_DIR = '/opt/graphite/storage/log/webapp'
#INDEX_FILE = '/opt/graphite/storage/index' # Search index file
#####################################
# Email Configuration #
#####################################
# This is used for emailing rendered Graphs
# Default backend is SMTP
#EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
#EMAIL_HOST = 'localhost'
#EMAIL_PORT = 25
#EMAIL_HOST_USER = ''
#EMAIL_HOST_PASSWORD = ''
#EMAIL_USE_TLS = False
# To drop emails on the floor, enable the Dummy backend:
#EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
#####################################
# Authentication Configuration #
#####################################
## LDAP / ActiveDirectory authentication setup
#USE_LDAP_AUTH = True
#LDAP_SERVER = "ldap.mycompany.com"
#LDAP_PORT = 389
# OR
#LDAP_URI = "ldaps://ldap.mycompany.com:636"
#LDAP_SEARCH_BASE = "OU=users,DC=mycompany,DC=com"
#LDAP_BASE_USER = "CN=some_readonly_account,DC=mycompany,DC=com"
#LDAP_BASE_PASS = "readonly_account_password"
#LDAP_USER_QUERY = "(username=%s)" #For Active Directory use "(sAMAccountName=%s)"
#
# If you want to further customize the ldap connection options you should
# directly use ldap.set_option to set the ldap module's global options.
# For example:
#
#import ldap
#ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_ALLOW)
#ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, "/etc/ssl/ca")
#ldap.set_option(ldap.OPT_X_TLS_CERTFILE, "/etc/ssl/mycert.pem")
#ldap.set_option(ldap.OPT_X_TLS_KEYFILE, "/etc/ssl/mykey.pem")
# See http://www.python-ldap.org/ for further details on these options.
## REMOTE_USER authentication. See: https://docs.djangoproject.com/en/dev/howto/auth-remote-user/
#USE_REMOTE_USER_AUTHENTICATION = True
# Override the URL for the login link (e.g. for django_openid_auth)
#LOGIN_URL = '/account/login'
##########################
# Database Configuration #
##########################
# By default sqlite is used. If you cluster multiple webapps you will need
# to setup an external database (such as MySQL) and configure all of the webapp
# instances to use the same database. Note that this database is only used to store
# Django models such as saved graphs, dashboards, user preferences, etc.
# Metric data is not stored here.
#
# DO NOT FORGET TO RUN 'manage.py syncdb' AFTER SETTING UP A NEW DATABASE
#
# The following built-in database engines are available:
# django.db.backends.postgresql # Removed in Django 1.4
# django.db.backends.postgresql_psycopg2
# django.db.backends.mysql
# django.db.backends.sqlite3
# django.db.backends.oracle
#
# The default is 'django.db.backends.sqlite3' with file 'graphite.db'
# located in STORAGE_DIR
#
#DATABASES = {
# 'default': {
# 'NAME': '/opt/graphite/storage/graphite.db',
# 'ENGINE': 'django.db.backends.sqlite3',
# 'USER': '',
# 'PASSWORD': '',
# 'HOST': '',
# 'PORT': ''
# }
#}
#
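# A hedged MySQL variant of the same block (database name, user, password and
# host below are placeholders to replace, not recommended values):
#
#DATABASES = {
#    'default': {
#        'NAME': 'graphite',
#        'ENGINE': 'django.db.backends.mysql',
#        'USER': 'graphite',
#        'PASSWORD': 'changeme',
#        'HOST': '127.0.0.1',
#        'PORT': '3306'
#    }
#}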
#########################
# Cluster Configuration #
#########################
# (To avoid excessive DNS lookups you want to stick to using IP addresses only in this entire section)
#
# This should list the IP address (and optionally port) of the webapp on each
# remote server in the cluster. These servers must each have local access to
# metric data. Note that the first server to return a match for a query will be
# used.
#CLUSTER_SERVERS = ["10.0.2.2:80", "10.0.2.3:80"]
## These are timeout values (in seconds) for requests to remote webapps
#REMOTE_STORE_FETCH_TIMEOUT = 6 # Timeout to fetch series data
#REMOTE_STORE_FIND_TIMEOUT = 2.5 # Timeout for metric find requests
#REMOTE_STORE_RETRY_DELAY = 60 # Time before retrying a failed remote webapp
#REMOTE_STORE_USE_POST = False # Use POST instead of GET for remote requests
#REMOTE_FIND_CACHE_DURATION = 300 # Time to cache remote metric find results
## Prefetch cache
# set to True to fetch all metrics using a single http request per remote server
# instead of one http request per target, per remote server.
# Especially useful when generating graphs with more than 4-5 targets or if
# there's significant latency between this server and the backends. (>20ms)
#REMOTE_PREFETCH_DATA = False
## Remote rendering settings
# Set to True to enable rendering of Graphs on a remote webapp
#REMOTE_RENDERING = True
# List of IP (and optionally port) of the webapp on each remote server that
# will be used for rendering. Note that each rendering host should have local
# access to metric data or should have CLUSTER_SERVERS configured
#RENDERING_HOSTS = []
#REMOTE_RENDER_CONNECT_TIMEOUT = 1.0
# If you are running multiple carbon-caches on this machine (typically behind a relay using
# consistent hashing), you'll need to list the ip address, cache query port, and instance name of each carbon-cache
# instance on the local machine (NOT every carbon-cache in the entire cluster). The default cache query port is 7002
# and a common scheme is to use 7102 for instance b, 7202 for instance c, etc.
#
# You *should* use 127.0.0.1 here in most cases
#CARBONLINK_HOSTS = ["127.0.0.1:7002:a", "127.0.0.1:7102:b", "127.0.0.1:7202:c"]
#CARBONLINK_TIMEOUT = 1.0
# Using 'query-bulk' queries for carbon
# It's more effective, but python-carbon 0.9.13 (or latest from 0.9.x branch) is required
# See https://github.com/graphite-project/carbon/pull/132 for details
#CARBONLINK_QUERY_BULK = False
#####################################
# Additional Django Settings #
#####################################
# Uncomment the following line for direct access to Django settings such as
# MIDDLEWARE_CLASSES or APPS
#from graphite.app_settings import *
| apache-2.0 | 8,667,306,118,967,225,000 | 40.497674 | 116 | 0.703206 | false |
django-oscar/django-oscar-mws | oscar_mws/migrations/0002_auto__add_field_fulfillmentorderline_shipment__add_field_fulfillmentor.py | 1 | 34713 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'FulfillmentOrderLine.shipment'
db.add_column('oscar_mws_fulfillmentorderline', 'shipment',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='order_lines', null=True, to=orm['oscar_mws.FulfillmentShipment']),
keep_default=False)
# Adding field 'FulfillmentOrderLine.package'
db.add_column('oscar_mws_fulfillmentorderline', 'package',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='order_lines', null=True, to=orm['oscar_mws.ShipmentPackage']),
keep_default=False)
# Changing field 'FulfillmentOrderLine.line'
db.alter_column('oscar_mws_fulfillmentorderline', 'line_id', self.gf('django.db.models.fields.related.OneToOneField')(unique=True, to=orm['order.Line']))
# Adding unique constraint on 'FulfillmentOrderLine', fields ['line']
db.create_unique('oscar_mws_fulfillmentorderline', ['line_id'])
# Adding field 'ShipmentPackage.package_number'
db.add_column('oscar_mws_shipmentpackage', 'package_number',
self.gf('django.db.models.fields.IntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Removing unique constraint on 'FulfillmentOrderLine', fields ['line']
db.delete_unique('oscar_mws_fulfillmentorderline', ['line_id'])
# Deleting field 'FulfillmentOrderLine.shipment'
db.delete_column('oscar_mws_fulfillmentorderline', 'shipment_id')
# Deleting field 'FulfillmentOrderLine.package'
db.delete_column('oscar_mws_fulfillmentorderline', 'package_id')
# Changing field 'FulfillmentOrderLine.line'
db.alter_column('oscar_mws_fulfillmentorderline', 'line_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.Line']))
# Deleting field 'ShipmentPackage.package_number'
db.delete_column('oscar_mws_shipmentpackage', 'package_number')
models = {
'address.country': {
'Meta': {'ordering': "('-display_order', 'name')", 'object_name': 'Country'},
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),
'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
},
'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductClass']", 'null': 'True'}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'order.billingaddress': {
'Meta': {'object_name': 'BillingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.line': {
'Meta': {'object_name': 'Line'},
'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_price_before_discounts_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': "orm['order.Order']"}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['partner.Partner']"}),
'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'order.order': {
'Meta': {'ordering': "['-date_placed']", 'object_name': 'Order'},
'basket_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.BillingAddress']", 'null': 'True', 'blank': 'True'}),
'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'guest_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingAddress']", 'null': 'True', 'blank': 'True'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'to': "orm['auth.User']"})
},
'order.shippingaddress': {
'Meta': {'object_name': 'ShippingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.shippingevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'ShippingEvent'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'shipping_events'", 'symmetrical': 'False', 'through': "orm['order.ShippingEventQuantity']", 'to': "orm['order.Line']"}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_events'", 'to': "orm['order.Order']"})
},
'order.shippingeventquantity': {
'Meta': {'object_name': 'ShippingEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.ShippingEvent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_event_quantities'", 'to': "orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'order.shippingeventtype': {
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'ShippingEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'oscar_mws.amazonprofile': {
'Meta': {'object_name': 'AmazonProfile'},
'asin': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'fulfillment_by': ('django.db.models.fields.CharField', [], {'default': "'MFN'", 'max_length': '3'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_package_quantity': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'launch_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'number_of_items': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'product': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'amazon_profile'", 'unique': 'True', 'to': "orm['catalogue.Product']"}),
'product_tax_code': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'release_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'oscar_mws.feedreport': {
'Meta': {'object_name': 'FeedReport'},
'errors': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processed': ('django.db.models.fields.PositiveIntegerField', [], {}),
'status_code': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'submission': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'report'", 'unique': 'True', 'to': "orm['oscar_mws.FeedSubmission']"}),
'successful': ('django.db.models.fields.PositiveIntegerField', [], {}),
'warnings': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'oscar_mws.feedresult': {
'Meta': {'object_name': 'FeedResult'},
'description': ('django.db.models.fields.TextField', [], {}),
'feed_report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'to': "orm['oscar_mws.FeedReport']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message_code': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'oscar_mws.feedsubmission': {
'Meta': {'ordering': "['-date_updated']", 'object_name': 'FeedSubmission'},
'date_created': ('django.db.models.fields.DateTimeField', [], {}),
'date_submitted': ('django.db.models.fields.DateTimeField', [], {}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processing_status': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'submission_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'submitted_products': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'feed_submissions'", 'symmetrical': 'False', 'to': "orm['catalogue.Product']"})
},
'oscar_mws.fulfillmentorder': {
'Meta': {'object_name': 'FulfillmentOrder'},
'date_updated': ('django.db.models.fields.DateTimeField', [], {}),
'fulfillment_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'fulfillment_orders'", 'symmetrical': 'False', 'through': "orm['oscar_mws.FulfillmentOrderLine']", 'to': "orm['order.Line']"}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fulfillment_orders'", 'to': "orm['order.Order']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'})
},
'oscar_mws.fulfillmentorderline': {
'Meta': {'object_name': 'FulfillmentOrderLine'},
'fulfillment_order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fulfillment_lines'", 'to': "orm['oscar_mws.FulfillmentOrder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'fulfillment_line'", 'unique': 'True', 'to': "orm['order.Line']"}),
'order_item_id': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'to': "orm['oscar_mws.ShipmentPackage']"}),
'shipment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'to': "orm['oscar_mws.FulfillmentShipment']"})
},
'oscar_mws.fulfillmentshipment': {
'Meta': {'object_name': 'FulfillmentShipment'},
'date_estimated_arrival': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_shipped': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'fulfillment_center_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fulfillment_shipments'", 'to': "orm['order.Order']"}),
'shipment_events': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'fulfillment_shipments'", 'symmetrical': 'False', 'to': "orm['order.ShippingEvent']"}),
'shipment_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '24'})
},
'oscar_mws.shipmentpackage': {
'Meta': {'object_name': 'ShipmentPackage'},
'carrier_code': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'fulfillment_shipment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['oscar_mws.FulfillmentShipment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'package_number': ('django.db.models.fields.IntegerField', [], {}),
'tracking_number': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'partner.partner': {
'Meta': {'object_name': 'Partner'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'partners'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['oscar_mws'] | bsd-3-clause | -6,517,976,486,046,583,000 | 85.568579 | 222 | 0.563046 | false |
stvstnfrd/edx-platform | pavelib/paver_tests/test_paver_get_quality_reports.py | 1 | 1506 | """
Tests to ensure only the report files we want are returned as part of run_quality.
"""
import unittest
from mock import patch
import pavelib.quality
class TestGetReportFiles(unittest.TestCase):
"""
Ensure only the report files we want are returned as part of run_quality.
"""
@patch('os.walk')
def test_get_pylint_reports(self, my_mock):
my_mock.return_value = iter([
('/foo', (None,), ('pylint.report',)),
('/bar', ('/baz',), ('pylint.report',))
])
reports = pavelib.quality.get_violations_reports("pylint")
assert len(reports) == 2
@patch('os.walk')
def test_get_pep8_reports(self, my_mock):
my_mock.return_value = iter([
('/foo', (None,), ('pep8.report',)),
('/bar', ('/baz',), ('pep8.report',))
])
reports = pavelib.quality.get_violations_reports("pep8")
assert len(reports) == 2
@patch('os.walk')
def test_get_pep8_reports_noisy(self, my_mock):
""" Several conditions: different report types, different files, multiple files """
my_mock.return_value = iter([
('/foo', (None,), ('pep8.report',)),
('/fooz', ('/ball',), ('pylint.report',)),
('/fooz', ('/ball',), ('non.report',)),
('/fooz', ('/ball',), ('lms.xml',)),
('/bar', ('/baz',), ('pep8.report',))
])
reports = pavelib.quality.get_violations_reports("pep8")
assert len(reports) == 2
| agpl-3.0 | -206,932,454,560,561,900 | 30.375 | 91 | 0.543825 | false |
n3wb13/OpenNfrGui-5.0-1 | lib/python/Components/VolumeControl.py | 1 | 2495 | from enigma import eDVBVolumecontrol, eTimer
from Tools.Profile import profile
from Screens.Volume import Volume
from Screens.Mute import Mute
from GlobalActions import globalActionMap
from config import config, ConfigSubsection, ConfigInteger
profile("VolumeControl")
#TODO .. move this to its own .py file
class VolumeControl:
instance = None
"""Volume control, handles volUp, volDown, volMute actions and display
a corresponding dialog"""
def __init__(self, session):
global globalActionMap
globalActionMap.actions["volumeUp"]=self.volUp
globalActionMap.actions["volumeDown"]=self.volDown
globalActionMap.actions["volumeMute"]=self.volMute
assert not VolumeControl.instance, "only one VolumeControl instance is allowed!"
VolumeControl.instance = self
config.audio = ConfigSubsection()
config.audio.volume = ConfigInteger(default = 50, limits = (0, 100))
self.volumeDialog = session.instantiateDialog(Volume)
self.volumeDialog.setAnimationMode(0)
self.muteDialog = session.instantiateDialog(Mute)
self.muteDialog.setAnimationMode(0)
self.hideVolTimer = eTimer()
self.hideVolTimer.callback.append(self.volHide)
vol = config.audio.volume.getValue()
self.volumeDialog.setValue(vol)
self.volctrl = eDVBVolumecontrol.getInstance()
self.volctrl.setVolume(vol, vol)
def volSave(self):
if self.volctrl.isMuted():
config.audio.volume.setValue(0)
else:
config.audio.volume.setValue(self.volctrl.getVolume())
config.audio.volume.save()
def volUp(self):
self.setVolume(+1)
def volDown(self):
self.setVolume(-1)
def setVolume(self, direction):
oldvol = self.volctrl.getVolume()
if direction > 0:
self.volctrl.volumeUp()
else:
self.volctrl.volumeDown()
is_muted = self.volctrl.isMuted()
vol = self.volctrl.getVolume()
self.volumeDialog.show()
if is_muted:
self.volMute() # unmute
elif not vol:
self.volMute(False, True) # mute but dont show mute symbol
if self.volctrl.isMuted():
self.volumeDialog.setValue(0)
else:
self.volumeDialog.setValue(self.volctrl.getVolume())
self.volSave()
self.hideVolTimer.start(3000, True)
def volHide(self):
self.volumeDialog.hide()
def volMute(self, showMuteSymbol=True, force=False):
vol = self.volctrl.getVolume()
if vol or force:
self.volctrl.volumeToggleMute()
if self.volctrl.isMuted():
if showMuteSymbol:
self.muteDialog.show()
self.volumeDialog.setValue(0)
else:
self.muteDialog.hide()
self.volumeDialog.setValue(vol)
| gpl-2.0 | 2,756,066,974,094,087,000 | 28.352941 | 82 | 0.748297 | false |
BrAwnyTime/RayTracer | Textures/makeScaledTex.py | 1 | 1585 | import numpy as np
import time
import tables
import sys
'''---------------------------------------------------------'''
''' Setup PyTables Files '''
'''---------------------------------------------------------'''
scale = 2
originalName = "earthScaled8"
scaledName = "earthScaled16"
h5tex = tables.open_file("/home/brad/rayTracer/Textures/textures.h5", mode = 'a', title = "HDF5 Texture File")
og = h5tex.getNode(h5tex.root, name=originalName)
texWidth = og.shape[1] / 3
texHeight = og.shape[0]
scaledWidth = texWidth/scale
scaledHeight = texHeight/scale
scaled = np.zeros((scaledHeight, scaledWidth * 3))
str_time = time.time()
curPercent = 0
lastPercent = 0
for y in range(0, scaledHeight):
for x in range(0, scaledWidth):
scaledValue = np.zeros(3)
t_y = y * scale
t_x = x * scale
curPercent = np.floor((((y*scaledWidth)+(x+1))/float(scaledWidth*scaledHeight))*1000) / 10.0
if (curPercent > lastPercent):
lastPercent = curPercent
cur_sec = time.time() - str_time
sys.stdout.write("\rScale Texture %.1f%% [%ds]" % (curPercent, cur_sec))
sys.stdout.flush()
for iy in range(0, scale):
for ix in range(0, scale):
scaledValue += og[t_y + iy, (3 * (t_x + ix)):(3 * (t_x + ix)) + 3]
scaledValue = scaledValue / float(scale**2)
scaled[y, (3 * x):(3 * x) + 3] = scaledValue
earthsmall = h5tex.create_array(h5tex.root, scaledName, scaled, "Scaled texture map of the Earth's surface")
h5tex.close()
| mit | 3,638,641,487,451,038,000 | 27.818182 | 110 | 0.557098 | false |
pattarapol-iamngamsup/projecteuler_python | problem_011.py | 1 | 6237 | """ Copyright 2012, July 31
Written by Pattarapol (Cheer) Iamngamsup
E-mail: [email protected]
Largest product in a grid
Problem 11
In the 20×20 grid below, four numbers
along a diagonal line have been marked in red.
08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08
49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00
81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65
52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91
22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80
24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50
32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70
67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21
24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72
21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95
78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92
16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57
86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58
19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40
04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66
88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69
04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36
20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16
20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54
01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48
The product of these numbers is 26 × 63 × 78 × 14 = 1788696.
What is the greatest product of four adjacent numbers in any direction
(up, down, left, right, or diagonally) in the 20×20 grid?
"""
#################################################
# Importing libraries & modules
import datetime
#################################################
# Global variables
ADJACENT_NUM = 4
ROW_NUM = 20
COL_NUM = 20
GridNumberStr = '08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 '
GridNumberStr += '49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 '
GridNumberStr += '81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 '
GridNumberStr += '52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 '
GridNumberStr += '22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 '
GridNumberStr += '24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 '
GridNumberStr += '32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 '
GridNumberStr += '67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 '
GridNumberStr += '24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 '
GridNumberStr += '21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 '
GridNumberStr += '78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 '
GridNumberStr += '16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 '
GridNumberStr += '86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 '
GridNumberStr += '19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 '
GridNumberStr += '04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 '
GridNumberStr += '88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 '
GridNumberStr += '04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 '
GridNumberStr += '20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 '
GridNumberStr += '20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 '
GridNumberStr += '01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48 '
#################################################
# Functions
#################################################
# Classes
#################################################
# Main function
def main():
numberStrList = GridNumberStr.split()
numList = list()
for index in range( 0, len( numberStrList ) ):
numList.append( int( numberStrList[index] ) )
greatestProduct = 0
adjacentProduct = 0
for i in range( 0, ROW_NUM ):
for j in range( 0, COL_NUM ):
# left to right
if j + ( ADJACENT_NUM - 1 ) < COL_NUM:
adjacentProduct = ( numList[ ROW_NUM * i + j ]
* numList[ ROW_NUM * i + j + 1 ]
* numList[ ROW_NUM * i + j + 2 ]
* numList[ ROW_NUM * i + j + 3 ] )
if adjacentProduct > greatestProduct:
greatestProduct = adjacentProduct
########################
# up to down
if i + ( ADJACENT_NUM - 1 ) < ROW_NUM:
adjacentProduct = ( numList[ ROW_NUM * i + j ]
* numList[ ROW_NUM * i + 1 + j ]
* numList[ ROW_NUM * i + 2 + j ]
* numList[ ROW_NUM * i + 3 + j ] )
if adjacentProduct > greatestProduct:
greatestProduct = adjacentProduct
########################
            # diagonal left to right
if j + ( ADJACENT_NUM - 1 ) < COL_NUM \
and i + ( ADJACENT_NUM - 1) < ROW_NUM:
adjacentProduct = ( numList[ ROW_NUM * i + j ]
* numList[ ROW_NUM * ( i + 1 ) + j + 1 ]
* numList[ ROW_NUM * ( i + 2 ) + j + 2 ]
* numList[ ROW_NUM * ( i + 3 ) + j + 3 ] )
if adjacentProduct > greatestProduct:
greatestProduct = adjacentProduct
########################
            # diagonal right to left (needs j >= ADJACENT_NUM - 1 to fit four numbers)
            if j - ( ADJACENT_NUM - 1 ) >= 0 \
and i + ( ADJACENT_NUM - 1 ) < ROW_NUM:
adjacentProduct = ( numList[ ROW_NUM * i + j ]
* numList[ ROW_NUM * ( i + 1 ) + j - 1 ]
* numList[ ROW_NUM * ( i + 2 ) + j - 2 ]
* numList[ ROW_NUM * ( i + 3 ) + j - 3 ] )
if adjacentProduct > greatestProduct:
greatestProduct = adjacentProduct
print( 'answer = {0}'.format( greatestProduct ) )
#################################################
# Main execution
if __name__ == '__main__':
# get starting date time
startingDateTime = datetime.datetime.utcnow()
print( 'startingDateTime = {0} UTC'.format( startingDateTime ) )
# call main function
main()
# get ending date time
endingdateTime = datetime.datetime.utcnow()
print( 'endingdateTime = {0} UTC'.format( endingdateTime ) )
# compute delta date time
deltaDateTime = endingdateTime - startingDateTime
print( 'deltaDateTime = {0}'.format( deltaDateTime ) )
| gpl-3.0 | 6,276,150,481,619,951,000 | 40.141892 | 79 | 0.575758 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_07_01/operations/_available_delegations_operations.py | 1 | 5366 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class AvailableDelegationsOperations(object):
"""AvailableDelegationsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
location, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.AvailableDelegationsResult"]
"""Gets all of the available subnet delegations for this subscription in this region.
:param location: The location of the subnet.
:type location: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AvailableDelegationsResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_07_01.models.AvailableDelegationsResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableDelegationsResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('AvailableDelegationsResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/availableDelegations'} # type: ignore
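    # Usage sketch (not part of the generated file): this operation group is normally
    # reached through a NetworkManagementClient rather than constructed directly. The
    # credential/client classes and the "westus2" location below are illustrative
    # assumptions, not taken from this module.
    #
    #   from azure.identity import DefaultAzureCredential
    #   from azure.mgmt.network import NetworkManagementClient
    #
    #   network_client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
    #   for delegation in network_client.available_delegations.list(location="westus2"):
    #       print(delegation.name)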
| mit | 7,378,669,600,206,450,000 | 44.863248 | 148 | 0.641446 | false |
semk/voldemort | voldemort/__init__.py | 1 | 19088 | # -*- coding: utf-8 -*-
#
# Voldemort: A static site generator using Jinja2 and Markdown templates
#
# @author: Sreejith K
# Created On 19th Sep 2011
import os
import sys
import logging
import datetime
import shutil
import urllib
from optparse import OptionParser
import BaseHTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
import template
import config
import util
import paginator
log = logging.getLogger('voldemort')
FEED_TEMPLATE = ("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n"
"<feed xmlns=\"http://www.w3.org/2005/Atom\">\n"
"\n"
" <title>{{ site.name }}</title>\n"
" <link href=\"{{ site.address }}/atom.xml\" rel=\"self\"/>\n"
" <link href=\"{{ site.address }}\"/>\n"
" <updated>{{ site.time | date_to_xmlschema }}</updated>\n"
" <id>{{ site.id }}</id>\n"
" <author>\n"
" <name>{{ site.author_name }}</name>\n"
" <email>{{ site.author_email }}</email>\n"
" </author>\n"
"\n"
" {% for post in posts %}\n"
" <entry>\n"
" <title>{{ post.title }}</title>\n"
" <link href=\"{{ site.address }}{{ post.url }}\"/>\n"
" <updated>{{ post.date | date_to_xmlschema }}</updated>\n"
" <id>{{ site.address }}{{ post.id }}</id>\n"
" <content type=\"html\">{{ post.content | xml_escape }}</content>\n"
" </entry>\n"
" {% endfor %}\n"
"\n"
"</feed>\n"
)
SITE_MAP = ("<?xml version='1.0' encoding='UTF-8'?>\n"
"<urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\"\n"
" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n"
" xsi:schemaLocation=\"http://www.sitemaps.org/schemas/sitemap/0.9\n"
" http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd\">\n"
"\n"
" {% for page in pages %}\n"
" <url>\n"
" <loc>{{ page.url }}</loc>\n"
" <lastmod>{{ site.time | date_to_xmlschema }}</lastmod>\n"
" </url>\n"
" {% endfor %}\n"
"\n"
" {% for post in posts %}\n"
" <url>\n"
" <loc>{{ post.url }}</loc>\n"
" <lastmod>{{ post.date | date_to_xmlschema }}</lastmod>\n"
" </url>\n"
" {% endfor %}\n"
"\n"
"</urlset>\n"
)
class Voldemort(object):
"""Provides all the functionalities like meta-data parsing
and site generation.
"""
template_extensions = [
'.htm', '.html', '.md', '.markdown',
'.jinja', '.jinja2', '.txt', '.xml']
preserved_extensions = ['.txt', '.xml']
date_format = '%d-%m-%Y'
def __init__(self, work_dir, conf):
self.work_dir = work_dir
self.config = conf
self.logfile = os.path.join(self.work_dir, 'voldemort.log')
self.tag_template = os.path.join(self.work_dir, 'tag.html')
util.setup_logging(self.logfile, logging.DEBUG)
log.info('Voldemort working at %s' % self.work_dir)
template.setup_template_dirs(self.config.layout_dirs)
template.setup_filters()
# ignore the following directories
self.ignored_items = [
self.config.posts_dir,
self.config.site_dir,
self.logfile,
self.tag_template,
os.path.join(self.work_dir, '.git'),
os.path.join(self.work_dir, '.DS_Store')
] + self.config.layout_dirs
log.debug('The following list of directories/files will be ignored: %s'
% ', '.join(self.ignored_items))
self.posts = []
self.tags = {}
self.pages = []
def init(self):
"""(Re)create the site directory.
"""
if not os.path.exists(self.config.site_dir):
log.debug('Creating %s' % self.config.site_dir)
os.mkdir(self.config.site_dir)
def serve(self, port):
"""Run an HTTPServer on the given port under the working directory.
"""
# change to site directory
os.chdir(self.config.site_dir)
# start httpd on port
server_address = ('', port)
SimpleHTTPRequestHandler.protocol_version = 'HTTP/1.0'
httpd = BaseHTTPServer.HTTPServer(
server_address,
SimpleHTTPRequestHandler)
sa = httpd.socket.getsockname()
log.info('Serving HTTP on %s port %s ...' % (sa[0], sa[1]))
try:
httpd.serve_forever()
except KeyboardInterrupt:
log.info('Stopping httpd...')
httpd.socket.close()
def deploy(self, username, server_address, directory):
"""Deploy this website to the server
"""
if server_address == 'github.com':
log.info('Pushing updates to %s' % directory)
if not os.path.exists(os.path.join(self.config.site_dir, '.git')):
# initialize github page as a submodule
log.info('Adding [email protected]:%s/%s.git as a submodule'
% (username, directory))
os.system('git submodule add [email protected]:%s/%s.git %s'
% (username, directory, self.config.site_dir))
os.system('cd %s && git add -A && git commit -am "Updated on %s" && git push origin master'
% (self.config.site_dir, datetime.datetime.now()))
return
if directory.startswith('~') or directory.startswith('.'):
            directory = '/home/%s' % username + directory[1:]
log.info('Deploying site at %s@%s:%s'
% (username, server_address, directory))
try:
os.system('rsync -rtzh --progress --delete %s/ %s@%s:%s/'
% (self.config.site_dir,
username,
server_address,
directory))
except:
log.error('Deployment failed.')
def get_page_name_for_site(self, filename, extn='.html'):
"""Changes the file extension to html if needed.
"""
directory, base = os.path.split(filename)
name, ext = os.path.splitext(base)
if ext not in self.template_extensions \
or ext in self.preserved_extensions:
return filename
if directory == self.config.site_dir and name == 'index':
name = name + extn
else:
name = os.path.join(name, 'index' + extn)
return os.path.join(directory, name)
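    # Illustrative input -> output mapping for the method above (paths assume a site
    # directory named "site"; the file names are invented examples):
    #   site/about.md    -> site/about/index.html   (template extension, wrapped in a directory)
    #   site/index.html  -> site/index.html         (index at the site root is kept as-is)
    #   site/feed.xml    -> site/feed.xml           (preserved extension, returned unchanged)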
def parse_meta_data(self):
"""Parses the meta data from posts
"""
for post in os.listdir(self.config.posts_dir):
# ignore hidden files
if post.startswith('.'):
continue
post = os.path.join(self.config.posts_dir, post)
post_meta = template.get_meta_data(post)
post_meta['date'] = datetime.datetime.strptime(
post_meta['date'],
self.date_format)
post_url = os.path.join(
'/',
post_meta['date'].strftime(
self.config.post_url),
os.path.splitext(
post_meta['filename'].split(
self.config.posts_dir)[1][1:])[0])
post_meta['url'] = post_url
self.posts.append(post_meta)
# create tag cloud
for tag in post_meta.get('tags', []):
if tag in self.tags:
self.tags[tag].append(post_meta)
else:
self.tags[tag] = [post_meta]
# sort posts based on date.
self.posts.sort(key=lambda x: x['date'], reverse=True)
# sort tags based on date
for tagname in self.tags:
self.tags[tagname].sort(key=lambda x: x['date'], reverse=True)
# include next and previous urls for posts. includes post tags
for post_num, post in enumerate(self.posts):
post_tags = []
for tagname in post.get('tags', []):
tag_url = os.path.join(
'/',
'tag',
urllib.quote_plus(tagname.lower()))
post_tags.append(
{'name': tagname,
'url': tag_url,
'posts': self.tags[tagname]})
post['tags'] = post_tags
post['id'] = post['url']
previous = post_num + 1
next = post_num - 1
if previous < len(self.posts):
post['previous'] = self.posts[previous]
else:
post['previous'] = None
if next >= 0:
post['next'] = self.posts[next]
else:
post['next'] = None
# tags for env
tags_info = []
for tagname, post in self.tags.iteritems():
tag_url = os.path.join(
'/',
'tag',
urllib.quote_plus(tagname.lower()))
tags_info.append(
{'name': tagname,
'url': tag_url,
'posts': self.tags[tagname]})
# create paginator
self.paginator = paginator.Paginator(self.posts, self.config.paginate)
# create site information
site = {'time': datetime.datetime.now()}
# extract site information from settings.yaml
site.update(getattr(self.config, 'site', {}))
# update the template global with posts info
template.env.globals.update(
{'posts': self.posts,
'site': site,
'tags': tags_info})
def paginate(self, filename, page_meta):
"""Paginate the content in the file
"""
log.info('Paginating page %s' % filename)
for pgr in self.paginator:
log.debug('Paginating: %s' % pgr)
html = template.render(
page_meta['raw'],
{'page': page_meta, 'paginator': pgr})
if pgr.current_page == 1:
paginator_path = os.path.join(self.config.site_dir,
filename.split(self.work_dir)[1][1:])
log.debug('Generating page %s' % paginator_path)
write_html(paginator_path, html)
current_page = 'page%s' % pgr.current_page
site_path, ext = os.path.splitext(
filename.split(self.work_dir)[1][1:])
if site_path == 'index':
site_path = ''
paginator_path = os.path.join(
self.config.site_dir,
site_path,
current_page,
'index.html')
log.debug('Generating page %s' % paginator_path)
# write the rendered page
write_html(paginator_path, html)
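    # Output layout sketch for a paginated index.html with three pages of posts
    # (directory names are illustrative; page 1 is written both at the page's own
    # path and under its numbered directory, as the loop above shows):
    #   <site_dir>/index.html
    #   <site_dir>/page1/index.html
    #   <site_dir>/page2/index.html
    #   <site_dir>/page3/index.html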
def generate_posts(self):
"""Generate the posts from the posts directory
"""
log.info('Generating posts from %s' % self.config.posts_dir)
for post in self.posts:
html = template.render(
post['raw'],
{'post': post, 'page': post})
# construct the url to the post
post_url = os.path.join(self.config.site_dir, post['url'][1:])
post_file = os.path.join(post_url, 'index.html')
log.debug('Generating post: %s' % post_file)
# write the html
write_html(post_file, html)
def generate_pages(self):
"""Generate HTML from all the other pages.
"""
log.info('Generating pages')
for root, dirs, files in os.walk(self.work_dir):
            # checks whether root is a subdirectory of an ignored item
def is_a_subdirectory(sub):
return sub in root
# ignore all the subdirectories
if any(map(is_a_subdirectory, self.ignored_items)):
continue
for filename in files:
# ignore hidden files
if filename.startswith('.'):
continue
filename = os.path.join(root, filename)
# ignore tag template
if filename == self.tag_template:
continue
_, extn = os.path.splitext(filename)
if extn not in self.template_extensions:
dest = os.path.join(self.config.site_dir,
filename.split(self.work_dir)[1][1:])
move_to_site(filename, dest)
continue
page_meta = template.get_meta_data(filename)
page_url = os.path.join(
'/',
os.path.splitext(
page_meta['filename'].split(self.work_dir)[1][1:])[0])
page_meta['url'] = page_url
self.pages.append(page_meta)
# paginate if needed
if page_meta.get('paginate', False):
self.paginate(filename, page_meta)
continue
html = template.render(page_meta['raw'], {'page': page_meta})
page_path = os.path.join(
self.config.site_dir,
filename.split(self.work_dir)[1][1:])
page_path = self.get_page_name_for_site(page_path)
log.debug('Generating page %s' % page_path)
# write the rendered page
write_html(page_path, html)
def generate_tags(self):
"""Generate tag pages.
"""
log.info('Generating tags')
tag_template_meta = template.get_meta_data(
os.path.join(self.work_dir, self.tag_template))
for tagname, posts in self.tags.iteritems():
render_vars = {'tag': {'name': tagname, 'posts': posts},
'page': tag_template_meta}
html = template.render(tag_template_meta['raw'], render_vars)
tag_page_path = os.path.join(
self.config.site_dir,
'tag',
urllib.quote_plus(tagname.lower()),
'index.html')
log.debug('Generating tag %s: %s' % (tagname, tag_page_path))
# write the html page
write_html(tag_page_path, html)
def generate_feed(self, filename='atom.xml'):
"""Generate Atom feed
"""
feed_path = os.path.join(self.config.site_dir, filename)
feed = template.render(FEED_TEMPLATE)
feed_path = self.get_page_name_for_site(feed_path)
log.info('Generating Atom feed at %s' % feed_path)
write_html(feed_path, feed)
def generate_sitemap(self, filename='sitemap.xml'):
map_path = os.path.join(self.config.site_dir, filename)
log.info('Generating sitemap at %s' % map_path)
sitemap = template.render(
SITE_MAP,
{'posts': self.posts, 'pages': self.pages})
write_html(map_path, sitemap)
def run(self, options):
"""Generate the site.
"""
self.init()
try:
if os.path.exists(self.config.posts_dir):
self.parse_meta_data()
else:
log.warning("No posts directory found. Ignoring posts.")
if self.posts and not options.skip_blog:
self.generate_posts()
if not options.skip_pages:
self.generate_pages()
if not options.skip_tags:
self.generate_tags()
if not options.skip_feeds:
self.generate_feed()
if not options.skip_sitemap:
self.generate_sitemap()
log.info('Done.')
except Exception as ex:
log.error('ERROR: %s. Refer %s for detailed information.'
% (str(ex), self.logfile)
)
log.debug('TRACEBACK: %r' % util.print_traceback())
def write_html(filename, data):
"""Write the html data to file.
"""
try:
os.makedirs(os.path.dirname(filename))
except OSError:
pass
with open(filename, 'w') as f:
f.write(data.encode('utf-8'))
def move_to_site(source, dest):
"""Move the file to the site.
"""
log.debug('Moving %s to %s' % (source, dest))
try:
os.makedirs(os.path.dirname(dest))
except OSError:
pass
shutil.copyfile(source, dest)
def main():
work_dir = os.path.abspath(os.getcwd())
# check for commandline options
usage = 'voldemort [options]'
parser = OptionParser(usage)
conf = config.load_config(work_dir)
parser.add_option(
'-s', '--serve',
action='store_true', help='Start the HTTP Server',
default=False)
parser.add_option(
'-p', '--port',
        help='Port on which the HTTPServer should run',
type='int', default=8080)
parser.add_option(
'-d', '--deploy',
action='store_true', help='Deploy this website',
default=False)
parser.add_option(
'-u', '--user',
help='Login name for server',
default=conf.deploy.get('user') if hasattr(conf, 'deploy') else None)
parser.add_option(
'-a', '--at',
help='Server address to deploy the site',
default=conf.deploy.get('at') if hasattr(conf, 'deploy') else None)
parser.add_option(
'-t', '--to',
help='Deployment directory',
default=conf.deploy.get('to') if hasattr(conf, 'deploy') else None)
parser.add_option(
'--skip-blog',
action='store_true', help='Skip blog posts generation',
default=False)
parser.add_option(
'--skip-pages',
action='store_true', help='Skip pages generation',
default=False)
parser.add_option(
'--skip-tags',
action='store_true', help='Skip tags generation',
default=False)
parser.add_option(
'--skip-feeds',
action='store_true', help='Skip Atom feed generation',
default=False)
parser.add_option(
'--skip-sitemap',
action='store_true', help='Skip sitemap generation',
default=False)
# parse the options
(options, args) = parser.parse_args()
app = Voldemort(work_dir, conf)
# validate options
if options.serve:
app.serve(options.port)
elif options.deploy:
if not options.user or not options.at or not options.to:
print 'Operation is missing a few options.'
parser.print_help()
sys.exit(-2)
app.deploy(options.user, options.at, options.to)
else:
app.run(options)
if __name__ == '__main__':
main()
| apache-2.0 | 8,570,697,191,450,008,000 | 35.77842 | 103 | 0.508749 | false |
ThomasMcVay/MediaApp | MediaAppKnobs/KnobElements/FloatWidget.py | 1 | 1798 | #===============================================================================
# @Author: Madison Aster
# @ModuleDescription:
# @License:
# MediaApp Library - Python Package framework for developing robust Media
# Applications with Qt Library
# Copyright (C) 2013 Madison Aster
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License version 2.1 as published by the Free Software Foundation;
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See LICENSE in the root directory of this library for copy of
# GNU Lesser General Public License and other license details.
#===============================================================================
from Qt import QtGui, QtCore, QtWidgets
class FloatWidget(QtWidgets.QLineEdit):
def __init__(self):
super(FloatWidget, self).__init__()
self.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
self.setAlignment(QtCore.Qt.AlignLeft)
def setValue(self, value):
self.setText(str(value))
        self.textChanged.emit(self.text())
self.update()
def getValue(self):
return float(self.text())
def sizeHint(self):
return QtCore.QSize(150,16) | lgpl-2.1 | 156,117,284,344,290,800 | 41.902439 | 88 | 0.619577 | false |
ARM-software/astc-encoder | Test/astc_quality_test.py | 1 | 3498 | #!/usr/bin/env python3
# SPDX-License-Identifier: Apache-2.0
# -----------------------------------------------------------------------------
# Copyright 2021 Arm Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# -----------------------------------------------------------------------------
"""
The ``astc_quality_test`` utility provides a tool to sweep quality settings.
"""
import numpy as np
import re
import subprocess as sp
import sys
def get_psnr_pattern():
return r"\s*PSNR \(LDR-RGB\):\s*([0-9.]*) dB"
def get_coding_rate_pattern():
return r"\s*Coding rate:\s*([0-9.]*) MT/s"
def parse_output(output):
# Regex pattern for image quality
patternPSNR = re.compile(get_psnr_pattern())
patternCRate = re.compile(get_coding_rate_pattern())
# Extract results from the log
runPSNR = None
runCRate = None
for line in output:
match = patternPSNR.match(line)
if match:
runPSNR = float(match.group(1))
match = patternCRate.match(line)
if match:
runCRate = float(match.group(1))
assert runPSNR is not None, "No coding PSNR found"
assert runCRate is not None, "No coding rate found"
return (runPSNR, runCRate)
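# Example of the astcenc log lines the two patterns above are written against (the
# numbers are invented for illustration):
#
#     PSNR (LDR-RGB):          43.1572 dB
#     Coding rate:             18.2406 MT/s
#
# For a log containing these two lines, parse_output() returns (43.1572, 18.2406).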
def execute(command):
"""
Run a subprocess with the specified command.
Args:
command (list(str)): The list of command line arguments.
Returns:
list(str): The output log (stdout) split into lines.
"""
try:
result = sp.run(command, stdout=sp.PIPE, stderr=sp.PIPE,
check=True, universal_newlines=True)
except (OSError, sp.CalledProcessError):
print("ERROR: Test run failed")
print(" + %s" % " ".join(command))
qcommand = ["\"%s\"" % x for x in command]
print(" + %s" % ", ".join(qcommand))
sys.exit(1)
return result.stdout.splitlines()
def main():
"""
The main function.
Returns:
int: The process return code.
"""
for block in ("4x4", "5x5", "6x6", "8x8", "10x10"):
for quality in range (0, 101, 2):
resultsQ = []
resultsS = []
if (quality < 40):
repeats = 20
elif (quality < 75):
repeats = 10
else:
repeats = 5
for _ in range(0, repeats):
command = [
"./astcenc/astcenc-avx2",
"-tl",
"./Test/Images/Kodak/LDR-RGB/ldr-rgb-kodak23.png",
"/dev/null",
block,
"%s" % quality,
"-silent"
]
stdout = execute(command)
psnr, mts = parse_output(stdout)
resultsQ.append(psnr)
resultsS.append(mts)
print("%s, %u, %0.3f, %0.3f" % (block, quality, np.mean(resultsS), np.mean(resultsQ)))
return 0
if __name__ == "__main__":
sys.exit(main())
| apache-2.0 | -8,849,309,807,774,193,000 | 27.209677 | 98 | 0.540309 | false |
tensorflow/models | official/vision/image_classification/optimizer_factory.py | 1 | 6894 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Optimizer factory for vision tasks."""
from __future__ import absolute_import
from __future__ import division
# from __future__ import google_type_annotations
from __future__ import print_function
from typing import Any, Dict, Optional, Text
from absl import logging
import tensorflow as tf
import tensorflow_addons as tfa
from official.modeling import optimization
from official.vision.image_classification import learning_rate
from official.vision.image_classification.configs import base_configs
# pylint: disable=protected-access
def build_optimizer(
optimizer_name: Text,
base_learning_rate: tf.keras.optimizers.schedules.LearningRateSchedule,
params: Dict[Text, Any],
model: Optional[tf.keras.Model] = None):
"""Build the optimizer based on name.
Args:
optimizer_name: String representation of the optimizer name. Examples: sgd,
momentum, rmsprop.
base_learning_rate: `tf.keras.optimizers.schedules.LearningRateSchedule`
base learning rate.
params: String -> Any dictionary representing the optimizer params. This
should contain optimizer specific parameters such as `base_learning_rate`,
`decay`, etc.
model: The `tf.keras.Model`. This is used for the shadow copy if using
`ExponentialMovingAverage`.
Returns:
A tf.keras.Optimizer.
Raises:
ValueError if the provided optimizer_name is not supported.
"""
optimizer_name = optimizer_name.lower()
logging.info('Building %s optimizer with params %s', optimizer_name, params)
if optimizer_name == 'sgd':
logging.info('Using SGD optimizer')
nesterov = params.get('nesterov', False)
optimizer = tf.keras.optimizers.SGD(
learning_rate=base_learning_rate, nesterov=nesterov)
elif optimizer_name == 'momentum':
logging.info('Using momentum optimizer')
nesterov = params.get('nesterov', False)
optimizer = tf.keras.optimizers.SGD(
learning_rate=base_learning_rate,
momentum=params['momentum'],
nesterov=nesterov)
elif optimizer_name == 'rmsprop':
logging.info('Using RMSProp')
rho = params.get('decay', None) or params.get('rho', 0.9)
momentum = params.get('momentum', 0.9)
epsilon = params.get('epsilon', 1e-07)
optimizer = tf.keras.optimizers.RMSprop(
learning_rate=base_learning_rate,
rho=rho,
momentum=momentum,
epsilon=epsilon)
elif optimizer_name == 'adam':
logging.info('Using Adam')
beta_1 = params.get('beta_1', 0.9)
beta_2 = params.get('beta_2', 0.999)
epsilon = params.get('epsilon', 1e-07)
optimizer = tf.keras.optimizers.Adam(
learning_rate=base_learning_rate,
beta_1=beta_1,
beta_2=beta_2,
epsilon=epsilon)
elif optimizer_name == 'adamw':
logging.info('Using AdamW')
weight_decay = params.get('weight_decay', 0.01)
beta_1 = params.get('beta_1', 0.9)
beta_2 = params.get('beta_2', 0.999)
epsilon = params.get('epsilon', 1e-07)
optimizer = tfa.optimizers.AdamW(
weight_decay=weight_decay,
learning_rate=base_learning_rate,
beta_1=beta_1,
beta_2=beta_2,
epsilon=epsilon)
else:
raise ValueError('Unknown optimizer %s' % optimizer_name)
if params.get('lookahead', None):
logging.info('Using lookahead optimizer.')
optimizer = tfa.optimizers.Lookahead(optimizer)
# Moving average should be applied last, as it's applied at test time
moving_average_decay = params.get('moving_average_decay', 0.)
if moving_average_decay is not None and moving_average_decay > 0.:
if model is None:
raise ValueError(
'`model` must be provided if using `ExponentialMovingAverage`.')
logging.info('Including moving average decay.')
optimizer = optimization.ExponentialMovingAverage(
optimizer=optimizer, average_decay=moving_average_decay)
optimizer.shadow_copy(model)
return optimizer
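# Usage sketch (not from the original module; the optimizer name, learning rate and
# parameter values are made-up examples):
#
#   optimizer = build_optimizer(
#       optimizer_name='momentum',
#       base_learning_rate=0.1,
#       params={'momentum': 0.9, 'nesterov': True})
#
# `base_learning_rate` may also be a tf.keras LearningRateSchedule, e.g. the result of
# build_learning_rate() defined below.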
def build_learning_rate(params: base_configs.LearningRateConfig,
batch_size: Optional[int] = None,
train_epochs: Optional[int] = None,
train_steps: Optional[int] = None):
"""Build the learning rate given the provided configuration."""
decay_type = params.name
base_lr = params.initial_lr
decay_rate = params.decay_rate
if params.decay_epochs is not None:
decay_steps = params.decay_epochs * train_steps
else:
decay_steps = 0
if params.warmup_epochs is not None:
warmup_steps = params.warmup_epochs * train_steps
else:
warmup_steps = 0
lr_multiplier = params.scale_by_batch_size
if lr_multiplier and lr_multiplier > 0:
# Scale the learning rate based on the batch size and a multiplier
base_lr *= lr_multiplier * batch_size
logging.info(
'Scaling the learning rate based on the batch size '
'multiplier. New base_lr: %f', base_lr)
if decay_type == 'exponential':
logging.info(
'Using exponential learning rate with: '
'initial_learning_rate: %f, decay_steps: %d, '
'decay_rate: %f', base_lr, decay_steps, decay_rate)
lr = tf.keras.optimizers.schedules.ExponentialDecay(
initial_learning_rate=base_lr,
decay_steps=decay_steps,
decay_rate=decay_rate,
staircase=params.staircase)
elif decay_type == 'stepwise':
steps_per_epoch = params.examples_per_epoch // batch_size
boundaries = [boundary * steps_per_epoch for boundary in params.boundaries]
multipliers = [batch_size * multiplier for multiplier in params.multipliers]
logging.info(
'Using stepwise learning rate. Parameters: '
'boundaries: %s, values: %s', boundaries, multipliers)
lr = tf.keras.optimizers.schedules.PiecewiseConstantDecay(
boundaries=boundaries, values=multipliers)
elif decay_type == 'cosine_with_warmup':
lr = learning_rate.CosineDecayWithWarmup(
batch_size=batch_size,
total_steps=train_epochs * train_steps,
warmup_steps=warmup_steps)
if warmup_steps > 0:
if decay_type not in ['cosine_with_warmup']:
logging.info('Applying %d warmup steps to the learning rate',
warmup_steps)
lr = learning_rate.WarmupDecaySchedule(
lr, warmup_steps, warmup_lr=base_lr)
return lr
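# Sketch of a matching call (illustrative values; the config fields mirror the ones read
# above and are assumed to be settable on base_configs.LearningRateConfig):
#
#   lr_config = base_configs.LearningRateConfig(
#       name='exponential', initial_lr=0.01, decay_rate=0.97, decay_epochs=2,
#       staircase=True, scale_by_batch_size=0.0, warmup_epochs=None)
#   lr = build_learning_rate(lr_config, batch_size=256, train_epochs=90, train_steps=1000)
#   optimizer = build_optimizer('momentum', lr, {'momentum': 0.9})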
| apache-2.0 | 6,286,244,039,494,606,000 | 36.879121 | 80 | 0.687554 | false |
mahabs/nitro | nssrc/com/citrix/netscaler/nitro/resource/config/cs/cspolicylabel_cspolicy_binding.py | 1 | 9664 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class cspolicylabel_cspolicy_binding(base_resource) :
""" Binding class showing the cspolicy that can be bound to cspolicylabel.
"""
def __init__(self) :
self._policyname = ""
self._priority = 0
self._targetvserver = ""
self._gotopriorityexpression = ""
self._invoke = False
self._labeltype = ""
self._invoke_labelname = ""
self._labelname = ""
self.___count = 0
@property
def priority(self) :
"""Specifies the priority of the policy.
"""
try :
return self._priority
except Exception as e:
raise e
@priority.setter
def priority(self, priority) :
"""Specifies the priority of the policy.
"""
try :
self._priority = priority
except Exception as e:
raise e
@property
def gotopriorityexpression(self) :
"""Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
"""
try :
return self._gotopriorityexpression
except Exception as e:
raise e
@gotopriorityexpression.setter
def gotopriorityexpression(self, gotopriorityexpression) :
"""Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
"""
try :
self._gotopriorityexpression = gotopriorityexpression
except Exception as e:
raise e
@property
def policyname(self) :
"""Name of the content switching policy.
"""
try :
return self._policyname
except Exception as e:
raise e
@policyname.setter
def policyname(self, policyname) :
"""Name of the content switching policy.
"""
try :
self._policyname = policyname
except Exception as e:
raise e
@property
def targetvserver(self) :
"""Name of the virtual server to which to forward requests that match the policy.
"""
try :
return self._targetvserver
except Exception as e:
raise e
@targetvserver.setter
def targetvserver(self, targetvserver) :
"""Name of the virtual server to which to forward requests that match the policy.
"""
try :
self._targetvserver = targetvserver
except Exception as e:
raise e
@property
def labeltype(self) :
"""Type of policy label invocation.<br/>Possible values = policylabel.
"""
try :
return self._labeltype
except Exception as e:
raise e
@labeltype.setter
def labeltype(self, labeltype) :
"""Type of policy label invocation.<br/>Possible values = policylabel
"""
try :
self._labeltype = labeltype
except Exception as e:
raise e
@property
def labelname(self) :
"""Name of the policy label to which to bind a content switching policy.
"""
try :
return self._labelname
except Exception as e:
raise e
@labelname.setter
def labelname(self, labelname) :
"""Name of the policy label to which to bind a content switching policy.
"""
try :
self._labelname = labelname
except Exception as e:
raise e
@property
def invoke_labelname(self) :
"""Name of the label to invoke if the current policy rule evaluates to TRUE.
"""
try :
return self._invoke_labelname
except Exception as e:
raise e
@invoke_labelname.setter
def invoke_labelname(self, invoke_labelname) :
"""Name of the label to invoke if the current policy rule evaluates to TRUE.
"""
try :
self._invoke_labelname = invoke_labelname
except Exception as e:
raise e
@property
def invoke(self) :
try :
return self._invoke
except Exception as e:
raise e
@invoke.setter
def invoke(self, invoke) :
try :
self._invoke = invoke
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(cspolicylabel_cspolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.cspolicylabel_cspolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.labelname) :
return str(self.labelname)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = cspolicylabel_cspolicy_binding()
updateresource.labelname = resource.labelname
updateresource.policyname = resource.policyname
updateresource.targetvserver = resource.targetvserver
updateresource.gotopriorityexpression = resource.gotopriorityexpression
updateresource.invoke = resource.invoke
updateresource.labeltype = resource.labeltype
updateresource.invoke_labelname = resource.invoke_labelname
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [cspolicylabel_cspolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].labelname = resource[i].labelname
updateresources[i].policyname = resource[i].policyname
updateresources[i].targetvserver = resource[i].targetvserver
updateresources[i].gotopriorityexpression = resource[i].gotopriorityexpression
updateresources[i].invoke = resource[i].invoke
updateresources[i].labeltype = resource[i].labeltype
updateresources[i].invoke_labelname = resource[i].invoke_labelname
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = cspolicylabel_cspolicy_binding()
deleteresource.labelname = resource.labelname
deleteresource.policyname = resource.policyname
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [cspolicylabel_cspolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].labelname = resource[i].labelname
deleteresources[i].policyname = resource[i].policyname
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, labelname) :
""" Use this API to fetch cspolicylabel_cspolicy_binding resources.
"""
try :
obj = cspolicylabel_cspolicy_binding()
obj.labelname = labelname
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, labelname, filter_) :
""" Use this API to fetch filtered set of cspolicylabel_cspolicy_binding resources.
		Filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
"""
try :
obj = cspolicylabel_cspolicy_binding()
obj.labelname = labelname
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, labelname) :
""" Use this API to count cspolicylabel_cspolicy_binding resources configued on NetScaler.
"""
try :
obj = cspolicylabel_cspolicy_binding()
obj.labelname = labelname
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, labelname, filter_) :
""" Use this API to count the filtered set of cspolicylabel_cspolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = cspolicylabel_cspolicy_binding()
obj.labelname = labelname
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class Labeltype:
policylabel = "policylabel"
class cspolicylabel_cspolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.cspolicylabel_cspolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.cspolicylabel_cspolicy_binding = [cspolicylabel_cspolicy_binding() for _ in range(length)]
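# Usage sketch (not part of the SDK module): binding a content switching policy to a
# policy label through an authenticated nitro_service client. The client setup, label
# and policy names below are illustrative assumptions.
#
#   binding = cspolicylabel_cspolicy_binding()
#   binding.labelname = "cs_lbl_images"
#   binding.policyname = "cs_pol_jpg"
#   binding.targetvserver = "vs_images"
#   cspolicylabel_cspolicy_binding.add(ns_session, binding)
#   bound = cspolicylabel_cspolicy_binding.get(ns_session, "cs_lbl_images")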
| apache-2.0 | 3,208,087,756,175,860,700 | 28.735385 | 132 | 0.714714 | false |
gfetterman/bark | bark/tools/barkutils.py | 1 | 12062 | import os.path
from glob import glob
import bark
import argparse
from bark import stream
import arrow
from dateutil import tz
import numpy
import sys
import subprocess
def meta_attr():
p = argparse.ArgumentParser(
description="Create/Modify a metadata attribute")
p.add_argument("name", help="name of bark object (Entry or Dataset)")
p.add_argument("attribute",
help="name of bark attribute to create or modify")
p.add_argument("value", help="value of attribute")
args = p.parse_args()
name, attr, val = (args.name, args.attribute, args.value)
attrs = bark.read_metadata(name)
try:
attrs[attr] = eval(val) # try to parse
except Exception:
attrs[attr] = val # assign as string
bark.write_metadata(name, **attrs)
def meta_column_attr():
p = argparse.ArgumentParser(
description="Create/Modify a metadata attribute for a column of data")
p.add_argument("name", help="name of bark object (Entry or Dataset)")
p.add_argument("column", help="name of the column of a Dataset")
p.add_argument("attribute",
help="name of bark attribute to create or modify")
p.add_argument("value", help="value of attribute")
args = p.parse_args()
name, column, attr, val = (args.name, args.column, args.attribute, args.value)
attrs = bark.read_metadata(name)
columns = attrs['columns']
if 'dtype' in attrs:
column = int(column)
try:
columns[column][attr] = eval(val) # try to parse
except Exception:
columns[column][attr] = val # assign as string
bark.write_metadata(name, **attrs)
def mk_entry():
p = argparse.ArgumentParser(description="create a bark entry")
p.add_argument("name", help="name of bark entry")
p.add_argument("-a",
"--attributes",
action='append',
type=lambda kv: kv.split("="),
dest='keyvalues',
help="extra metadata in the form of KEY=VALUE")
p.add_argument("-t",
"--timestamp",
help="format: YYYY-MM-DD or YYYY-MM-DD_HH-MM-SS.S")
p.add_argument("-p",
"--parents",
help="no error if already exists, new meta-data written",
action="store_true")
p.add_argument('--timezone',
help="timezone of timestamp, default: America/Chicago",
default='America/Chicago')
args = p.parse_args()
timestamp = arrow.get(args.timestamp).replace(
tzinfo=tz.gettz(args.timezone)).datetime
attrs = dict(args.keyvalues) if args.keyvalues else {}
bark.create_entry(args.name, timestamp, args.parents, **attrs)
def _clean_metafiles(path, recursive, meta='.meta.yaml'):
metafiles = glob(os.path.join(path, "*" + meta))
for mfile in metafiles:
if not os.path.isfile(mfile[:-len(meta)]):
os.remove(mfile)
if recursive:
dirs = [x
for x in os.listdir(path)
if os.path.isdir(os.path.join(path, x))]
for d in dirs:
_clean_metafiles(os.path.join(path, d), True, meta)
def clean_metafiles():
"""
remove x.meta.yaml files with no associated file (x)
"""
p = argparse.ArgumentParser(
description="remove x.meta.yaml files with no associated file (x)")
p.add_argument("path", help="name of bark entry", default=".")
p.add_argument("-r",
"--recursive",
help="search recursively",
action="store_true")
args = p.parse_args()
_clean_metafiles(args.path, args.recursive)
def rb_concat():
p = argparse.ArgumentParser(
description="""Concatenate raw binary files by adding new samples.
Do not confuse with merge, which combines channels""")
p.add_argument("input", help="input raw binary files", nargs="+")
p.add_argument("-a",
"--attributes",
action='append',
type=lambda kv: kv.split("="),
dest='keyvalues',
help="extra metadata in the form of KEY=VALUE")
p.add_argument("-o", "--out", help="name of output file", required=True)
args = p.parse_args()
if args.keyvalues:
attrs = dict(args.keyvalues)
else:
attrs = {}
streams = [stream.read(x) for x in args.input]
streams[0].chain(*streams[1:]).write(args.out, **attrs)
def rb_decimate():
' Downsample raw binary file.'
p = argparse.ArgumentParser(description="Downsample raw binary file")
p.add_argument("input", help="input bark file")
p.add_argument("--factor",
required=True,
type=int,
help="downsample factor")
p.add_argument("-a",
"--attributes",
action='append',
type=lambda kv: kv.split("="),
dest='keyvalues',
help="extra metadata in the form of KEY=VALUE")
p.add_argument("-o", "--out", help="name of output file", required=True)
args = p.parse_args()
if args.keyvalues:
attrs = dict(args.keyvalues)
else:
attrs = {}
stream.read(args.input).decimate(args.factor).write(args.out, **attrs)
def rb_select():
p = argparse.ArgumentParser(description='''
Select a subset of channels from a sampled dataset
''')
p.add_argument('dat', help='dat file')
p.add_argument('-o', '--out', help='name of output datfile')
p.add_argument('-c',
'--channels',
help='''channels to extract,
zero indexed channel numbers
unless --col-attr is set, in which case
channels are metadata values''',
nargs='+',
required=True)
p.add_argument('--col-attr',
help='name of column attribute to select channels with')
args = p.parse_args()
fname, outfname, channels, col_attr = (args.dat, args.out, args.channels,
args.col_attr)
stream = bark.read_sampled(fname).toStream()
if col_attr:
columns = stream.attrs['columns']
rev_attr = {col[col_attr]: idx
for idx, col in columns.items()
if col_attr in col} # so you can tag only some channels
channels = [rev_attr[c] for c in channels]
else:
channels = [int(c) for c in channels]
stream[channels].write(outfname)
def rb_filter():
p = argparse.ArgumentParser(description="""
filter a sampled dataset
""")
p.add_argument("dat", help="dat file")
p.add_argument("-o", "--out", help="name of output dat file")
p.add_argument("--order", help="filter order", default=3, type=int)
p.add_argument("--highpass", help="highpass frequency", type=float)
p.add_argument("--lowpass", help="lowpass frequency", type=float)
p.add_argument("-f",
"--filter",
help="filter type: butter or bessel",
default="bessel")
opt = p.parse_args()
dtype = bark.read_metadata(opt.dat)['dtype']
stream.read(opt.dat)._analog_filter(opt.filter,
highpass=opt.highpass,
lowpass=opt.lowpass,
order=opt.order).write(opt.out, dtype)
attrs = bark.read_metadata(opt.out)
attrs['highpass'] = opt.highpass
attrs['lowpass'] = opt.lowpass
attrs['filter'] = opt.filter
attrs['filter_order'] = opt.order
bark.write_metadata(opt.out, **attrs)
def rb_diff():
p = argparse.ArgumentParser(description="""
Subtracts one channel from another
""")
p.add_argument("dat", help="dat file")
p.add_argument("-c",
"--channels",
help="""channels to difference, zero indexed, default: 0 1,
subtracts second channel from first.""",
type=int,
nargs="+")
p.add_argument("-o", "--out", help="name of output dat file")
opt = p.parse_args()
dat, out, channels = opt.dat, opt.out, opt.channels
if not channels:
channels = (0, 1)
(stream.read(dat)[channels[0]] - stream.read(dat)[channels[1]]).write(out)
def rb_join():
p = argparse.ArgumentParser(description="""
    Combines dat files by adding new channels with the same number of
    samples. To add additional samples, use dat-cat""")
p.add_argument("dat", help="dat files", nargs="+")
p.add_argument("-o", "--out", help="name of output dat file")
opt = p.parse_args()
streams = [stream.read(fname) for fname in opt.dat]
streams[0].merge(*streams[1:]).write(opt.out)
def rb_to_audio():
p = argparse.ArgumentParser()
p.add_argument("dat",
help="""dat file to convert to audio,
can be any number of channels but you probably want 1 or 2""")
p.add_argument("out", help="name of output file, with filetype extension")
opt = p.parse_args()
attrs = bark.read_metadata(opt.dat)
sr = str(attrs['sampling_rate'])
ch = str(len(attrs['columns']))
dt = numpy.dtype(attrs['dtype'])
bd = str(dt.itemsize * 8)
if dt.name[:5] == 'float':
enc = 'floating-point'
elif dt.name[:3] == 'int':
enc = 'signed-integer'
elif dt.name[:4] == 'uint':
enc = 'unsigned-integer'
else:
        raise TypeError('cannot handle dtype of ' + dt.name)
if dt.byteorder == '<':
order = 'little'
elif dt.byteorder == '>':
order = 'big'
elif dt.byteorder == '=': # native
order = sys.byteorder
else:
raise ValueError('unrecognized endianness: ' + dt.byteorder)
sox_cmd = ['sox', '-r', sr, '-c', ch, '-b', bd, '-e', enc,
'--endian', order, '-t', 'raw', opt.dat, opt.out]
try:
subprocess.run(sox_cmd)
except FileNotFoundError as e:
if "'sox'" in str(e):
raise FileNotFoundError(str(e) + '. dat-to-audio requires SOX')
else:
raise
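# For a little-endian int16, two-channel dataset sampled at 30 kHz, the command built
# above would be roughly (illustrative file names only):
#
#   sox -r 30000 -c 2 -b 16 -e signed-integer --endian little -t raw in.dat out.wav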
def rb_to_wave_clus():
import argparse
p = argparse.ArgumentParser(prog="dat2wave_clus",
description="""
Converts a raw binary file to a wav_clus compatible matlab file
""")
p.add_argument("dat", help="dat file")
p.add_argument("-o", "--out", help="name of output .mat file")
opt = p.parse_args()
from scipy.io import savemat
dataset = bark.read_sampled(opt.dat)
savemat(opt.out,
{'data': dataset.data.T,
'sr': dataset.attrs['sampling_rate']},
appendmat=False)
def _datchunk():
p = argparse.ArgumentParser(description="split a dat file by samples")
p.add_argument("dat", help="datfile")
p.add_argument("stride",
type=float,
help="number of samples to chunk together")
p.add_argument("--seconds",
help="specify seconds instead of samples",
action='store_true')
p.add_argument("--onecut",
help="only perform the first cut",
action="store_true")
args = p.parse_args()
datchunk(args.dat, args.stride, args.seconds, args.onecut)
def datchunk(dat, stride, use_seconds, one_cut):
def write_chunk(chunk, attrs, i):
filename = "{}-chunk-{}.dat".format(basename, i)
attrs['offset'] = stride * i
bark.write_sampled(filename, chunk, **attrs)
attrs = bark.read_metadata(dat)
if use_seconds:
stride = stride * attrs['sampling_rate']
stride = int(stride)
basename = os.path.splitext(dat)[0]
if one_cut:
sds = bark.read_sampled(dat)
write_chunk(sds.data[:stride,:], attrs, 0)
write_chunk(sds.data[stride:,:], attrs, 1)
else:
for i, chunk in enumerate(stream.read(dat, chunksize=stride)):
write_chunk(chunk, attrs, i)
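# Usage sketch (file names are invented): split a recording into 60-second chunks, or
# make a single cut after the first 10 seconds.
#
#   datchunk('rec.dat', 60, use_seconds=True, one_cut=False)  # rec-chunk-0.dat, rec-chunk-1.dat, ...
#   datchunk('rec.dat', 10, use_seconds=True, one_cut=True)   # rec-chunk-0.dat, rec-chunk-1.dat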
| gpl-2.0 | -8,583,137,634,202,377,000 | 36.113846 | 82 | 0.570718 | false |
taosheng/jarvis | chatbot/src/socialEnBrain.py | 1 | 2090 | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import json
import requests
import re
import random
import time
import sys
import csv
from genericKB import genericHandler
from esHealth import esHealthHandler
from wikiFinder import findWikiEn
#from io import open
import codecs
#from pttChat import pttHandler
#from wikiChat import wikiHandler
import os
class GenericEnBrain():
listIdx = [('enbasic1',0.8), ('enbot1',2.0)]
kb = {}
notFoundResList = []
def __init__(self):
with open('basickb_en.csv') as csvfile:
spamreader = csv.reader(csvfile, delimiter=',', quotechar='"')
for row in spamreader:
if(len(row)>=2):
self.kb[row[0].strip()] = row[1].strip()
def randomAct(self, actKey):
res_act = self.kb[actKey].split(";")
return random.choice(res_act)
def think(self, msg):
response = ''
dirtylist = self.kb['dirty_words'].lower().split(";")
msg = msg.strip()
for dword in dirtylist:
dword = dword.strip()
if dword in msg:
return self.randomAct('dirty_words_res')
for cnf in self.listIdx:
response = genericHandler(cnf[0], 'fb', msg, min_score=cnf[1])
if response != '':
return response
if response == '': # Wikifinedr
nltk_data_path = os.getcwd()+'/nltk_data'
print(nltk_data_path)
os.environ['NLTK_DATA'] = nltk_data_path
from textblob import TextBlob
b = TextBlob(msg.lower())
if len(b.noun_phrases) > 0:
toFindInWiki = b.noun_phrases[0]
wikiResponse = findWikiEn(toFindInWiki)
response = wikiResponse[0:256] + "...<search from wiki>"
if response == '':
response = self.randomAct('act_no_info')
return response
genBrain = GenericEnBrain()
if __name__ == '__main__':
msg = sys.argv[1]
print(genBrain.think(msg))
# print(gBrain.think(msg))
# print(fbBrain.think(msg))
| apache-2.0 | -2,871,116,191,798,822,400 | 26.142857 | 74 | 0.571292 | false |
jas14/khmer | scripts/extract-paired-reads.py | 1 | 3488 | #! /usr/bin/env python
#
# This script is part of khmer, https://github.com/dib-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2015. It is licensed under
# the three-clause BSD license; see LICENSE.
# Contact: [email protected]
#
# pylint: disable=invalid-name,missing-docstring
"""
Split up pairs and singletons.
Take a file containing a mixture of interleaved and orphaned reads, and
extract them into separate files (.pe and .se).
% scripts/extract-paired-reads.py <infile>
Reads FASTQ and FASTA input, retains format for output.
"""
from __future__ import print_function
import screed
import sys
import os.path
import textwrap
import argparse
import khmer
from khmer.kfile import check_input_files, check_space
from khmer.khmer_args import info
from khmer.utils import broken_paired_reader, write_record, write_record_pair
def get_parser():
epilog = """
The output is two files, <input file>.pe and <input file>.se, placed in the
current directory. The .pe file contains interleaved and properly paired
sequences, while the .se file contains orphan sequences.
Many assemblers (e.g. Velvet) require that you give them either perfectly
interleaved files, or files containing only single reads. This script takes
files that were originally interleaved but where reads may have been
orphaned via error filtering, application of abundance filtering, digital
normalization in non-paired mode, or partitioning.
Example::
extract-paired-reads.py tests/test-data/paired.fq
"""
parser = argparse.ArgumentParser(
description='Take a mixture of reads and split into pairs and '
'orphans.', epilog=textwrap.dedent(epilog))
parser.add_argument('infile')
parser.add_argument('--version', action='version', version='%(prog)s ' +
khmer.__version__)
parser.add_argument('-f', '--force', default=False, action='store_true',
help='Overwrite output file if it exists')
return parser
def main():
info('extract-paired-reads.py')
args = get_parser().parse_args()
check_input_files(args.infile, args.force)
infiles = [args.infile]
check_space(infiles, args.force)
outfile = os.path.basename(args.infile)
if len(sys.argv) > 2:
outfile = sys.argv[2]
single_fp = open(outfile + '.se', 'w')
paired_fp = open(outfile + '.pe', 'w')
print('reading file "%s"' % args.infile, file=sys.stderr)
print('outputting interleaved pairs to "%s.pe"' % outfile, file=sys.stderr)
print('outputting orphans to "%s.se"' % outfile, file=sys.stderr)
n_pe = 0
n_se = 0
screed_iter = screed.open(args.infile, parse_description=False)
for index, is_pair, read1, read2 in broken_paired_reader(screed_iter):
if index % 100000 == 0 and index > 0:
print('...', index, file=sys.stderr)
if is_pair:
write_record_pair(read1, read2, paired_fp)
n_pe += 1
else:
write_record(read1, single_fp)
n_se += 1
single_fp.close()
paired_fp.close()
if n_pe == 0:
raise Exception("no paired reads!? check file formats...")
print('DONE; read %d sequences,'
' %d pairs and %d singletons' %
(n_pe * 2 + n_se, n_pe, n_se), file=sys.stderr)
print('wrote to: ' + outfile + '.se' + ' and ' + outfile + '.pe',
file=sys.stderr)
if __name__ == '__main__':
main()
| bsd-3-clause | -76,326,370,296,495,920 | 31.296296 | 79 | 0.653956 | false |
pattisdr/osf.io | tests/test_registrations/base.py | 1 | 2821 | import copy
import datetime as dt
from django.utils import timezone
from framework.auth import Auth
from osf.utils import permissions
from osf.models import RegistrationSchema
from tests.base import OsfTestCase
from osf_tests.factories import AuthUserFactory, ProjectFactory, DraftRegistrationFactory
class RegistrationsTestBase(OsfTestCase):
def setUp(self):
super(RegistrationsTestBase, self).setUp()
self.user = AuthUserFactory()
self.auth = Auth(self.user)
self.node = ProjectFactory(creator=self.user)
self.non_admin = AuthUserFactory()
self.node.add_contributor(
self.non_admin,
permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS,
auth=self.auth,
save=True
)
self.non_contrib = AuthUserFactory()
self.meta_schema = RegistrationSchema.objects.get(name='Open-Ended Registration', schema_version=2)
self.draft = DraftRegistrationFactory(
initiator=self.user,
branched_from=self.node,
registration_schema=self.meta_schema,
registration_metadata={
'summary': {'value': 'Some airy'}
}
)
current_month = timezone.now().strftime('%B')
current_year = timezone.now().strftime('%Y')
valid_date = timezone.now() + dt.timedelta(days=180)
self.embargo_payload = {
'data': {
'attributes': {
'children': [self.node._id],
'draft_registration': self.draft._id,
'lift_embargo': unicode(valid_date.strftime('%a, %d, %B %Y %H:%M:%S')) + u' GMT',
'registration_choice': 'embargo',
},
'type': 'registrations',
},
}
self.invalid_embargo_date_payload = copy.deepcopy(self.embargo_payload)
self.invalid_embargo_date_payload['data']['attributes']['lift_embargo'] = u'Thu, 01 {month} {year} 05:00:00 GMT'.format(
month=current_month,
year=str(int(current_year) - 1)
)
self.immediate_payload = {
'data': {
'attributes': {
'children': [self.node._id],
'draft_registration': self.draft._id,
'registration_choice': 'immediate',
},
'type': 'registrations',
},
}
self.invalid_payload = copy.deepcopy(self.immediate_payload)
self.invalid_payload['data']['attributes']['registration_choice'] = 'foobar'
def draft_url(self, view_name):
return self.node.web_url_for(view_name, draft_id=self.draft._id)
def draft_api_url(self, view_name):
return self.node.api_url_for(view_name, draft_id=self.draft._id)
| apache-2.0 | 575,793,879,495,920,960 | 34.708861 | 128 | 0.575328 | false |
CondensedOtters/PHYSIX_Utils | Projects/Moog_2016-2019/CO2/CO2_NN/analysis.py | 1 | 9175 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 14 05:54:11 2020
@author: mathieumoog
"""
import cpmd
import filexyz
import numpy as np
import matplotlib.pyplot as plt
# MSMbuilder ( lacks CK validation )
from msmbuilder.msm import MarkovStateModel
from msmbuilder.msm import BayesianMarkovStateModel
from msmbuilder.utils import dump
# PyEMMMA ( has CK validation )
import pyemma as pe
from pyemma.datasets import double_well_discrete
def getDistance1Dsq( position1, position2, length):
dist = position1-position2
half_length = length*0.5
if dist > half_length :
dist -= length
elif dist < -half_length:
dist += length
return dist*dist
def getDistanceOrtho( positions, index1, index2, cell_lengths ):
dist=0
for i in range(3):
dist += getDistance1Dsq( positions[index1,i], positions[index2,i], cell_lengths[i] )
return np.sqrt(dist)
def computeContactMatrix( positions, cell_lengths, cut_off ):
nb_atoms = len(positions[:,0])
matrix = np.zeros(( nb_atoms, nb_atoms ))
for atom in range(nb_atoms):
for atom2 in range(atom+1,nb_atoms):
if getDistanceOrtho( positions, atom, atom2, cell_lengths ) < cut_off :
matrix[atom,atom2] = 1
matrix[atom2,atom] = 1
return matrix
def computeTransitionMatrix( states, nb_states, tau, step_max ):
nb_step = len(states)
matrix = np.zeros((nb_states,nb_states))
for step in range( nb_step-step_max ):
matrix[ states[step], states[step+tau] ] += 1
return matrix
def computeChapmanKolmogorov( matrix, nb_states ):
matrix_ck = np.zeros((nb_states,nb_states),dtype=float)
for state_i in range( nb_states ):
for state_j in range( nb_states ):
for i in range(nb_states):
matrix_ck[ state_i, state_j ] += matrix[state_i,i]*matrix[i,state_j]
return matrix_ck
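# Chapman-Kolmogorov check used below (sketch): if the coordination-number dynamics were
# Markovian, the transition matrix estimated at lag 2*tau would match the square of the
# one estimated at lag tau,
#   T(2*tau) ~= T(tau) . T(tau)
# computeChapmanKolmogorov() builds that matrix product element-wise, and the RMSE between
# T(2*tau) and the product is plotted further down as a function of the lag time.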
volume=8.82
temperature=3000
# run_nb=1
path_sim = str( "/Users/mathieumoog/Documents/CO2/" +
str(volume) + "/" +
str(temperature) + "K/"
# + str(run_nb) + "-run/"
)
cell_lengths = np.ones(3)*volume
traj_path = str( path_sim + "TRAJEC_fdb_wrapped.xyz" )
traj = filexyz.readAsArray( traj_path )
nbC=32
nbO=64
nb_atoms=nbC+nbO
max_neigh=5
nb_step=len(traj[:,0,0])
cut_off = 1.75
min_stat=1000
# Build States
coordC = np.zeros( (nb_step,nbC), dtype=int )
coordO = np.zeros( (nb_step,nbO), dtype=int )
for step in range(nb_step):
matrix = computeContactMatrix( traj[step,:,:], cell_lengths, cut_off)
for carbon in range(0,nbC):
coordC[ step, carbon ] = int( sum(matrix[carbon,:]) )
for oxygen in range(nbC,nb_atoms):
coordO[ step, oxygen-nbC ] = int( sum(matrix[oxygen,:]) )
c_min = coordC.min()
o_min = coordO.min()
# Adapting the labels to make sure they are in the 0-nb_states range
coordC -= c_min
coordO -= o_min
msm = MarkovStateModel( lag_time=1, n_timescales=6)
msm.fit( coordC[:,0] )
print( msm.timescales_ )
# Computing nb of states (max)
nb_states_C = coordC.max()+1
nb_states_O = coordO.max()+1
# Computing Equilibrium States Probabilities
coordC_hist = np.zeros( nb_states_C )
ones_ = np.ones((nb_step,nbC), dtype=int )
for i in range( nb_states_C ):
coordC_hist[i] = sum( ones_[ coordC == i ] )
# Clean marginal states
# for state in range( nb_states_C ):
# if coordC_hist[state] < min_stat:
# mask_to_clean = coordC[ :, : ]
coordC_hist /= sum(coordC_hist[:])
# Computing Equilibrium States Probabilities, cleaning marginals
ones_ = np.ones((nb_step,nbO), dtype=int )
coordO_hist = np.zeros( nb_states_O )
for i in range( nb_states_O ):
coordO_hist[i] = sum( ones_[ coordO == i ] )
coordO_hist /= sum(coordO_hist[:])
# Plotting Oxygens
plt.figure()
plt.plot(coordC_hist,"b.-")
plt.plot(coordO_hist,"r.-")
plt.legend(["C states","O states"])
plt.show()
dt=5*0.001
frac = 0.75
max_step=int(nb_step*frac)
nb_tau_min=int(250)
nb_tau_max=int(2*nb_tau_min)
# Computing Transition Matrix for a given tau
matrix_tot=np.zeros((nb_states_C,nb_states_C,nb_tau_max), dtype=float )
matrix_tot_ck=np.zeros((nb_states_C,nb_states_C,nb_tau_min), dtype=float )
for tau in range(nb_tau_max):
matrix = np.zeros((nb_states_C,nb_states_C),dtype=float)
for carbon in range(nbC):
matrix += computeTransitionMatrix( coordC[:,carbon], nb_states_C, tau+1, max_step )
for state in range(nb_states_C):
matrix[state,:] /= sum( matrix[state,:] )
matrix_tot[:,:,tau] = matrix[:,:]
if tau < nb_tau_min:
matrix_tot_ck[:,:,tau] = computeChapmanKolmogorov( matrix_tot[:,:,tau], nb_states_C )
carbon_target=3
matrix_markov = np.zeros( (4,4,nb_tau_min), dtype=float )
matrix_markov_ck = np.zeros( (4,4,nb_tau_min), dtype=float )
for tau in range(1,nb_tau_min+1):
msm_matrix = MarkovStateModel( lag_time=tau, reversible_type="mle" ,n_timescales=nb_states_C, ergodic_cutoff="on", sliding_window=True, verbose=True)
msm_matrix.fit( coordC[:,carbon_target] )
matrix_markov[:,:,tau-1] = msm_matrix.transmat_
for state_i in range( len(matrix_markov) ):
for state_j in range( len(matrix_markov) ):
for i in range( len(matrix_markov) ):
matrix_markov_ck[ state_i, state_j, tau-1 ] += matrix_markov[state_i,i,tau-1]*matrix_markov[i,state_j,tau-1]
# PyEMMA
lags = [1,5,10,15,20,50,100,200]
implied_timescales = pe.msm.its(dtrajs=coordC[:,carbon_target].tolist(),lags=lags)
pe.plots.plot_implied_timescales(implied_timescales,units='time-steps', ylog=False)
M = pe.msm.estimate_markov_model(dtrajs=coordC[:,carbon_target].tolist(), lag = 10 )
cktest = M.cktest(nsets=3)
cktplt = pe.plots.plot_cktest(cktest)
plt.figure()
plt.xlabel("Time lag (ps)")
plt.ylabel("P_ij, P_ij^CK")
# plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[0,0,:], "k-" )
# plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[0,0,:], "k--" )
plt.plot( np.arange(0,dt*nb_tau_min,dt*1), matrix_markov[0,0,:], "k-" )
plt.plot( np.arange(0,2*dt*nb_tau_min,dt*2), matrix_markov_ck[0,0,:], "k--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[1,1,:], "r-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[1,1,:], "r--" )
plt.plot( np.arange(0,dt*nb_tau_min,dt*1), matrix_markov[0,1,:], "k-" )
plt.plot( np.arange(0,2*dt*nb_tau_min,dt*2), matrix_markov_ck[0,1,:], "k--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[1,2,:], "b-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[1,2,:], "b--" )
plt.plot( np.arange(0,dt*nb_tau_min,dt*1), matrix_markov[0,2,:], "k-" )
plt.plot( np.arange(0,2*dt*nb_tau_min,dt*2), matrix_markov_ck[0,2,:], "k--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[1,3,:], "g-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[1,3,:], "g--" )
plt.plot( np.arange(0,dt*nb_tau_min,dt*1), matrix_markov[0,3,:], "k-" )
plt.plot( np.arange(0,2*dt*nb_tau_min,dt*2), matrix_markov_ck[0,3,:], "k--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[1,4,:], "m-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[1,4,:], "m--" )
plt.show()
rmseC = np.zeros(nb_tau_min, dtype=float)
for tau in range(nb_tau_min):
mat = matrix_tot[:,:,2*tau]-matrix_tot_ck[:,:,tau]
rmseC[tau] = sum(sum( mat*mat ))/(nb_states_C*nb_states_C)
plt.figure()
plt.xlabel("Time lag (ps)")
plt.ylabel("RMSE C (%)")
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), rmseC*100 )
plt.show()
matrix_tot=np.zeros((nb_states_O,nb_states_O,nb_tau_max), dtype=float )
matrix_tot_ck=np.zeros((nb_states_O,nb_states_O,nb_tau_min), dtype=float )
for tau in range(nb_tau_max):
matrix = np.zeros((nb_states_O,nb_states_O),dtype=float)
    for oxygen in range(nbO):
        matrix += computeTransitionMatrix( coordO[:,oxygen], nb_states_O, tau+1, max_step )
for state in range(nb_states_O):
matrix[state,:] /= sum( matrix[state,:] )
matrix_tot[:,:,tau] = matrix[:,:]
if tau < nb_tau_min:
matrix_tot_ck[:,:,tau] = computeChapmanKolmogorov( matrix_tot[:,:,tau], nb_states_O )
plt.figure()
plt.xlabel("Time lag (ps)")
plt.ylabel("P_ij, P_ij^CK")
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[0,0,:], "k-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[0,0,:], "k--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[1,1,:], "r-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[1,1,:], "r--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[2,2,:], "b-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[2,2,:], "b--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[3,3,:], "g-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[3,3,:], "g--" )
plt.show()
rmseO = np.zeros(nb_tau_min, dtype=float)
for tau in range(nb_tau_min):
mat = matrix_tot[:,:,2*tau]-matrix_tot_ck[:,:,tau]
rmseO[tau] = sum(sum( mat*mat ))/(nb_states_O*nb_states_O)
plt.figure()
plt.xlabel("Time lag (ps)")
plt.ylabel("RMSE O (%)")
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), rmseO*100 )
plt.show()
plt.figure()
plt.xlabel("Time lag (ps)")
plt.ylabel("RMSE all (%)")
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), (rmseO+rmseC)*100*0.5 )
plt.show()
| gpl-3.0 | -8,227,085,920,944,906,000 | 35.7 | 153 | 0.637275 | false |
Ecogenomics/CheckM | checkm/plot/distributionPlots.py | 1 | 2841 | ###############################################################################
#
# codingDensityPlots.py - Create a GC histogram and a delta-CD plot.
#
###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
from checkm.plot.AbstractPlot import AbstractPlot
from checkm.plot.gcPlots import GcPlots
from checkm.plot.codingDensityPlots import CodingDensityPlots
from checkm.plot.tetraDistPlots import TetraDistPlots
class DistributionPlots(AbstractPlot):
def __init__(self, options):
AbstractPlot.__init__(self, options)
self.options = options
def plot(self, fastaFile, tetraSigs, distributionsToPlot):
# Set size of figure
self.fig.clear()
self.fig.set_size_inches(self.options.width, self.options.height)
axesHistGC = self.fig.add_subplot(321)
axesDeltaGC = self.fig.add_subplot(322)
axesHistTD = self.fig.add_subplot(323)
axesDeltaTD = self.fig.add_subplot(324)
axesHistCD = self.fig.add_subplot(325)
axesDeltaCD = self.fig.add_subplot(326)
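        # 3x2 panel layout: rows correspond to GC content, tetranucleotide
        # distance and coding density; the left column holds the histograms
        # and the right column the corresponding delta plots.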
gcPlots = GcPlots(self.options)
gcPlots.plotOnAxes(fastaFile, distributionsToPlot, axesHistGC, axesDeltaGC)
tetraDistPlots = TetraDistPlots(self.options)
tetraDistPlots.plotOnAxes(fastaFile, tetraSigs, distributionsToPlot, axesHistTD, axesDeltaTD)
codingDensityPlots = CodingDensityPlots(self.options)
codingDensityPlots.plotOnAxes(fastaFile, distributionsToPlot, axesHistCD, axesDeltaCD)
self.fig.tight_layout(pad=1, w_pad=2, h_pad=2)
self.draw()
| gpl-3.0 | 5,811,296,671,881,534,000 | 48.732143 | 101 | 0.529743 | false |
dotmpe/htcache | Protocol.py | 1 | 18422 | """
The Protocol object relays the client request and accumulates the server
response data, combining it with the cached data. From there the Response
object serves it to the client.
"""
import calendar, os, time, socket, re
import Params, Runtime, Response, Resource, Rules
import HTTP
#from util import *
import log
mainlog = log.get_log('main')
class DNSLookupException(Exception):
def __init__(self, addr, exc):
self.addr = addr
self.exc = exc
def __str__(self):
return "DNS lookup error for %s: %s" % ( self.addr, self.exc )
DNSCache = {}
def connect(addr):
# FIXME: return HTTP 5xx
assert Runtime.ONLINE, \
'operating in off-line mode'
if addr not in DNSCache:
mainlog.debug('Requesting address info for %s:%i', *addr)
try:
DNSCache[ addr ] = socket.getaddrinfo(
addr[ 0 ], addr[ 1 ], Runtime.FAMILY, socket.SOCK_STREAM )
except Exception, e:
raise DNSLookupException(addr, e)
family, socktype, proto, canonname, sockaddr = DNSCache[ addr ][ 0 ]
mainlog.info('Connecting to %s:%i', *sockaddr)
sock = socket.socket( family, socktype, proto )
sock.setblocking( 0 )
sock.connect_ex( sockaddr )
return sock
class BlindProtocol:
"""
    Blind protocol is used to aim for graceful recovery upon unexpected
    requests.
"""
Response = None
data = None
def __init__(self, request):
self.__socket = connect( request.hostinfo )
self.__sendbuf = request.recvbuf()
def socket(self):
return self.__socket
def recvbuf(self):
return ''
def hasdata(self):
return True
def send(self, sock):
bytecnt = sock.send( self.__sendbuf )
self.__sendbuf = self.__sendbuf[ bytecnt: ]
if not self.__sendbuf:
self.Response = Response.BlindResponse
def done(self):
pass
class CachingProtocol(object):
"""
Open cache and descriptor index for requested resources.
    Filter requests using the Drop, NoCache and related rules.
"""
Response = None
"the htcache response class"
capture = None
"XXX: old indicator to track hashsum of response entity."
data = None
@property
def url(self):
# XXX: update this with data from content-location
return self.request.url
def __init__(self, request):
"Determine and open cache location, get descriptor backend. "
super(CachingProtocol, self).__init__()
self.request = request
self.data = None
# Track server response
self.__status, self.__message = None, None
def has_response(self):
return self.__status and self.__message
def prepare_direct_response(self,request):
"""
        Serve either a proxy page, a replacement for blocked content, or static
        content, all directly from local storage.
        Returns true when a direct response is ready.
"""
host, port = request.hostinfo
verb, path, proto = request.envelope
# XXX: move this to request phase
if port == Runtime.PORT:
mainlog.info("Direct request: %s", path)
localhosts = ( 'localhost', Runtime.HOSTNAME, '127.0.0.1', '127.0.1.1' )
assert host in localhosts, "Cannot service for %s, use from %s" % (host, localhosts)
self.Response = Response.ProxyResponse
# XXX: Respond by writing message as plain text, e.g echo/debug it:
#self.Response = Response.DirectResponse
# Filter request by regex from rules.drop
filtered_path = "%s%s" % ( host, path )
m = Rules.Drop.match( filtered_path )
if m:
self.set_blocked_response( path )
mainlog.note('Dropping connection, '
'request matches pattern: %r.', m)
def prepare_nocache_response(self):
"Blindly respond for NoCache rule matches. "
pattern = Rules.NoCache.match( self.url )
if pattern:
mainlog.note('Not caching request, matches pattern: %r.', pattern)
self.Response = Response.BlindResponse
return True
def set_blocked_response(self, path):
"Respond to client by writing filter warning about blocked content. "
if '?' in path or '#' in path:
pf = path.find( '#' )
pq = path.find( '?' )
p = len( path )
if pf > 0: p = pf
if pq > 0: p = pq
nameext = os.path.splitext( path[:p] )
else:
nameext = os.path.splitext( path )
if len( nameext ) == 2 and nameext[1][1:] in Params.IMG_TYPE_EXT:
self.Response = Response.BlockedImageContentResponse
else:
self.Response = Response.BlockedContentResponse
def get_size(self):
return self.data.descriptor.size;
def set_size(self, size):
self.data.descriptor.size = size
size = property( get_size, set_size )
def get_mtime(self):
return self.cache.mtime;
def set_mtime(self, mtime):
self.cache.mtime = mtime
mtime = property( get_mtime, set_mtime )
def read(self, pos, size):
return self.cache.read( pos, size )
def write(self, chunk):
return self.cache.write( chunk )
def tell(self):
return self.cache.tell()
def finish(self):
self.data.finish_response()
def __str__(self):
return "[CachingProtocol %s]" % hex(id(self))
class HttpProtocol(CachingProtocol):
rewrite = None
def __init__(self,request):
super(HttpProtocol, self).__init__(request)
host, port = request.hostinfo
verb, path, proto = request.envelope
# Serve direct response
self.prepare_direct_response(request)
if self.Response:
self.__socket = None
return
# Prepare to forward request
self.data = Resource.ProxyData(self)
# Skip server-round trip in static mode
if Runtime.STATIC: # and self.cache.full: # FIXME
mainlog.note('Static mode; serving file directly from cache')
self.__socket = None
if self.data.prepare_static():
self.Response = Response.DataResponse
else:
self.Response = Response.NotFoundResponse
return
proxy_req_headers = self.data.prepare_request( request )
mainlog.debug("Prepared request headers")
for key in proxy_req_headers:
mainlog.debug('> %s: %s',
key, proxy_req_headers[ key ].replace( '\r\n', ' > ' ) )
# Forward request to remote server, fiber will handle this
head = 'GET /%s HTTP/1.1' % path
# FIXME return proper HTTP error upon connection failure
try:
self.__socket = connect(request.hostinfo)
except Exception, e:
self.Response = Response.ExceptionResponse(self, request, e )
return
self.__sendbuf = '\r\n'.join(
[ head ] + map( ': '.join, proxy_req_headers.items() ) + [ '', '' ] )
self.__recvbuf = ''
        # Proxy protocol continues in self.recv after server response headers are
# parsed, before the response entity is read from the remote server
self.__parse = HttpProtocol.__parse_head
@property
def cache(self):
# XXX: the other way around?
return self.data.cache
def hasdata(self):
"Indicator wether Protocol object has more request data available. "
return bool( self.__sendbuf )
def send(self, sock):
"fiber hook to send request data. "
assert self.hasdata(), "no data"
bytecnt = sock.send( self.__sendbuf )
self.__sendbuf = self.__sendbuf[ bytecnt: ]
def __parse_head(self, chunk):
eol = chunk.find( '\n' ) + 1
assert eol
line = chunk[ :eol ]
mainlog.note("%s: Server responds %r",self, line.strip())
fields = line.split()
assert (2 <= len( fields )) \
and fields[ 0 ].startswith( 'HTTP/' ) \
and fields[ 1 ].isdigit(), 'invalid header line: %r' % line
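        # e.g. the status line 'HTTP/1.1 200 OK' splits into
        # ['HTTP/1.1', '200', 'OK']: fields[1] becomes the numeric status and
        # the remaining fields are rejoined as the reason phrase below.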
self.__status = int( fields[ 1 ] )
self.__message = ' '.join( fields[ 2: ] )
self.__args = {}
mainlog.info("%s: finished parse_head (%s, %s)",self, self.__status, self.__message)
self.__parse = HttpProtocol.__parse_args
return eol
def __parse_args(self, chunk):
eol = chunk.find( '\n' ) + 1
assert eol
line = chunk[ :eol ]
if ':' in line:
mainlog.debug('> '+ line.rstrip())
key, value = line.split( ':', 1 )
if key.lower() in HTTP.Header_Map:
key = HTTP.Header_Map[key.lower()]
else:
mainlog.warn("Warning: %r not a known HTTP (response) header (%r)",
key,value.strip())
key = key.title() # XXX: bad? :)
if key in self.__args:
self.__args[ key ] += '\r\n' + key + ': ' + value.strip()
else:
self.__args[ key ] = value.strip()
elif line in ( '\r\n', '\n' ):
mainlog.note("%s: finished parsing args", self)
self.__parse = None
else:
mainlog.err('Error: ignored server response header line: '+ line)
return eol
def recv(self, sock):
""""
Process server response until headers are fully parsed, then
prepare response handler.
"""
assert not self.hasdata(), "has data"
chunk = sock.recv( Params.MAXCHUNK, socket.MSG_PEEK )
mainlog.info("%s: recv'd chunk (%i)",self, len(chunk))
assert chunk, 'server closed connection before sending '\
'a complete message header, '\
'parser: %r, data: %r' % (self.__parse, self.__recvbuf)
self.__recvbuf += chunk
while self.__parse:
bytecnt = self.__parse(self, self.__recvbuf )
assert bytecnt
# sock.recv( len( chunk ) )
# return
self.__recvbuf = self.__recvbuf[ bytecnt: ]
sock.recv( len( chunk ) - len( self.__recvbuf ) )
# Server response header was parsed
self.chunked = self.__args.pop( 'Transfer-Encoding', None )
# XXX: transfer-encoding, chunking.. to client too?
# Check wether to step back now
if self.prepare_nocache_response():
self.data.descriptor = None
return
# Process and update headers before deferring to response class
# 2xx
if self.__status in ( HTTP.OK, ):
mainlog.info("%s: Caching new download. ", self)
self.data.finish_request()
# self.recv_entity()
self.set_dataresponse();
elif self.__status in ( HTTP.MULTIPLE_CHOICES, ):
assert False, HTTP.MULTIPLE_CHOICES
elif self.__status == HTTP.PARTIAL_CONTENT \
and self.cache.partial:
mainlog.debug("Updating partial download. ")
self.__args = self.data.prepare_response()
startpos, endpos = HTTP.parse_content_range(self.__args['Content-Range'])
assert endpos == '*' or endpos == self.data.descriptor.size, \
"Expected server to continue to end of resource."
if self.__args['ETag']:
assert self.__args['ETag'].strip('"') == self.data.descriptor.etag, (
self.__args['ETag'], self.data.descriptor.etag )
self.recv_part()
self.set_dataresponse();
# 3xx: redirects
elif self.__status in (HTTP.FOUND,
HTTP.MOVED_PERMANENTLY,
HTTP.TEMPORARY_REDIRECT):
self.data.finish_request()
# XXX:
#location = self.__args.pop( 'Location', None )
# self.descriptor.move( self.cache.path, self.__args )
# self.cache.remove_partial()
self.Response = Response.BlindResponse
elif self.__status == HTTP.NOT_MODIFIED:
assert self.cache.full, "XXX sanity"
mainlog.info("Reading complete file from cache at %s" %
self.cache.path)
self.data.finish_request()
self.Response = Response.DataResponse
# 4xx: client error
elif self.__status in ( HTTP.FORBIDDEN, HTTP.METHOD_NOT_ALLOWED ):
if self.data:
self.data.set_broken( self.__status )
self.Response = Response.BlindResponse
elif self.__status in ( HTTP.NOT_FOUND, HTTP.GONE ):
self.Response = Response.BlindResponse
#if self.descriptor:
# self.descriptor.update( self.__args )
elif self.__status in ( HTTP.REQUEST_RANGE_NOT_STATISFIABLE, ):
if self.cache.partial:
mainlog.warn("Warning: Cache corrupted?: %s", self.url)
self.cache.remove_partial()
elif self.cache.full:
self.cache.remove_full()
# XXX
# if self.descriptor:
# self.descriptor.drop()
# log("Dropped descriptor: %s" % self.url)
self.Response = Response.BlindResponse
else:
mainlog.warn("Warning: unhandled: %s, %s", self.__status, self.url)
self.Response = Response.BlindResponse
# def recv_entity(self):
# """
# Prepare to receive new entity.
# """
# if self.cache.full:
# log("HttpProtocol.recv_entity: overwriting cache: %s" %
# self.url, Params.LOG_NOTE)
# self.cache.remove_full()
# self.cache.open_new()
# else:
# log("HttpProtocol.recv_entity: new cache: %s" %
# self.url, Params.LOG_NOTE)
# self.cache.open_new()
# self.cache.stat()
# assert self.cache.partial
def recv_part(self):
"""
Prepare to receive partial entity.
"""
byterange = self.__args.pop( 'Content-Range', 'none specified' )
assert byterange.startswith( 'bytes ' ), \
'unhandled content-range type: %s' % byterange
byterange, size = byterange[ 6: ].split( '/' )
beg, end = byterange.split( '-' )
self.size = int( size )
# Sanity check
assert self.size == int( end ) + 1, \
"Complete range %r should match entity size of %s"%(end, self.size)
self.cache.open_partial( int( beg ) )
assert self.cache.partial, "Missing cache but receiving partial entity. "
def set_dataresponse(self):
mediatype = self.data.descriptor.mediatype
if Runtime.PROXY_INJECT and mediatype and 'html' in mediatype:
mainlog.note("XXX: Rewriting HTML resource: "+self.url)
self.rewrite = True
#te = self.__args.get( 'Transfer-Encoding', None )
if self.chunked:#te == 'chunked':
mainlog.info("%s: Chunked response", self)
self.Response = Response.ChunkedDataResponse
else:
self.Response = Response.DataResponse
def recvbuf(self):
return self.print_message()
def print_message(self, args=None):
if not args:
args = self.__args
return '\r\n'.join(
[ '%s %i %s' % (
self.request.envelope[2],
self.__status,
self.__message ) ] +
map( ': '.join, args.items() ) + [ '', '' ] )
def responsebuf(self):
return self.print_message(self.__args)
def args(self):
try:
return self.__args.copy()
except AttributeError, e:
return {}
#return hasattr(self, '__args') and self.__args.copy() or {}
def socket(self):
return self.__socket
def __str__(self):
return "[HttpProtocol %s]" % hex(id(self))
"""
class FtpProtocol( CachingProtocol ):
Response = None
def __init__(self,request):
super(FtpProtocol, self).__init__( request )
if Runtime.STATIC and self.cache.full:
self.__socket = None
log("Static FTP cache : %s" % self.url)
self.cache.open_full()
self.Response = Response.DataResponse
return
self.__socket = connect(request.hostinfo)
self.__path = request.envelope[1]
self.__sendbuf = ''
self.__recvbuf = ''
self.__handle = FtpProtocol.__handle_serviceready
def socket(self):
return self.__socket
def hasdata(self):
return self.__sendbuf != ''
def send(self, sock):
assert self.hasdata()
bytecnt = sock.send( self.__sendbuf )
self.__sendbuf = self.__sendbuf[ bytecnt: ]
def recv(self, sock):
assert not self.hasdata()
chunk = sock.recv( Params.MAXCHUNK )
assert chunk, 'server closed connection prematurely'
self.__recvbuf += chunk
while '\n' in self.__recvbuf:
reply, self.__recvbuf = self.__recvbuf.split( '\n', 1 )
log('S: %s' % reply.rstrip(), 2)
if reply[ :3 ].isdigit() and reply[ 3 ] != '-':
self.__handle(self, int( reply[ :3 ] ), reply[ 4: ] )
log('C: %s' % self.__sendbuf.rstrip(), 2)
def __handle_serviceready(self, code, line):
assert code == 220, \
'server sends %i; expected 220 (service ready)' % code
self.__sendbuf = 'USER anonymous\r\n'
self.__handle = FtpProtocol.__handle_password
def __handle_password(self, code, line):
assert code == 331, \
'server sends %i; expected 331 (need password)' % code
self.__sendbuf = 'PASS anonymous@\r\n'
self.__handle = FtpProtocol.__handle_loggedin
def __handle_loggedin(self, code, line):
assert code == 230, \
'server sends %i; expected 230 (user logged in)' % code
self.__sendbuf = 'TYPE I\r\n'
self.__handle = FtpProtocol.__handle_binarymode
def __handle_binarymode(self, code, line):
assert code == 200,\
'server sends %i; expected 200 (binary mode ok)' % code
self.__sendbuf = 'PASV\r\n'
self.__handle = FtpProtocol.__handle_passivemode
def __handle_passivemode(self, code, line):
assert code == 227, \
'server sends %i; expected 227 (passive mode)' % code
channel = eval( line.strip('.').split()[ -1 ] )
addr = '%i.%i.%i.%i' % channel[ :4 ], channel[ 4 ] * 256 + channel[ 5 ]
self.__socket = connect( addr )
self.__sendbuf = 'SIZE %s\r\n' % self.__path
self.__handle = FtpProtocol.__handle_size
def __handle_size(self, code, line):
if code == 550:
self.Response = Response.NotFoundResponse
return
assert code == 213,\
'server sends %i; expected 213 (file status)' % code
self.size = int( line )
log('File size: %s' % self.size)
self.__sendbuf = 'MDTM %s\r\n' % self.__path
self.__handle = FtpProtocol.__handle_mtime
def __handle_mtime(self, code, line):
if code == 550:
self.Response = Response.NotFoundResponse
return
assert code == 213, \
'server sends %i; expected 213 (file status)' % code
self.mtime = calendar.timegm( time.strptime(
line.rstrip(), '%Y%m%d%H%M%S' ) )
log('Modification time: %s' % time.strftime(
Params.TIMEFMT, time.gmtime( self.mtime ) ))
stat = self.cache.partial
if stat and stat.st_mtime == self.mtime:
self.__sendbuf = 'REST %i\r\n' % stat.st_size
self.__handle = FtpProtocol.__handle_resume
else:
stat = self.cache.full
if stat and stat.st_mtime == self.mtime:
log("Unmodified FTP cache : %s" % self.url)
self.cache.open_full()
self.Response = Response.DataResponse
else:
self.cache.open_new()
self.__sendbuf = 'RETR %s\r\n' % self.__path
self.__handle = FtpProtocol.__handle_data
def __handle_resume(self, code, line):
assert code == 350, 'server sends %i; ' \
'expected 350 (pending further information)' % code
self.cache.open_partial()
self.__sendbuf = 'RETR %s\r\n' % self.__path
self.__handle = FtpProtocol.__handle_data
def __handle_data(self, code, line):
if code == 550:
self.Response = Response.NotFoundResponse
return
assert code == 150, \
'server sends %i; expected 150 (file ok)' % code
self.Response = Response.DataResponse
"""
class ProxyProtocol:
"""
"""
Response = Response.ProxyResponse
data = None
def __init__(self,request):
method, reqname, proto = request.envelope
assert reqname.startswith('/'), reqname
self.reqname = reqname[1:]
self.status = HTTP.OK
        if method != 'GET':
self.status = HTTP.METHOD_NOT_ALLOWED
if self.reqname not in Response.ProxyResponse.urlmap.keys():
self.status = HTTP.NOT_FOUND
assert proto in ('', 'HTTP/1.0', 'HTTP/1.1'), proto
def socket(self):
return None
def recvbuf(self):
return ''
def hasdata(self):
return True
def send(self, sock):
bytecnt = sock.send( self.__sendbuf )
self.__sendbuf = self.__sendbuf[ bytecnt: ]
if not self.__sendbuf:
self.Response = Response.BlindResponse
def done(self):
pass
def has_response(self):
return False
| gpl-3.0 | -626,849,070,475,962,400 | 26.827795 | 87 | 0.660026 | false |
TakashiMatsuda/sag_svm | scaling.py | 1 | 1287 | #!/Users/takashi/.pyenv/shims/python
import numpy as np
import math
def scaling(data):
"""
    Scaling. Make x's average 0 and variance 1
    => CHANGED. Divide by the standard deviation
"""
print("input:")
print(data)
scaled_data = np.zeros_like(data)
"""
average section
"""
sumlist = np.sum(data, axis=0)
avglist = np.array([d / len(data) for d in sumlist])
print("avglist:")
print(avglist)
for i, x in enumerate(data):
scaled_data[i] = np.array([x[j] - avglist[j] for j in range(len(x))])
"""
variance section
"""
vrlist = np.var(scaled_data, axis=0)
print("average=0 data:")
print(scaled_data)
return np.divide(scaled_data, vrlist)
"""
vr = (math.sqrt(np.sum(np.square(scaled_data)))) / len(data)
scaled_data = np.array([x / vr for x in scaled_data])
"""
# print(scaled_data)
# return scaled_data
def test_scaling():
"""
TODO: More Precise Test is necessary
"""
data = [[(i+1) * (j+1) for i in range(5)] for j in range(2)]
res = scaling(data)
print("res:")
print(res)
"""
average test
"""
assert np.sum(res, axis=0)[1] == 0
"""
variance test
"""
assert np.var(res, axis=0)[1] == 1
| mit | -6,234,009,568,127,102,000 | 22.4 | 77 | 0.554002 | false |
andyneff/voxel-globe | voxel_globe/build_voxel_world/tasks.py | 1 | 6207 | from voxel_globe.common_tasks import shared_task, VipTask
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
import logging
import os
@shared_task(base=VipTask, bind=True)
def run_build_voxel_model(self, image_collection_id, scene_id, bbox,
skip_frames, cleanup=True, history=None):
from distutils.dir_util import remove_tree
from shutil import move
import random
from vsi.tools.redirect import Redirect, Logger as LoggerWrapper
from voxel_globe.meta import models
from voxel_globe.tools.camera import get_krt
import voxel_globe.tools
from boxm2_scene_adaptor import boxm2_scene_adaptor
from vil_adaptor import load_image
from vpgl_adaptor import load_perspective_camera
from voxel_globe.tools.wget import download as wget
from vsi.vxl.create_scene_xml import create_scene_xml
from vsi.tools.dir_util import copytree, mkdtemp
with Redirect(stdout_c=LoggerWrapper(logger, lvl=logging.INFO),
stderr_c=LoggerWrapper(logger, lvl=logging.WARNING)):
openclDevice = os.environ['VIP_OPENCL_DEVICE']
opencl_memory = os.environ.get('VIP_OPENCL_MEMORY', None)
scene = models.Scene.objects.get(id=scene_id)
imageCollection = models.ImageCollection.objects.get(\
id=image_collection_id).history(history);
imageList = imageCollection.images.all();
with voxel_globe.tools.task_dir('voxel_world') as processing_dir:
logger.warning(bbox)
if bbox['geolocated']:
create_scene_xml(openclDevice, 3, float(bbox['voxel_size']),
lla1=(float(bbox['x_min']), float(bbox['y_min']),
float(bbox['z_min'])),
lla2=(float(bbox['x_max']), float(bbox['y_max']),
float(bbox['z_max'])),
origin=scene.origin, model_dir='.', number_bins=1,
output_file=open(os.path.join(processing_dir, 'scene.xml'), 'w'),
n_bytes_gpu=opencl_memory)
else:
create_scene_xml(openclDevice, 3, float(bbox['voxel_size']),
lvcs1=(float(bbox['x_min']), float(bbox['y_min']),
float(bbox['z_min'])),
lvcs2=(float(bbox['x_max']), float(bbox['y_max']),
float(bbox['z_max'])),
origin=scene.origin, model_dir='.', number_bins=1,
output_file=open(os.path.join(processing_dir, 'scene.xml'), 'w'),
n_bytes_gpu=opencl_memory)
counter = 1;
imageNames = []
cameraNames = []
os.mkdir(os.path.join(processing_dir, 'local'))
#Prepping
for image in imageList:
self.update_state(state='INITIALIZE', meta={'stage':'image fetch',
'i':counter,
'total':len(imageList)})
image = image.history(history)
(K,R,T,o) = get_krt(image.history(history), history=history)
krtName = os.path.join(processing_dir, 'local', 'frame_%05d.krt' % counter)
with open(krtName, 'w') as fid:
print >>fid, (("%0.18f "*3+"\n")*3) % (K[0,0], K[0,1], K[0,2],
K[1,0], K[1,1], K[1,2], K[2,0], K[2,1], K[2,2]);
print >>fid, (("%0.18f "*3+"\n")*3) % (R[0,0], R[0,1], R[0,2],
R[1,0], R[1,1], R[1,2], R[2,0], R[2,1], R[2,2]);
print >>fid, ("%0.18f "*3+"\n") % (T[0,0], T[1,0], T[2,0]);
imageName = image.originalImageUrl;
extension = os.path.splitext(imageName)[1]
localName = os.path.join(processing_dir, 'local',
'frame_%05d%s' % (counter, extension));
wget(imageName, localName, secret=True)
counter += 1;
imageNames.append(localName)
cameraNames.append(krtName)
variance = 0.06
vxl_scene = boxm2_scene_adaptor(os.path.join(processing_dir, "scene.xml"),
openclDevice);
current_level = 0;
loaded_imgs = [];
loaded_cams = [];
for i in range(0, len(imageNames), skip_frames):
logger.debug("i: %d img name: %s cam name: %s", i, imageNames[i],
cameraNames[i])
self.update_state(state='PRELOADING', meta={'stage':'image load',
'i':i,
'total':len(imageNames)})
img, ni, nj = load_image(imageNames[i])
loaded_imgs.append(img)
pcam = load_perspective_camera(cameraNames[i])
loaded_cams.append(pcam)
refine_cnt = 5;
for rfk in range(0, refine_cnt, 1):
pair = zip(loaded_imgs, loaded_cams)
random.shuffle(pair)
for idx, (img, cam) in enumerate(pair):
self.update_state(state='PROCESSING', meta={'stage':'update',
'i':rfk+1, 'total':refine_cnt, 'image':idx+1,
'images':len(loaded_imgs)})
logger.debug("refine_cnt: %d, idx: %d", rfk, idx)
vxl_scene.update(cam,img,True,True,None,openclDevice[0:3],variance,
tnear = 1000.0, tfar = 100000.0);
logger.debug("writing cache: %d", rfk)
vxl_scene.write_cache();
logger.debug("wrote cache: %d", rfk)
if rfk < refine_cnt-1:
self.update_state(state='PROCESSING', meta={'stage':'refine',
'i':rfk,
'total':refine_cnt})
logger.debug("refining %d...", rfk)
refine_device = openclDevice[0:3]
if refine_device == 'cpu':
refine_device = 'cpp'
vxl_scene.refine(0.3, refine_device);
vxl_scene.write_cache();
voxel_world_dir = mkdtemp(dir=os.environ['VIP_STORAGE_DIR'])
copytree(processing_dir, voxel_world_dir, ignore=lambda x,y:['images'])
models.VoxelWorld.create(
name='%s world (%s)' % (imageCollection.name, self.request.id),
origin=scene.origin,
directory=voxel_world_dir,
service_id=self.request.id).save();
| mit | 2,948,493,370,364,752,000 | 38.535032 | 83 | 0.544546 | false |
sunlightlabs/tcamp | tcamp/reg/forms.py | 1 | 1825 | from django import forms
from localflavor.us.us_states import STATE_CHOICES
from bootstrap_toolkit.widgets import BootstrapTextInput
import datetime
from reg.models import Sale, Ticket, AMBASSADOR_PROGRAM_CHOICES
class SaleForm(forms.ModelForm):
class Meta:
model = Sale
class TicketForm(forms.ModelForm):
#ambassador_program = forms.ChoiceField(initial="no", widget=forms.RadioSelect, choices=AMBASSADOR_PROGRAM_CHOICES, label="Would you like to be part of the TCamp Ambassador Program?")
class Meta:
model = Ticket
exclude = ['event', 'sale', 'success', 'checked_in', 'lobby_day', 'ambassador_program']
widgets = {
'twitter': BootstrapTextInput(attrs={'placeholder': "e.g., \"tcampdc\""}),
}
_current_year = datetime.datetime.now().year
class PaymentForm(forms.Form):
first_name = forms.CharField(max_length=255)
last_name = forms.CharField(max_length=255)
email = forms.EmailField()
address1 = forms.CharField(max_length=1024, label="Address Line 1")
address2 = forms.CharField(max_length=1024, label="Address Line 2", required=False)
city = forms.CharField(max_length=255)
state = forms.CharField(max_length=255, widget=forms.Select(choices=STATE_CHOICES + (('non-us', 'Outside the USA'),)))
zip = forms.CharField(max_length=255, label="Zip/Postal Code")
exp_month = forms.ChoiceField(initial="01", label="Expiration", choices=(("01","01"),("02","02"),("03","03"),("04","04"),("05","05"),("06","06"),("07","07"),("08","08"),("09","09"),("10","10"),("11","11"),("12","12")))
exp_year = forms.ChoiceField(initial="2014", label="Year", choices=tuple([2*(str(_current_year + i),) for i in xrange(11)]))
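    # each year choice is a (value, label) 2-tuple built by repeating the same
    # string, e.g. ('2014', '2014'), covering the current year and the next ten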
# will be encrypted
number = forms.CharField(max_length=4096)
cvv = forms.CharField(max_length=4096) | bsd-3-clause | 5,021,559,516,469,790,000 | 48.351351 | 222 | 0.673973 | false |
ewheeler/vaxtrack | vaxapp/migrations/0018_auto__add_field_countrystockstats_days_of_stock_data.py | 1 | 12586 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'CountryStockStats.days_of_stock_data'
db.add_column('vaxapp_countrystockstats', 'days_of_stock_data', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='days_of_stock_data', null=True, to=orm['vaxapp.Dicty']), keep_default=False)
def backwards(self, orm):
# Deleting field 'CountryStockStats.days_of_stock_data'
db.delete_column('vaxapp_countrystockstats', 'days_of_stock_data_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'vaxapp.alert': {
'Meta': {'object_name': 'Alert'},
'analyzed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'countrystock': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.CountryStock']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reference_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'risk': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '2', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '2', 'null': 'True', 'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '2', 'null': 'True', 'blank': 'True'})
},
'vaxapp.country': {
'Meta': {'object_name': 'Country'},
'iso2_code': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso3_code': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'numerical_code': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'vaxapp.countrystock': {
'Meta': {'object_name': 'CountryStock'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.Country']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'md5_hash': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'vaccine': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.Vaccine']"})
},
'vaxapp.countrystockstats': {
'Meta': {'object_name': 'CountryStockStats'},
'actual_cons_rate': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'actual_cons_rate'", 'null': 'True', 'to': "orm['vaxapp.Dicty']"}),
'analyzed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'annual_demand': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'annual_demand'", 'null': 'True', 'to': "orm['vaxapp.Dicty']"}),
'consumed_in_year': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'consumed_in_year'", 'null': 'True', 'to': "orm['vaxapp.Dicty']"}),
'countrystock': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.CountryStock']"}),
'days_of_stock': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'days_of_stock_data': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'days_of_stock_data'", 'null': 'True', 'to': "orm['vaxapp.Dicty']"}),
'demand_for_period': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'doses_delivered_this_year': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'doses_on_orders': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'est_daily_cons': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nine_by_year': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'nine_by_year'", 'null': 'True', 'to': "orm['vaxapp.Dicty']"}),
'percent_coverage': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'reference_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'three_by_year': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'three_by_year'", 'null': 'True', 'to': "orm['vaxapp.Dicty']"})
},
'vaxapp.dicty': {
'Meta': {'object_name': 'Dicty'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '160'})
},
'vaxapp.document': {
'Meta': {'object_name': 'Document'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.utcnow'}),
'date_exception': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_process_end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_process_start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_queued': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_stored': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_uploaded': ('django.db.models.fields.DateTimeField', [], {}),
'exception': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'local_document': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'remote_document': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '1'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36'})
},
'vaxapp.keyval': {
'Meta': {'object_name': 'KeyVal'},
'dicty': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.Dicty']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '160'}),
'val': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'})
},
'vaxapp.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.Country']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'vaxapp.vaccine': {
'Meta': {'object_name': 'Vaccine'},
'abbr_en': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'abbr_en_alt': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'abbr_fr': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'abbr_fr_alt': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.VaccineGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'})
},
'vaxapp.vaccinegroup': {
'Meta': {'object_name': 'VaccineGroup'},
'abbr_en': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'abbr_fr': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['vaxapp']
| bsd-3-clause | -6,147,009,584,548,764,000 | 78.658228 | 228 | 0.546719 | false |
OpenMOOC/moocng | moocng/api/urls.py | 1 | 2050 | # -*- coding: utf-8 -*-
# Copyright 2012-2013 UNED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls import include, patterns, url
from tastypie.api import Api
from moocng.api import resources
v1_api = Api(api_name='v1')
v1_api.register(resources.UnitResource())
v1_api.register(resources.KnowledgeQuantumResource())
v1_api.register(resources.PrivateKnowledgeQuantumResource())
v1_api.register(resources.AttachmentResource())
v1_api.register(resources.QuestionResource())
v1_api.register(resources.PrivateQuestionResource())
v1_api.register(resources.OptionResource())
v1_api.register(resources.AnswerResource())
v1_api.register(resources.ActivityResource())
v1_api.register(resources.CourseResource())
v1_api.register(resources.UserResource())
v1_api.register(resources.PeerReviewAssignmentResource())
v1_api.register(resources.PrivatePeerReviewAssignmentResource())
v1_api.register(resources.EvaluationCriterionResource())
v1_api.register(resources.PrivateEvaluationCriterionResource())
v1_api.register(resources.PeerReviewSubmissionsResource())
v1_api.register(resources.PeerReviewReviewsResource())
v1_api.register(resources.AssetResource())
v1_api.register(resources.AssetAvailabilityResource())
v1_api.register(resources.ReservationResource())
v1_api.register(resources.PrivateAssetResource())
v1_api.register(resources.PrivateAssetAvailabilityResource())
v1_api.register(resources.ReservationCount())
v1_api.register(resources.OccupationInformation())
urlpatterns = patterns(
'',
url(r'', include(v1_api.urls))
)
| apache-2.0 | -2,709,888,307,165,766,000 | 39.196078 | 74 | 0.801463 | false |
georgekis/salary | main/control/pay.py | 1 | 2523 | from flask.ext import wtf
import auth
import flask
import model
import wtforms
import util
from main import app
###############################################################################
# Create
###############################################################################
class PayUpdateForm(wtf.Form):
name = wtforms.StringField('Name', [wtforms.validators.required()])
date_for = wtforms.DateField('Date For', [wtforms.validators.required()])
date_paid = wtforms.DateField('Date Paid', [wtforms.validators.required()])
code = wtforms.StringField('Code', [wtforms.validators.required()])
amount = wtforms.FloatField('Amount', [wtforms.validators.required()])
add_more = wtforms.BooleanField('Add more', [wtforms.validators.optional()], default=True)
@app.route('/pay/<int:pay_id>/', methods=['GET', 'POST'])
@app.route('/pay/create/', methods=['GET', 'POST'])
@auth.login_required
def pay_update(pay_id=0):
if pay_id:
pay_db = model.Pay.get_by_id(pay_id, parent=auth.current_user_key())
else:
pay_db = model.Pay(parent=auth.current_user_key())
if not pay_db:
flask.abort(404)
form = PayUpdateForm(obj=pay_db)
if form.validate_on_submit():
form.populate_obj(pay_db)
pay_db.put()
if form.add_more.data:
return flask.redirect(flask.url_for('pay_update'))
return flask.redirect(flask.url_for('pay_list'))
return flask.render_template(
'pay/pay_update.html',
html_class='pay-update',
title=pay_db.name or 'Create Pay',
form=form,
pay_db=pay_db,
)
###############################################################################
# List
###############################################################################
@app.route('/pay/')
@auth.login_required
def pay_list():
pay_dbs, pay_cursor = auth.current_user_db().get_pay_dbs()
return flask.render_template(
'pay/pay_list.html',
html_class='pay-list',
title='Pay List',
pay_dbs=pay_dbs,
next_url=util.generate_next_url(pay_cursor),
)
###############################################################################
# Admin Pay List
###############################################################################
@app.route('/admin/pay/')
@auth.admin_required
def admin_pay_list():
pay_dbs, pay_cursor = model.Pay.get_dbs()
return flask.render_template(
'admin/pay_list.html',
html_class='admin-pay-list',
title='Pay List',
pay_dbs=pay_dbs,
next_url=util.generate_next_url(pay_cursor),
)
| mit | 1,797,666,842,330,151,400 | 30.148148 | 92 | 0.537852 | false |
postlund/pyatv | tests/scripts/script_env.py | 1 | 4004 | """Simulated environment for functional script testing."""
from contextlib import contextmanager
from importlib import import_module
from io import StringIO
import sys
from unittest.mock import patch
from aiohttp.test_utils import AioHTTPTestCase
import pyatv
from pyatv.const import Protocol
from tests import fake_udns
from tests.fake_device import FakeAppleTV
from tests.utils import faketime, stub_sleep, unstub_sleep
IP_1 = "10.0.0.1"
IP_2 = "127.0.0.1"
DMAP_ID = "dmapid"
MRP_ID = "mrp_id"
AIRPLAY_ID = "AA:BB:CC:DD:EE:FF"
@contextmanager
def capture_output(argv, inputs):
new_out, new_err, new_in = StringIO(), StringIO(), StringIO(inputs)
old_out, old_err, old_in = sys.stdout, sys.stderr, sys.stdin
old_argv = sys.argv
try:
sys.stdout, sys.stderr, sys.stdin = new_out, new_err, new_in
sys.argv = argv
yield sys.stdout, sys.stderr
finally:
sys.stdout, sys.stderr, sys.stdin = old_out, old_err, old_in
sys.argv = old_argv
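# Typical use, mirroring run_script() below: swap in fake argv/stdin/stdout for
# the duration of a call and inspect the captured output afterwards, e.g.
#
#   with capture_output(["example_script", "--id", "123"], "") as (out, err):
#       ...  # invoke the script's entry point here
#   print(out.getvalue())
#
# ("example_script" and its arguments are placeholders, not real pyatv scripts.)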
class ScriptTest(AioHTTPTestCase):
async def setUpAsync(self):
await AioHTTPTestCase.setUpAsync(self)
stub_sleep()
self.setup_environment()
await self.fake_udns.start()
self.stdout = None
self.stderr = None
self.retcode = None
self.inputs = []
def tearDown(self):
unstub_sleep()
AioHTTPTestCase.tearDown(self)
def setup_environment(self):
airplay_port = self.server.port
self.fake_udns.add_service(
fake_udns.homesharing_service(
DMAP_ID, "Apple TV 1", "aaaa", addresses=[IP_1]
)
)
self.fake_udns.add_service(
fake_udns.mrp_service(
"DDDD",
"Apple TV 2",
MRP_ID,
addresses=[IP_2],
port=self.fake_atv.get_port(Protocol.MRP),
)
)
self.fake_udns.add_service(
fake_udns.airplay_service(
"Apple TV 2", AIRPLAY_ID, addresses=[IP_2], port=airplay_port
)
)
self.airplay_usecase.airplay_playback_playing()
self.airplay_usecase.airplay_playback_idle()
async def get_application(self, loop=None):
self.fake_udns = fake_udns.FakeUdns(self.loop)
self.fake_udns.ip_filter = IP_2
self.fake_atv = FakeAppleTV(self.loop)
self.state, self.usecase = self.fake_atv.add_service(Protocol.MRP)
self.airplay_state, self.airplay_usecase = self.fake_atv.add_service(
Protocol.AirPlay
)
return self.fake_atv.app
def user_input(self, text):
self.inputs.append(text)
def has_output(self, *strings):
for string in strings:
self.assertIn(string, self.stdout)
def has_error(self, *strings):
for string in strings:
self.assertIn(string, self.stderr)
def exit(self, code):
self.assertEqual(self.retcode, code)
async def run_script(self, script, *args):
argv = [script] + list(args)
inputs = "\n".join(self.inputs) + "\n"
with capture_output(argv, inputs) as (out, err):
udns_port = str(self.fake_udns.port)
with patch.dict("os.environ", {"PYATV_UDNS_PORT": udns_port}):
with fake_udns.stub_multicast(self.fake_udns, self.loop):
with faketime("pyatv", 0):
# Stub away port knocking and ignore result (not tested here)
with patch("pyatv.support.knock.knock") as mock_knock:
async def _no_action(*args):
pass
mock_knock.side_effect = _no_action
module = import_module(f"pyatv.scripts.{script}")
self.retcode = await module.appstart(self.loop)
self.stdout = out.getvalue()
self.stderr = err.getvalue()
| mit | 2,771,662,040,255,258,600 | 31.290323 | 85 | 0.57992 | false |
goujonpa/jeankevin | modules/numberCoupleClass.py | 1 | 2707 | #!/usr/local/bin/python
# -*-coding:Utf-8 -*
from modules.individualClass import Individual
import random
class NumberCouple(Individual):
"""NumberCouple class: represent one couple of real individual, inherits from the Individual class
Properties:
    key : standardized representation of the problem [[x1, 'real'], [x2, 'real']]
    fitness : = 1/(1+f(x)) with f(x) = 100(x2 - x1^2)^2 + (x1 - 1)^2
+ every property from the Individual class
Methods:
__init__()
get_binary_standard()
get_real_standard()
get_binary_unstandardized()
get_real_unstandardized
_calcul_fitness()
_random_initialisation()
+ every method from the Individual Class
"""
def __init__(self, key=None):
"""Class constuctor"""
super(NumberCouple, self).__init__(key)
def _random_initialisation(self):
"""Randomly initialises an individual, Returns a random key"""
key = list()
for i in range(0, 2):
x = random.uniform(-2.048, 2.048)
key.append((x, 'real'))
return key
def _calcul_fitness(self):
"""Calculates the individuals fitness"""
x1, x2 = self._key
x1 = x1[0]
x2 = x2[0]
functionResult = 100 * pow((x2 - pow(x1, 2)), 2) + pow((x1 - 1), 2)
fitness = 1.0 / (1 + functionResult)
return fitness
def get_binary_standard(self):
"""Returns the standardised representation of the key for binary manipulations"""
x1, x2 = self.key
x1 = 1000 * x1[0]
x2 = 1000 * x2[0]
result = list()
result.append((self._binarize(x1, 12), 15, 3, 14))
result.append((self._binarize(x2, 12), 15, 3, 14))
return result
def get_real_standard(self):
"""Returns the standardised representation of the key for real manipulations"""
x1, x2 = self.key
x1 = 1000 * x1[0]
x2 = 1000 * x2[0]
result = list()
result.append((self._realize(x1, 12), 13, 9, 12))
result.append((self._realize(x2, 12), 13, 9, 12))
return result
@staticmethod
def get_binary_unstandardized(l):
"""Returns the unstandardisation of a standardised binary representation of the key"""
key = list()
for element in l:
a = int(element, 2)
a = a / 1000.0
key.append((a, 'real'))
return key
@staticmethod
def get_real_unstandardized(l):
"""Returns the unstandardisation of a real binary representation of the key"""
key = list()
for element in l:
a = int(element)
a = a / 1000.0
key.append((a, 'real'))
return key
| mit | -5,521,277,182,482,253,000 | 30.847059 | 102 | 0.574437 | false |
makerhanoi/tagio | tagio/views/api/__init__.py | 1 | 1249 | """API."""
from flask import Blueprint, jsonify, request
from tagio.models.user import User
from tagio.extensions import csrf_protect
from . import user
__all__ = ('user',)
blueprint = Blueprint('api',
__name__,
url_prefix='/api/v<string:version>')
@blueprint.route('/login', methods=['POST'])
@csrf_protect.exempt
def login(version):
"""Login.
    Log in to retrieve a token.
"""
if version == '1':
return _login_first_version()
return jsonify({'code': 1, 'msg': 'Invalid version'})
def _login_first_version():
username = request.form.get('username')
password = request.form.get('password')
if username is None or password is None:
return jsonify({'code': 2, 'msg': 'Invalid parameter'})
username = username.strip().lower()
obj = User.query.filter(User.username == username).first()
if obj is None:
return jsonify({'code': 2, 'msg': 'Invalid parameter'})
flag = obj.check_password(password)
if not flag:
return jsonify({'code': 2, 'msg': 'Invalid parameter'})
if not obj.active:
return jsonify({'code': 2, 'msg': 'Invalid parameter'})
return jsonify({'code': 0, 'token': obj.get_auth_token()})
| bsd-3-clause | 8,983,722,349,411,378,000 | 23.98 | 63 | 0.610088 | false |
oryxr/dxf2gcode | Core/Config.py | 1 | 14468 | # -*- coding: utf-8 -*-
############################################################################
#
# Copyright (C) 2009-2014
#    Christian Kohlöffel
# Jean-Paul Schouwstra
#
# This file is part of DXF2GCODE.
#
# DXF2GCODE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DXF2GCODE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with DXF2GCODE. If not, see <http://www.gnu.org/licenses/>.
#
############################################################################
import os
from Core.configobj import ConfigObj, flatten_errors
from Core.validate import Validator
#from dotdictlookup import DictDotLookup
#import time
import pprint
import Core.constants as c
import Core.Globals as g
from d2gexceptions import *
from PyQt4 import QtGui, QtCore
import logging
logger = logging.getLogger("Core.Config")
CONFIG_VERSION = "9.2"
"""
version tag - increment this each time you edit CONFIG_SPEC;
it is compared to the version number in the config file so that
old versions are recognized and skipped
"""
CONFIG_SPEC = str('''
# Section and variable names must be valid Python identifiers
# do not use whitespace in names
# do not edit the following section name:
[Version]
# do not edit the following value:
config_version = string(default = "''' + \
str(CONFIG_VERSION) + '")\n' + \
'''
[Paths]
# by default look for DXF files in
import_dir = string(default = "D:/Eclipse_Workspace/DXF2GCODE/trunk/dxf")
# export generated gcode by default to
output_dir = string(default = "D:")
[Depth_Coordinates]
axis3_retract = float(default = 15.0)
axis3_slice_depth = float(default = -1.5)
axis3_safe_margin = float(default = 3.0)
axis3_start_mill_depth = float(default = 0.0)
axis3_mill_depth = float(default = -3.0)
[Axis_letters]
ax1_letter = string(default = "X")
ax2_letter = string(default = "Y")
ax3_letter = string(default = "Z")
[Plane_Coordinates]
axis1_start_end = float(default = 0)
axis2_start_end = float(default = 0)
[General]
write_to_stdout = boolean(default = False)
live_update_export_route = boolean(default = False)
default_SplitEdges = boolean(default = False)
default_AutomaticCutterCompensation = boolean(default = False)
machine_type = option('milling', 'drag_knife', 'lathe', default = 'milling')
[Drag_Knife_Options]
dragAngle = float(default = 20)
[Route_Optimisation]
default_TSP = boolean(default = False)
# Path optimizer behaviour:
# CONSTRAIN_ORDER_ONLY: fixed Shapes and optimized Shapes can be mixed. Only order of fixed shapes is kept
# CONSTRAIN_PLACE_AFTER: optimized Shapes are always placed after any fixed Shape
TSP_shape_order = option('CONSTRAIN_ORDER_ONLY', 'CONSTRAIN_PLACE_AFTER', default = 'CONSTRAIN_ORDER_ONLY')
mutation_rate = float(default = 0.95)
max_population = integer(default = 20)
max_iterations = integer(default = 300)
begin_art = string(default = "heurestic")
[Import_Parameters]
point_tolerance = float(default = 0.001)
spline_check = integer(default = 3)
fitting_tolerance = float(default = 0.001)
[Layer_Options]
idfloatseparator = string(default = ":")
# mill options
mill_depth_identifiers = list(default = list('MillDepth', 'Md', 'TiefeGesamt', 'Tg'))
slice_depth_identifiers = list(default = list('SliceDepth', 'Sd', 'TiefeZustellung', 'Tz'))
start_mill_depth_identifiers = list(default = list('StartMillDepth', 'SMd', 'StartTiefe', 'St'))
retract_identifiers = list(default = list('RetractHeight', 'Rh', 'FreifahrHohe', 'FFh'))
safe_margin_identifiers = list(default = list('SafeMargin', 'Sm', 'SicherheitsHoehe', 'Sh'))
f_g1_plane_identifiers = list(default = list('FeedXY', 'Fxy', 'VorschubXY', 'Vxy', 'F'))
f_g1_depth_identifiers = list(default = list('FeedZ', 'Fz', 'VorschubZ', 'Vz'))
#tool options
tool_nr_identifiers = list(default = list('ToolNr', 'Tn', 'T', 'WerkzeugNummer', 'Wn'))
tool_diameter_identifiers = list(default = list('ToolDiameter', 'Td', 'WerkzeugDurchmesser', 'Wd'))
spindle_speed_identifiers = list(default = list('SpindleSpeed', 'Drehzahl', 'RPM', 'UPM', 'S'))
start_radius_identifiers = list(default = list('StartRadius', 'Sr'))
[Tool_Parameters]
[[1]]
diameter = float(default = 1.0)
speed = float(default = 1000)
start_radius = float(default = 0.2)
[[4]]
diameter = float(default = 1.0)
speed = float(default = 1500)
start_radius = float(default = 0.2)
[[10]]
diameter = float(default = 1.0)
speed = float(default = 1000)
start_radius = float(default = 0.2)
[[15]]
diameter = float(default = 1.0)
speed = float(default = 900)
start_radius = float(default = 0.2)
[[20]]
diameter = float(default = 1.0)
speed = float(default = 800.0)
start_radius = float(default = 1.0)
[[30]]
diameter = float(default = 1.0)
speed = float(default = 700.0)
start_radius = float(default = 2.0)
[[__many__]]
diameter = float(default = 3.0)
speed = float(default = 6000)
start_radius = float(default = 3.0)
[Custom_Actions]
[[custom_gcode]]
gcode = string(default = '"""(change subsection name and insert your custom GCode here. Use triple quote to place the code on several lines)"""')
[[__many__]]
gcode = string(default = "(change subsection name and insert your custom GCode here. Use triple quote to place the code on several lines)")
[Filters]
pstoedit_cmd = string(default = "C:\Program Files (x86)\pstoedit\pstoedit.exe")
pstoedit_opt = list(default = list('-f', 'dxf', '-mm'))
[Logging]
# Logging to textfile is enabled automatically for now
logfile = string(default = "logfile.txt")
# log levels are one in increasing importance:
# DEBUG INFO WARNING ERROR CRITICAL
# log events with importance >= loglevel are logged to the
# corresponding output
# this really goes to stderr
console_loglevel = option('DEBUG', 'INFO', 'WARNING', 'ERROR','CRITICAL', default = 'CRITICAL')
file_loglevel = option('DEBUG', 'INFO', 'WARNING', 'ERROR','CRITICAL', default = 'DEBUG')
# logging level for the message window
window_loglevel = option('DEBUG', 'INFO', 'WARNING', 'ERROR','CRITICAL', default = 'INFO')
[Feed_Rates]
f_g1_plane = float(default = 400)
f_g1_depth = float(default = 150)
''').splitlines()
""" format, type and default value specification of the global config file"""
class MyConfig(QtCore.QObject):
"""
This class hosts all functions related to the Config File.
"""
def __init__(self):
"""
        Initialize the configuration: locate the config file in the settings
        folder, load and validate it, and cache a few frequently used values.
"""
self.folder = os.path.join(g.folder, c.DEFAULT_CONFIG_DIR)
self.filename = os.path.join(self.folder, 'config' + c.CONFIG_EXTENSION)
        self.default_config = False  # whether a default config file had to be created
self.var_dict = dict()
self.spec = ConfigObj(CONFIG_SPEC, interpolation=False, list_values=False, _inspec=True)
#try:
self.load_config()
self.machine_type = self.vars.General['machine_type']
self.fitting_tolerance = self.vars.Import_Parameters['fitting_tolerance']
self.point_tolerance = self.vars.Import_Parameters['point_tolerance']
#except Exception, msg:
# logger.warning(self.tr("Config loading failed: %s") % (msg))
# return False
def tr(self, string_to_translate):
"""
Translate a string using the QCoreApplication translation framework
@param: string_to_translate: a unicode string
@return: the translated unicode string if it was possible to translate
"""
return unicode(QtGui.QApplication.translate("MyConfig",
string_to_translate,
None,
QtGui.QApplication.UnicodeUTF8))
def make_settings_folder(self):
"""Create settings folder if necessary"""
try:
os.mkdir(self.folder)
except OSError:
pass
def load_config(self):
"""Load Config File"""
if os.path.isfile(self.filename):
try:
# file exists, read & validate it
self.var_dict = ConfigObj(self.filename, configspec=CONFIG_SPEC)
_vdt = Validator()
result = self.var_dict.validate(_vdt, preserve_errors=True)
validate_errors = flatten_errors(self.var_dict, result)
if validate_errors:
logger.error(self.tr("errors reading %s:") % (self.filename))
for entry in validate_errors:
section_list, key, error = entry
if key is not None:
section_list.append(key)
else:
section_list.append('[missing section]')
section_string = ', '.join(section_list)
                    if error is False:
                        error = self.tr('Missing value or section.')
                    logger.error(section_string + ' = ' + str(error))
if validate_errors:
                    raise BadConfigFileError, "syntax errors in config file"
# check config file version against internal version
if CONFIG_VERSION:
fileversion = self.var_dict['Version']['config_version'] # this could raise KeyError
if fileversion != CONFIG_VERSION:
raise VersionMismatchError, (fileversion, CONFIG_VERSION)
except VersionMismatchError, values:
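                # Re-raise so a version mismatch is not swallowed by the generic Exception handler below.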
raise VersionMismatchError, (fileversion, CONFIG_VERSION)
except Exception, inst:
logger.error(inst)
(base, ext) = os.path.splitext(self.filename)
badfilename = base + c.BAD_CONFIG_EXTENSION
logger.debug(self.tr("trying to rename bad cfg %s to %s") % (self.filename, badfilename))
try:
os.rename(self.filename, badfilename)
except OSError, e:
logger.error(self.tr("rename(%s,%s) failed: %s") % (self.filename, badfilename, e.strerror))
raise
else:
logger.debug(self.tr("renamed bad varspace %s to '%s'") % (self.filename, badfilename))
self.create_default_config()
self.default_config = True
logger.debug(self.tr("created default varspace '%s'") % (self.filename))
else:
self.default_config = False
#logger.debug(self.dir())
#logger.debug(self.tr("created default varspace '%s'") %(self.filename))
#logger.debug(self.tr("read existing varspace '%s'") %(self.filename))
else:
self.create_default_config()
self.default_config = True
logger.debug(self.tr("created default varspace '%s'") % (self.filename))
        # convenience - flatten nested config dict to access it via self.vars.sectionname.varname
self.var_dict.main.interpolation = False # avoid ConfigObj getting too clever
self.vars = DictDotLookup(self.var_dict)
def create_default_config(self):
#check for existing setting folder or create one
self.make_settings_folder()
# derive config file with defaults from spec
self.var_dict = ConfigObj(configspec=CONFIG_SPEC)
_vdt = Validator()
self.var_dict.validate(_vdt, copy=True)
self.var_dict.filename = self.filename
self.var_dict.write()
def _save_varspace(self):
"""Saves Variables space"""
self.var_dict.filename = self.filename
self.var_dict.write()
def print_vars(self):
"""Prints Variables"""
print "Variables:"
for k, v in self.var_dict['Variables'].items():
print k, " = ", v
class DictDotLookup(object):
"""
    Creates objects that behave much like dictionaries, but allow nested
key access using object '.' (dot) lookups.
"""
def __init__(self, d):
for k in d:
if isinstance(d[k], dict):
self.__dict__[k] = DictDotLookup(d[k])
elif isinstance(d[k], (list, tuple)):
l = []
for v in d[k]:
if isinstance(v, dict):
l.append(DictDotLookup(v))
else:
l.append(v)
self.__dict__[k] = l
else:
self.__dict__[k] = d[k]
def __getitem__(self, name):
if name in self.__dict__:
return self.__dict__[name]
def __iter__(self):
return iter(self.__dict__.keys())
def __repr__(self):
return pprint.pformat(self.__dict__)
#if __name__ == '__main__':
# cfg_data = eval("""{
# 'foo' : {
# 'bar' : {
# 'tdata' : (
# {'baz' : 1 },
# {'baz' : 2 },
# {'baz' : 3 },
# ),
# },
# },
# 'quux' : False,
# }""")
#
# cfg = DictDotLookup(cfg_data)
#
# # iterate
# for k, v in cfg.__iter__(): #foo.bar.iteritems():
# print k, " = ", v
#
# print "cfg=", cfg
#
# # Standard nested dictionary lookup.
# print 'normal lookup :', cfg['foo']['bar']['tdata'][0]['baz']
#
# # Dot-style nested lookup.
# print 'dot lookup :', cfg.foo.bar.tdata[0].baz
#
# print "qux=", cfg.quux
# cfg.quux = '123'
# print "qux=", cfg.quux
#
# del cfg.foo.bar
# cfg.foo.bar = 4711
# print 'dot lookup :', cfg.foo.bar #.tdata[0].baz
| gpl-3.0 | 5,142,544,186,670,711,000 | 34.545455 | 149 | 0.585609 | false |
EdDev/vdsm | tests/storage_check_test.py | 1 | 15159 | #
# Copyright 2016 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import print_function
import logging
import os
import pprint
import re
import threading
import time
from contextlib import contextmanager
from fakelib import FakeLogger
from monkeypatch import MonkeyPatch
from monkeypatch import MonkeyPatchScope
from testValidation import slowtest
from testlib import VdsmTestCase
from testlib import expandPermutations, permutations
from testlib import start_thread
from testlib import temporaryPath
from vdsm import concurrent
from vdsm import constants
from vdsm.storage import check
from vdsm.storage import asyncevent
from vdsm.storage import exception
@expandPermutations
class TestDirectioChecker(VdsmTestCase):
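    """Tests for check.DirectioChecker: check results, check intervals and the
    idle/running/stopping state transitions."""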
def setUp(self):
self.loop = asyncevent.EventLoop()
self.results = []
self.checks = 1
def tearDown(self):
self.loop.close()
def complete(self, result):
self.results.append(result)
if len(self.results) == self.checks:
self.loop.stop()
def test_path_missing(self):
self.checks = 1
checker = check.DirectioChecker(self.loop, "/no/such/path",
self.complete)
checker.start()
self.loop.run_forever()
pprint.pprint(self.results)
result = self.results[0]
self.assertRaises(exception.MiscFileReadException, result.delay)
def test_path_ok(self):
self.checks = 1
with temporaryPath(data=b"blah") as path:
checker = check.DirectioChecker(self.loop, path, self.complete)
checker.start()
self.loop.run_forever()
pprint.pprint(self.results)
result = self.results[0]
delay = result.delay()
print("delay:", delay)
self.assertEqual(type(delay), float)
@MonkeyPatch(constants, "EXT_DD", "/no/such/executable")
def test_executable_missing(self):
self.checks = 1
with temporaryPath(data=b"blah") as path:
checker = check.DirectioChecker(self.loop, path, self.complete)
checker.start()
self.loop.run_forever()
pprint.pprint(self.results)
result = self.results[0]
self.assertRaises(exception.MiscFileReadException, result.delay)
@MonkeyPatch(constants, "EXT_TASKSET", "/no/such/executable")
def test_taskset_missing(self):
self.checks = 1
with temporaryPath(data=b"blah") as path:
checker = check.DirectioChecker(self.loop, path, self.complete)
checker.start()
self.loop.run_forever()
pprint.pprint(self.results)
result = self.results[0]
self.assertRaises(exception.MiscFileReadException, result.delay)
@slowtest
@permutations([
# interval, delay, expected
(0.20, 0.10, 0.20),
(0.10, 0.12, 0.20),
])
def test_interval(self, interval, delay, expected):
self.checks = 5
clock_res = 0.01
with fake_dd(delay):
checker = check.DirectioChecker(self.loop, "/path", self.complete,
interval=interval)
checker.start()
self.loop.run_forever()
pprint.pprint(self.results)
for i in range(self.checks - 1):
r1 = self.results[i]
r2 = self.results[i + 1]
actual = r2.time - r1.time
self.assertAlmostEqual(actual, expected, delta=clock_res)
@MonkeyPatch(check, "_log", FakeLogger(logging.WARNING))
def test_block_warnings(self):
self.checks = 1
with fake_dd(0.3):
checker = check.DirectioChecker(self.loop, "/path", self.complete,
interval=0.2)
checker.start()
self.loop.run_forever()
msg = check._log.messages[0][1]
# Matching time value is too fragile
r = re.compile(r"Checker '/path' is blocked for .+ seconds")
self.assertRegexpMatches(msg, r)
# In the idle state the checker is not running so there is nothing to
# cleanup.
def test_idle_stop_ignored(self):
checker = check.DirectioChecker(self.loop, "/path", self.complete)
checker.stop() # Will be ignored
self.assertFalse(checker.is_running())
def test_idle_repr(self):
checker = check.DirectioChecker(self.loop, "/path", self.complete)
print(checker)
self.assertIn("/path", str(checker))
self.assertIn(check.IDLE, str(checker))
self.assertNotIn("next_check=", str(checker))
# In the running state, the checker complete callback will stop the event
# loop. We need to run the loop until it is stopped.
def test_running_start_raises(self):
checker = check.DirectioChecker(self.loop, "/path", self.complete)
checker.start()
try:
self.assertRaises(RuntimeError, checker.start)
finally:
self.loop.run_forever()
def test_running_repr(self):
checker = check.DirectioChecker(self.loop, "/path", self.complete)
checker.start()
try:
print(checker)
self.assertIn("/path", str(checker))
self.assertIn(check.RUNNING, str(checker))
self.assertIn("next_check=", str(checker))
finally:
self.loop.run_forever()
# In the stopping state, the checker will not call the complete callback.
# We need to wait on the checker and stop the loop when it completes.
def test_stopping_stop_ignored(self):
checker = check.DirectioChecker(self.loop, "/path", self.complete)
checker.start()
try:
checker.stop()
checker.stop() # Will be ignored
self.assertTrue(checker.is_running())
finally:
start_thread(self.wait_for_checker, checker)
self.loop.run_forever()
def test_stopping_start_raises(self):
checker = check.DirectioChecker(self.loop, "/path", self.complete)
checker.start()
try:
checker.stop()
self.assertRaises(RuntimeError, checker.start)
finally:
start_thread(self.wait_for_checker, checker)
self.loop.run_forever()
def test_stopping_repr(self):
checker = check.DirectioChecker(self.loop, "/path", self.complete)
checker.start()
try:
checker.stop()
print(checker)
self.assertIn("/path", str(checker))
self.assertIn(check.STOPPING, str(checker))
self.assertNotIn("next_check=", str(checker))
finally:
start_thread(self.wait_for_checker, checker)
self.loop.run_forever()
def wait_for_checker(self, checker):
checker.wait(5)
self.loop.call_soon_threadsafe(self.loop.stop)
@expandPermutations
class TestDirectioCheckerWaiting(VdsmTestCase):
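    """Tests for stopping and waiting on a DirectioChecker while the event
    loop runs in a separate thread."""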
def setUp(self):
self.loop = asyncevent.EventLoop()
self.thread = concurrent.thread(self.loop.run_forever)
self.thread.start()
self.completed = threading.Event()
def tearDown(self):
self.loop.call_soon_threadsafe(self.loop.stop)
self.thread.join()
self.loop.close()
def complete(self, result):
self.completed.set()
def test_running_stop_during_wait(self):
checker = check.DirectioChecker(self.loop, "/path", self.complete)
self.loop.call_soon_threadsafe(checker.start)
self.assertTrue(self.completed.wait(1.0))
self.loop.call_soon_threadsafe(checker.stop)
self.assertTrue(checker.wait(1.0))
self.assertFalse(checker.is_running())
@slowtest
def test_running_stop_during_check(self):
with fake_dd(0.2):
checker = check.DirectioChecker(self.loop, "/path", self.complete)
self.loop.call_soon_threadsafe(checker.start)
self.loop.call_soon_threadsafe(checker.stop)
self.assertTrue(checker.wait(1.0))
self.assertFalse(self.completed.is_set())
self.assertFalse(checker.is_running())
@slowtest
def test_stopping_timeout(self):
with fake_dd(0.2):
checker = check.DirectioChecker(self.loop, "/path", self.complete)
self.loop.call_soon_threadsafe(checker.start)
self.loop.call_soon_threadsafe(checker.stop)
self.assertFalse(checker.wait(0.1))
self.assertTrue(checker.is_running())
@expandPermutations
class TestDirectioCheckerTimings(VdsmTestCase):
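    """Timing tests that run many DirectioCheckers concurrently against
    existing and missing paths."""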
def setUp(self):
self.loop = asyncevent.EventLoop()
self.results = []
def tearDown(self):
self.loop.close()
def complete(self, result):
self.results.append(result)
if len(self.results) == self.checkers:
self.loop.stop()
@slowtest
@permutations([[1], [50], [100], [200]])
def test_path_ok(self, checkers):
self.checkers = checkers
with temporaryPath(data=b"blah") as path:
start = time.time()
for i in range(checkers):
checker = check.DirectioChecker(self.loop, path, self.complete)
checker.start()
self.loop.run_forever()
elapsed = time.time() - start
self.assertEqual(len(self.results), self.checkers)
print("%d checkers: %f seconds" % (checkers, elapsed))
# Make sure all succeeded
for res in self.results:
res.delay()
@slowtest
@permutations([[1], [50], [100], [200]])
def test_path_missing(self, checkers):
self.checkers = checkers
start = time.time()
for i in range(checkers):
checker = check.DirectioChecker(self.loop, "/no/such/path",
self.complete)
checker.start()
self.loop.run_forever()
elapsed = time.time() - start
self.assertEqual(len(self.results), self.checkers)
print("%d checkers: %f seconds" % (checkers, elapsed))
# Make sure all failed
for res in self.results:
self.assertRaises(exception.MiscFileReadException, res.delay)
@expandPermutations
class TestCheckResult(VdsmTestCase):
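    """Tests for parsing dd output into a delay value in check.CheckResult."""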
@permutations([
# err, seconds
(b"1\n2\n1 byte (1 B) copied, 1 s, 1 B/s\n",
1.0),
(b"1\n2\n1024 bytes (1 kB) copied, 1 s, 1 kB/s\n",
1.0),
(b"1\n2\n1572864 bytes (1.5 MB) copied, 1.5 s, 1 MB/s\n",
1.5),
(b"1\n2\n1610612736 bytes (1.5 GB) copied, 1000.5 s, 1.53 MB/s\n",
1000.5),
(b"1\n2\n479 bytes (479 B) copied, 5.6832e-05 s, 8.4 MB/s\n",
5.6832e-05),
(b"1\n2\n512 bytes (512e-3 MB) copied, 1 s, 512e-3 MB/s\n",
1.0),
(b"1\n2\n524288 bytes (512e3 B) copied, 1 s, 512e3 B/s\n",
1.0),
(b"1\n2\n517 bytes (517 B) copied, 0 s, Infinity B/s\n",
0.0),
(b"1\n2\n4096 bytes (4.1 kB, 4.0 KiB) copied, "
b"0.00887814 s, 461 kB/s\n",
0.00887814),
(b"1\n2\n30 bytes copied, 0.00156704 s, 19.1 kB/s",
0.00156704),
])
def test_success(self, err, seconds):
result = check.CheckResult("/path", 0, err, 0, 0)
self.assertEqual(result.delay(), seconds)
def test_non_zero_exit_code(self):
path = "/path"
reason = "REASON"
result = check.CheckResult(path, 1, reason, 0, 0)
with self.assertRaises(exception.MiscFileReadException) as ctx:
result.delay()
self.assertIn(path, str(ctx.exception))
self.assertIn(reason, str(ctx.exception))
@permutations([
(b"",),
(b"1\n2\n\n",),
(b"1\n2\n1024 bytes (1 kB) copied, BAD, 1 kB/s\n",),
(b"1\n2\n1024 bytes (1 kB) copied, BAD s, 1 kB/s\n",),
(b"1\n2\n1024 bytes (1 kB) copied, -1- s, 1 kB/s\n",),
(b"1\n2\n1024 bytes (1 kB) copied, e3- s, 1 kB/s\n",),
])
def test_unexpected_output(self, err):
result = check.CheckResult("/path", 0, err, 0, 0)
self.assertRaises(exception.MiscFileReadException, result.delay)
class TestCheckService(VdsmTestCase):
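    """Tests for the check.CheckService API for starting, querying and
    stopping path checkers."""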
def setUp(self):
self.service = check.CheckService()
self.service.start()
self.result = None
self.completed = threading.Event()
def tearDown(self):
self.service.stop()
def complete(self, result):
self.result = result
self.completed.set()
def test_start_checking(self):
with fake_dd(0.0):
self.service.start_checking("/path", self.complete)
self.assertTrue(self.service.is_checking("/path"))
self.assertTrue(self.completed.wait(1.0))
self.assertEqual(self.result.rc, 0)
def test_start_checking_already_watched(self):
with fake_dd(0.0):
self.service.start_checking("/path", self.complete)
with self.assertRaises(RuntimeError):
self.service.start_checking("/path", self.complete)
def test_stop_checking(self):
with fake_dd(0.0):
self.service.start_checking("/path", self.complete)
self.service.stop_checking("/path")
self.assertFalse(self.service.is_checking("/path"))
def test_stop_checking_not_watched(self):
with self.assertRaises(KeyError):
self.service.stop_checking("/path")
def test_stop_checking_and_wait(self):
with fake_dd(0.0):
self.service.start_checking("/path", self.complete)
self.assertTrue(self.service.stop_checking("/path", timeout=1.0))
self.assertFalse(self.service.is_checking("/path"))
@slowtest
def test_stop_checking_timeout(self):
with fake_dd(0.2):
self.service.start_checking("/path", self.complete)
self.assertFalse(self.service.stop_checking("/path", timeout=0.1))
self.assertFalse(self.service.is_checking("/path"))
@contextmanager
def fake_dd(delay):
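    """
    Temporarily replace constants.EXT_DD with a tiny shell script that just
    sleeps for `delay` seconds, simulating a slow (or fast) dd invocation.
    """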
script = "#!/bin/sh\nsleep %.1f\n" % delay
script = script.encode('ascii')
with temporaryPath(data=script) as fake_dd:
os.chmod(fake_dd, 0o700)
with MonkeyPatchScope([(constants, "EXT_DD", fake_dd)]):
yield
| gpl-2.0 | 4,354,706,890,416,066,000 | 34.501171 | 79 | 0.606241 | false |
kushankr/approval_frame | approval_frame/urls.py | 1 | 1127 | from django.conf.urls import include, patterns, url
from django.contrib import admin
from approval_frame import views
from views import CustomRegistrationView
# autodiscover is required only for older versions of Django
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^approval_polls/', include('approval_polls.urls', namespace="approval_polls")),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/register/$', CustomRegistrationView.as_view(),
name='registration_register'),
url(r'^accounts/', include('registration.backends.default.urls')),
url(r'^accounts/username/change/$', views.changeUsername, name="username_change"),
url(r'^accounts/username/change/done/$', views.changeUsernameDone, name="username_change_done"),
url(r'^accounts/password/change/$', 'django.contrib.auth.views.password_change',
{'post_change_redirect': '/accounts/password_change/done/'}, name="password_change"),
url(r'^accounts/password/change/done/$', 'django.contrib.auth.views.password_change_done'),
url('', include('social.apps.django_app.urls', namespace='social'))
)
| gpl-3.0 | -25,690,787,189,746,730 | 48 | 100 | 0.723159 | false |
NeverDecaf/syncplay | syncplay/messages_de.py | 1 | 38136 | # coding:utf8
"""Deutsch dictionary"""
de = {
"LANGUAGE": "Deutsch", # (German)
# Client notifications
"config-cleared-notification": "Einstellungen gelöscht. Änderungen werden gespeichert, wenn du eine gültige Konfiguration speicherst.",
"relative-config-notification": "Relative Konfigurationsdatei(en) geladen: {}",
"connection-attempt-notification": "Verbinde mit {}:{}", # Port, IP
"reconnection-attempt-notification": "Verbindung zum Server verloren, versuche erneut",
"disconnection-notification": "Verbindung zum Server beendet",
"connection-failed-notification": "Verbindung zum Server fehlgeschlagen",
"connected-successful-notification": "Erfolgreich mit Server verbunden",
"retrying-notification": "%s, versuche erneut in %d Sekunden...", # Seconds
"reachout-successful-notification": "Successfully reached {} ({})", # TODO: Translate
"rewind-notification": "Zurückgespult wegen Zeitdifferenz mit {}", # User
"fastforward-notification": "Vorgespult wegen Zeitdifferenz mit {}", # User
"slowdown-notification": "Verlangsamt wegen Zeitdifferenz mit {}", # User
"revert-notification": "Normalgeschwindigkeit",
"pause-notification": "{} pausierte", # User
"unpause-notification": "{} startete", # User
"seek-notification": "{} sprang von {} nach {}", # User, from time, to time
"current-offset-notification": "Aktueller Offset: {} Sekunden", # Offset
"media-directory-list-updated-notification": "Syncplay media directories have been updated.", # TODO: Translate
"room-join-notification": "{} hat den Raum '{}' betreten", # User
"left-notification": "{} ist gegangen", # User
"left-paused-notification": "{} ist gegangen, {} pausierte", # User who left, User who paused
"playing-notification": "{} spielt '{}' ({})", # User, file, duration
"playing-notification/room-addendum": " in Raum: '{}'", # Room
"not-all-ready": "Noch nicht bereit: {}", # Usernames
"all-users-ready": "Alle sind bereit ({} Nutzer)", # Number of ready users
"ready-to-unpause-notification": "Du bist bereit - noch einmal fortsetzen klicken zum abspielen",
"set-as-ready-notification": "Du bist bereit",
"set-as-not-ready-notification": "Du bist nicht bereit",
"autoplaying-notification": "Starte in {}...", # Number of seconds until playback will start
"identifying-as-controller-notification": "Identifiziere als Raumleiter mit Passwort '{}'...", # TODO: find a better translation to "room operator"
"failed-to-identify-as-controller-notification": "{} konnte sich nicht als Raumleiter identifizieren.",
"authenticated-as-controller-notification": "{} authentifizierte sich als Raumleiter",
"created-controlled-room-notification": "Gesteuerten Raum '{}' mit Passwort '{}' erstellt. Bitte diese Informationen für die Zukunft aufheben!", # RoomName, operatorPassword
"file-different-notification": "Deine Datei scheint sich von {}s zu unterscheiden", # User
"file-differences-notification": "Deine Datei unterscheidet sich auf folgende Art: {}",
"room-file-differences": "Unterschiedlich in: {}", # File differences (filename, size, and/or duration)
"file-difference-filename": "Name",
"file-difference-filesize": "Größe",
"file-difference-duration": "Dauer",
"alone-in-the-room": "Du bist alleine im Raum",
"different-filesize-notification": " (ihre Dateigröße ist anders als deine!)",
"userlist-playing-notification": "{} spielt:", # Username
"file-played-by-notification": "Datei: {} wird gespielt von:", # File
"no-file-played-notification": "{} spielt keine Datei ab", # Username
"notplaying-notification": "Personen im Raum, die keine Dateien spielen:",
"userlist-room-notification": "In Raum '{}':", # Room
"userlist-file-notification": "Datei",
"controller-userlist-userflag": "Raumleiter",
"ready-userlist-userflag": "Bereit",
"update-check-failed-notification": "Konnte nicht automatisch prüfen, ob Syncplay {} aktuell ist. Soll https://syncplay.pl/ geöffnet werden, um manuell nach Updates zu suchen?", # Syncplay version
"syncplay-uptodate-notification": "Syncplay ist aktuell",
"syncplay-updateavailable-notification": "Eine neuere Version von Syncplay ist verfügbar. Soll die Download-Seite geöffnet werden?",
"mplayer-file-required-notification": "Syncplay für mplayer benötigt eine Dateiangabe beim Start",
"mplayer-file-required-notification/example": "Anwendungsbeispiel: syncplay [optionen] [url|pfad/]Dateiname",
"mplayer2-required": "Syncplay ist inkompatibel zu MPlayer 1.x, bitte nutze MPlayer2 oder mpv",
"unrecognized-command-notification": "Unbekannter Befehl",
"commandlist-notification": "Verfügbare Befehle:",
"commandlist-notification/room": "\tr [Name] - Raum ändern",
"commandlist-notification/list": "\tl - Nutzerliste anzeigen",
"commandlist-notification/undo": "\tu - Letzter Zeitsprung rückgängig",
"commandlist-notification/pause": "\tp - Pausieren / weiter",
"commandlist-notification/seek": "\t[s][+-]Zeit - zu einer bestimmten Zeit spulen, ohne + oder - wird als absolute Zeit gewertet; Angabe in Sekunden oder Minuten:Sekunden",
"commandlist-notification/help": "\th - Diese Hilfe",
"commandlist-notification/toggle": "\tt - Bereitschaftsanzeige umschalten",
"commandlist-notification/create": "\tc [name] - erstelle zentral gesteuerten Raum mit dem aktuellen Raumnamen",
"commandlist-notification/auth": "\ta [password] - authentifiziere als Raumleiter mit Passwort",
"commandlist-notification/chat": "\tch [message] - send a chat message in a room", # TODO: Translate
"syncplay-version-notification": "Syncplay Version: {}", # syncplay.version
"more-info-notification": "Weitere Informationen auf: {}", # projectURL
"gui-data-cleared-notification": "Syncplay hat die Pfad und Fensterdaten der Syncplay-GUI zurückgesetzt.",
"language-changed-msgbox-label": "Die Sprache wird geändert, wenn du Syncplay neu startest.",
"promptforupdate-label": "Soll Syncplay regelmäßig nach Updates suchen?",
"media-player-latency-warning": "Warnung: Der Mediaplayer brauchte {} Sekunden zum Antworten. Wenn Probleme bei der Synchronisation auftreten, schließe bitte andere Anwendungen, um Ressourcen freizugeben. Sollte das nicht funktionieren, versuche es mit einem anderen Media-Player.", # Seconds to respond
"mpv-unresponsive-error": "MPV hat für {} Sekunden nicht geantwortet und scheint abgestürzt zu sein. Bitte starte Syncplay neu.", # Seconds to respond
# Client prompts
"enter-to-exit-prompt": "Enter drücken zum Beenden\n",
# Client errors
"missing-arguments-error": "Notwendige Argumente fehlen, siehe --help",
"server-timeout-error": "Timeout: Verbindung zum Server fehlgeschlagen",
"mpc-slave-error": "Kann MPC nicht im Slave-Modus starten!",
"mpc-version-insufficient-error": "MPC-Version nicht ausreichend, bitte nutze `mpc-hc` >= `{}`",
"mpc-be-version-insufficient-error": "MPC-Version nicht ausreichend, bitte nutze `mpc-be` >= `{}`",
"mpv-version-error": "Syncplay ist nicht kompatibel mit dieser Version von mpv. Bitte benutze eine andere Version (z.B. Git HEAD).",
"player-file-open-error": "Fehler beim Öffnen der Datei durch den Player",
"player-path-error": "Ungültiger Player-Pfad. Supported players are: mpv, mpv.net, VLC, MPC-HC, MPC-BE and mplayer2", # To do: Translate end
"hostname-empty-error": "Hostname darf nicht leer sein",
"empty-error": "{} darf nicht leer sein", # Configuration
"media-player-error": "Player-Fehler: \"{}\"", # Error line
"unable-import-gui-error": "Konnte die GUI-Bibliotheken nicht importieren. PySide muss installiert sein, damit die grafische Oberfläche funktioniert.",
"unable-import-twisted-error": "Could not import Twisted. Please install Twisted v16.4.0 or later.", #To do: translate
"arguments-missing-error": "Notwendige Argumente fehlen, siehe --help",
"unable-to-start-client-error": "Client kann nicht gestartet werden",
"player-path-config-error": "Player-Pfad ist nicht ordnungsgemäß gesetzt. Supported players are: mpv, mpv.net, VLC, MPC-HC, MPC-BE and mplayer2.", # To do: Translate end
"no-file-path-config-error": "Es muss eine Datei ausgewählt werden, bevor der Player gestartet wird.",
"no-hostname-config-error": "Hostname darf nicht leer sein",
"invalid-port-config-error": "Port muss gültig sein",
"empty-value-config-error": "{} darf nicht leer sein", # Config option
"not-json-error": "Kein JSON-String\n",
"hello-arguments-error": "Zu wenige Hello-Argumente\n",
"version-mismatch-error": "Verschiedene Versionen auf Client und Server\n",
"vlc-failed-connection": "Kann nicht zu VLC verbinden. Wenn du syncplay.lua nicht installiert hast, findest du auf https://syncplay.pl/LUA/ [Englisch] eine Anleitung.",
"vlc-failed-noscript": "Laut VLC ist das syncplay.lua Interface-Skript nicht installiert. Auf https://syncplay.pl/LUA/ [Englisch] findest du eine Anleitung.",
"vlc-failed-versioncheck": "Diese VLC-Version wird von Syncplay nicht unterstützt. Bitte nutze VLC 2.0",
"feature-sharedPlaylists": "shared playlists", # used for not-supported-by-server-error # TODO: Translate
"feature-chat": "chat", # used for not-supported-by-server-error # TODO: Translate
"feature-readiness": "readiness", # used for not-supported-by-server-error # TODO: Translate
"feature-managedRooms": "managed rooms", # used for not-supported-by-server-error # TODO: Translate
"not-supported-by-server-error": "Dieses Feature wird vom Server nicht unterstützt. Es wird ein Server mit Syncplay Version {}+ benötigt, aktuell verwendet wird jedoch Version {}.", # minVersion, serverVersion
"shared-playlists-not-supported-by-server-error": "The shared playlists feature may not be supported by the server. To ensure that it works correctly requires a server running Syncplay {}+, but the server is running Syncplay {}.", # minVersion, serverVersion # TODO: Translate
"shared-playlists-disabled-by-server-error": "The shared playlist feature has been disabled in the server configuration. To use this feature you will need to connect to a different server.", # TODO: Translate
"invalid-seek-value": "Ungültige Zeitangabe",
"invalid-offset-value": "Ungültiger Offset-Wert",
"switch-file-not-found-error": "Konnte nicht zur Datei '{0}' wechseln. Syncplay looks in the specified media directories.", # File not found, folder it was not found in # TODO: Re-translate "Syncplay sucht im Ordner der aktuellen Datei und angegebenen Medien-Verzeichnissen." to reference to checking in "current media directory"
"folder-search-timeout-error": "The search for media in media directories was aborted as it took too long to search through '{}'. This will occur if you select a folder with too many sub-folders in your list of media folders to search through. For automatic file switching to work again please select File->Set Media Directories in the menu bar and remove this directory or replace it with an appropriate sub-folder. If the folder is actually fine then you can re-enable it by selecting File->Set Media Directories and pressing 'OK'.", # Folder # TODO: Translate
"folder-search-first-file-timeout-error": "The search for media in '{}' was aborted as it took too long to access the directory. This could happen if it is a network drive or if you configure your drive to spin down after a period of inactivity. For automatic file switching to work again please go to File->Set Media Directories and either remove the directory or resolve the issue (e.g. by changing power saving settings).", # Folder # TODO: Translate
"added-file-not-in-media-directory-error": "You loaded a file in '{}' which is not a known media directory. You can add this as a media directory by selecting File->Set Media Directories in the menu bar.", # Folder # TODO: Translate
"no-media-directories-error": "No media directories have been set. For shared playlist and file switching features to work properly please select File->Set Media Directories and specify where Syncplay should look to find media files.", # TODO: Translate
"cannot-find-directory-error": "Could not find media directory '{}'. To update your list of media directories please select File->Set Media Directories from the menu bar and specify where Syncplay should look to find media files.", # TODO: Translate
"failed-to-load-server-list-error": "Konnte die Liste der öffentlichen Server nicht laden. Bitte besuche https://www.syncplay.pl/ [Englisch] mit deinem Browser.",
# Client arguments
"argument-description": 'Syncplay ist eine Anwendung um mehrere MPlayer, MPC-HC, MPC-BE und VLC-Instanzen über das Internet zu synchronisieren.',
"argument-epilog": 'Wenn keine Optionen angegeben sind, werden die _config-Werte verwendet',
"nogui-argument": 'Keine GUI anzeigen',
"host-argument": 'Server-Adresse',
"name-argument": 'Gewünschter Nutzername',
"debug-argument": 'Debug-Modus',
"force-gui-prompt-argument": 'Einstellungsfenster anzeigen',
"no-store-argument": 'keine Werte in .syncplay speichern',
"room-argument": 'Standard-Raum',
"password-argument": 'Server-Passwort',
"player-path-argument": 'Pfad zum Player',
"file-argument": 'Abzuspielende Datei',
"args-argument": 'Player-Einstellungen; Wenn du Einstellungen, die mit - beginnen, nutzen willst, stelle ein einzelnes \'--\'-Argument davor',
"clear-gui-data-argument": 'Setzt die Pfad- und GUI-Fenster-Daten die in den QSettings gespeichert sind zurück',
"language-argument": 'Sprache für Syncplay-Nachrichten (de/en/ru/it/es)',
"version-argument": 'gibt die aktuelle Version aus',
"version-message": "Du verwendest Syncplay v. {} ({})",
"load-playlist-from-file-argument": "loads playlist from text file (one entry per line)", # TODO: Translate
# Client labels
"config-window-title": "Syncplay Konfiguration",
"connection-group-title": "Verbindungseinstellungen",
"host-label": "Server-Adresse:",
"name-label": "Benutzername (optional):",
"password-label": "Server-Passwort (falls nötig):",
"room-label": "Standard-Raum:",
"media-setting-title": "Media-Player Einstellungen",
"executable-path-label": "Pfad zum Media-Player:",
"media-path-label": "Pfad zur Datei:", # Todo: Translate to 'Path to video (optional)'
"player-arguments-label": "Playerparameter:",
"browse-label": "Durchsuchen",
"update-server-list-label": "Liste aktualisieren",
"more-title": "Mehr Einstellungen zeigen",
"never-rewind-value": "Niemals",
"seconds-suffix": " sek",
"privacy-sendraw-option": "Klartext senden",
"privacy-sendhashed-option": "Hash senden",
"privacy-dontsend-option": "Nicht senden",
"filename-privacy-label": "Dateiname:",
"filesize-privacy-label": "Dateigröße:",
"checkforupdatesautomatically-label": "Automatisch nach Updates suchen",
"slowondesync-label": "Verlangsamen wenn nicht synchron (nicht unterstützt mit MPC-HC/BE)",
"dontslowdownwithme-label": "Nie verlangsamen oder andere zurückspulen (Experimentell)",
"pausing-title": "Pausing", # TODO: Translate
"pauseonleave-label": "Pausieren wenn ein Benutzer austritt",
"readiness-title": "Initial readiness state", # TODO: Translate
"readyatstart-label": "Standardmäßig auf \'Bereit\' stellen",
"forceguiprompt-label": "Diesen Dialog nicht mehr anzeigen",
"showosd-label": "OSD-Nachrichten anzeigen",
"showosdwarnings-label": "Zeige Warnungen (z.B. wenn Dateien verschieden)",
"showsameroomosd-label": "Zeige Ereignisse in deinem Raum",
"shownoncontrollerosd-label": "Zeige Ereignisse von nicht geführten Räumen in geführten Räumen.",
"showdifferentroomosd-label": "Zeige Ereignisse in anderen Räumen",
"showslowdownosd-label": "Zeige Verlangsamungs/Zurücksetzungs-Benachrichtigung",
"language-label": "Sprache:",
"automatic-language": "Automatisch ({})", # Default language
"showdurationnotification-label": "Zeige Warnung wegen unterschiedlicher Dauer",
"basics-label": "Grundlagen",
"readiness-label": "Play/Pause",
"misc-label": "Diverse",
"core-behaviour-title": "Verhalten des Raumes",
"syncplay-internals-title": "Syncplay intern",
"syncplay-mediasearchdirectories-title": "In diesen Verzeichnissen nach Medien suchen", # needs to be checked
"syncplay-mediasearchdirectories-label": "In diesen Verzeichnissen nach Medien suchen (ein Pfad pro Zeile)",
"sync-label": "Synchronisation",
"sync-otherslagging-title": "Wenn andere laggen...",
"sync-youlaggging-title": "Wenn du laggst...",
"messages-label": "Nachrichten",
"messages-osd-title": "OSD-(OnScreenDisplay)-Einstellungen",
"messages-other-title": "Weitere Display-Einstellungen",
"chat-label": "Chat", # TODO: Translate
"privacy-label": "Privatsphäre",
"privacy-title": "Privatsphäreneinstellungen",
"unpause-title": "Wenn du Play drückst, auf Bereit setzen und:",
"unpause-ifalreadyready-option": "Wiedergeben wenn bereits als Bereit gesetzt",
"unpause-ifothersready-option": "Wiedergeben wenn bereits als Bereit gesetzt oder alle anderen bereit sind (Standard)",
"unpause-ifminusersready-option": "Wiedergeben wenn bereits als Bereit gesetzt oder die minimale Anzahl anderer Nutzer bereit ist",
"unpause-always": "Immer wiedergeben",
"syncplay-trusteddomains-title": "Trusted domains (for streaming services and hosted content)", # TODO: Translate into German
"chat-title": "Chat message input", # TODO: Translate
"chatinputenabled-label": "Enable chat input via mpv (using enter key)", # TODO: Translate
"chatdirectinput-label": "Allow instant chat input (bypass having to press enter key to chat)", # TODO: Translate
"chatinputfont-label": "Chat input font", # TODO: Translate
"chatfont-label": "Set font", # TODO: Translate
"chatcolour-label": "Set colour", # TODO: Translate
"chatinputposition-label": "Position of message input area in mpv", # TODO: Translate
"chat-top-option": "Top", # TODO: Translate
"chat-middle-option": "Middle", # TODO: Translate
"chat-bottom-option": "Bottom", # TODO: Translate
"chatoutputheader-label": "Chat message output", # TODO: Translate
"chatoutputfont-label": "Chat output font", # TODO: Translate
"chatoutputenabled-label": "Enable chat output in media player (mpv only for now)", # TODO: Translate
"chatoutputposition-label": "Output mode", # TODO: Translate
"chat-chatroom-option": "Chatroom style", # TODO: Translate
"chat-scrolling-option": "Scrolling style", # TODO: Translate
"mpv-key-tab-hint": "[TAB] to toggle access to alphabet row key shortcuts.", # TODO: Translate
"mpv-key-hint": "[ENTER] to send message. [ESC] to escape chat mode.", # TODO: Translate
"alphakey-mode-warning-first-line": "You can temporarily use old mpv bindings with a-z keys.", # TODO: Translate
"alphakey-mode-warning-second-line": "Press [TAB] to return to Syncplay chat mode.", # TODO: Translate
"help-label": "Hilfe",
"reset-label": "Standardwerte zurücksetzen",
"run-label": "Syncplay starten",
"storeandrun-label": "Konfiguration speichern und Syncplay starten",
"contact-label": "Du hast eine Idee, einen Bug gefunden oder möchtest Feedback geben? Sende eine E-Mail an <a href=\"mailto:[email protected]\">[email protected]</a>, chatte auf dem <a href=\"https://webchat.freenode.net/?channels=#syncplay\">#Syncplay IRC-Kanal</a> auf irc.freenode.net oder <a href=\"https://github.com/Uriziel/syncplay/issues\">öffne eine Fehlermeldung auf GitHub</a>. Außerdem findest du auf <a href=\"https://syncplay.pl/\">https://syncplay.pl/</a> weitere Informationen, Hilfestellungen und Updates. OTE: Chat messages are not encrypted so do not use Syncplay to send sensitive information.", # TODO: Translate last sentence
"joinroom-label": "Raum beitreten",
"joinroom-menu-label": "Raum beitreten {}", # TODO: Might want to fix this
"seektime-menu-label": "Spule zu Zeit",
"undoseek-menu-label": "Rückgängig",
"play-menu-label": "Wiedergabe",
"pause-menu-label": "Pause",
"playbackbuttons-menu-label": "Wiedergabesteuerung anzeigen",
"autoplay-menu-label": "Auto-Play-Knopf anzeigen",
"autoplay-guipushbuttonlabel": "Automatisch abspielen wenn alle bereit sind",
"autoplay-minimum-label": "Minimum an Nutzern:",
"sendmessage-label": "Send", # TODO: Translate
"ready-guipushbuttonlabel": "Ich bin bereit den Film anzuschauen!",
"roomuser-heading-label": "Raum / Benutzer",
"size-heading-label": "Größe",
"duration-heading-label": "Länge",
"filename-heading-label": "Dateiname",
"notifications-heading-label": "Benachrichtigungen",
"userlist-heading-label": "Liste der gespielten Dateien",
"browseformedia-label": "Nach Mediendateien durchsuchen",
"file-menu-label": "&Datei", # & precedes shortcut key
"openmedia-menu-label": "&Mediendatei öffnen...",
"openstreamurl-menu-label": "&Stream URL öffnen",
"setmediadirectories-menu-label": "Set media &directories", # TODO: Translate
"loadplaylistfromfile-menu-label": "&Load playlist from file", # TODO: Translate
"saveplaylisttofile-menu-label": "&Save playlist to file", # TODO: Translate
"exit-menu-label": "&Beenden",
"advanced-menu-label": "&Erweitert",
"window-menu-label": "&Fenster",
"setoffset-menu-label": "&Offset einstellen",
"createcontrolledroom-menu-label": "&Zentral gesteuerten Raum erstellen",
"identifyascontroller-menu-label": "Als Raumleiter &identifizieren",
"settrusteddomains-menu-label": "Set &trusted domains", # TODO: Translate
"addtrusteddomain-menu-label": "Add {} as trusted domain", # Domain # TODO: Translate
"edit-menu-label": "&Bearbeiten",
"cut-menu-label": "Aus&schneiden",
"copy-menu-label": "&Kopieren",
"paste-menu-label": "&Einsetzen",
"selectall-menu-label": "&Alles auswälhen",
"playback-menu-label": "&Wiedergabe",
"help-menu-label": "&Hilfe",
"userguide-menu-label": "&Benutzerhandbuch öffnen",
"update-menu-label": "auf &Aktualisierung prüfen",
# startTLS messages - TODO: Translate
"startTLS-initiated": "Attempting secure connection",
"startTLS-secure-connection-ok": "Secure connection established ({})",
"startTLS-server-certificate-invalid": 'Secure connection failed. The server uses an invalid security certificate. This communication could be intercepted by a third party. For further details and troubleshooting see <a href="https://syncplay.pl/trouble">here</a>.',
"startTLS-not-supported-client": "This client does not support TLS",
"startTLS-not-supported-server": "This server does not support TLS",
# TLS certificate dialog - TODO: Translate
"tls-information-title": "Certificate Details",
"tls-dialog-status-label": "<strong>Syncplay is using an encrypted connection to {}.</strong>",
"tls-dialog-desc-label": "Encryption with a digital certificate keeps information private as it is sent to or from the<br/>server {}.",
"tls-dialog-connection-label": "Information encrypted using Transport Layer Security (TLS), version {} with the cipher<br/>suite: {}.",
"tls-dialog-certificate-label": "Certificate issued by {} valid until {}.",
# About dialog - TODO: Translate
"about-menu-label": "&About Syncplay",
"about-dialog-title": "About Syncplay",
"about-dialog-release": "Version {} release {}",
"about-dialog-license-text": "Licensed under the Apache License, Version 2.0",
"about-dialog-license-button": "License",
"about-dialog-dependencies": "Dependencies",
"setoffset-msgbox-label": "Offset einstellen",
"offsetinfo-msgbox-label": "Offset (siehe https://syncplay.pl/guide/ für eine Anleitung [Englisch]):",
"promptforstreamurl-msgbox-label": "Stream URL öffnen",
"promptforstreamurlinfo-msgbox-label": "Stream URL",
"addfolder-label": "Add folder", # TODO: Translate
"adduris-msgbox-label": "Add URLs to playlist (one per line)", # TODO: Translate
"editplaylist-msgbox-label": "Set playlist (one per line)", # TODO: Translate
"trusteddomains-msgbox-label": "Domains it is okay to automatically switch to (one per line)", # TODO: Translate
"createcontrolledroom-msgbox-label": "Zentral gesteuerten Raum erstellen",
"controlledroominfo-msgbox-label": "Namen des zentral gesteuerten Raums eingeben\r\n(siehe https://syncplay.pl/guide/ für eine Anleitung [Englisch]):",
"identifyascontroller-msgbox-label": "Als Raumleiter identifizieren",
"identifyinfo-msgbox-label": "Passwort des zentral gesteuerten Raums eingeben\r\n(siehe https://syncplay.pl/guide/ für eine Anleitung [Englisch]):",
"public-server-msgbox-label": "Einen öffentlichen Server für diese Sitzung auswählen",
"megabyte-suffix": " MB",
# Tooltips
"host-tooltip": "Hostname oder IP zu der verbunden werden soll. Optional mit Port (z.B.. syncplay.pl:8999). Synchronisation findet nur mit Personen auf dem selben Server und Port statt.",
"name-tooltip": "Dein Benutzername. Keine Registrierung, kann einfach geändert werden. Bei fehlender Angabe wird ein zufälliger Name generiert.",
"password-tooltip": "Passwörter sind nur bei Verbindung zu privaten Servern nötig.",
"room-tooltip": "Der Raum, der betreten werden soll, kann ein x-beliebiger sein. Allerdings werden nur Clients im selben Raum synchronisiert.",
"executable-path-tooltip": "Pfad zum ausgewählten, unterstützten Mediaplayer (MPC-HC, MPC-BE, VLC, mplayer2 or mpv).",
"media-path-tooltip": "Pfad zum wiederzugebenden Video oder Stream. Notwendig für mplayer2.", # TODO: Confirm translation
"player-arguments-tooltip": "Zusätzliche Kommandozeilenparameter / -schalter für diesen Mediaplayer.",
"mediasearcdirectories-arguments-tooltip": "Verzeichnisse, in denen Syncplay nach Mediendateien suchen soll, z.B. wenn du das Click-to-switch-Feature verwendest. Syncplay wird rekursiv Unterordner durchsuchen.", # TODO: Translate Click-to-switch? (or use as name for feature)
"more-tooltip": "Weitere Einstellungen anzeigen.",
"filename-privacy-tooltip": "Privatheitsmodus beim Senden des Namens der aktuellen Datei zum Server.",
"filesize-privacy-tooltip": "Privatheitsmodus beim Senden der Größe der aktuellen Datei zum Server.",
"privacy-sendraw-tooltip": "Die Information im Klartext übertragen. Dies ist die Standard-Einstellung mit der besten Funktionalität.",
"privacy-sendhashed-tooltip": "Die Informationen gehasht übertragen, um sie für andere Clients schwerer lesbar zu machen.",
"privacy-dontsend-tooltip": "Diese Information nicht übertragen. Dies garantiert den größtmöglichen Datanschutz.",
"checkforupdatesautomatically-tooltip": "Regelmäßig auf der Syncplay-Website nach Updates suchen.",
"slowondesync-tooltip": "Reduziert die Abspielgeschwindigkeit zeitweise, um die Synchronität zu den anderen Clients wiederherzustellen.",
"rewindondesync-label": "Zurückspulen bei großer Zeitdifferenz (empfohlen)",
"fastforwardondesync-label": "Vorspulen wenn das Video laggt (empfohlen)",
"dontslowdownwithme-tooltip": "Lässt andere nicht langsamer werden oder zurückspringen, wenn deine Wiedergabe hängt.",
"pauseonleave-tooltip": "Wiedergabe anhalten, wenn deine Verbindung verloren geht oder jemand den Raum verlässt.",
"readyatstart-tooltip": "Zu Beginn auf 'Bereit' setzen (sonst bist du als 'Nicht Bereit' gesetzt, bis du den Status änderst)",
"forceguiprompt-tooltip": "Der Konfigurationsdialog wird nicht angezeigt, wenn eine Datei mit Syncplay geöffnet wird.",
"nostore-tooltip": "Syncplay mit den angegebenen Einstellungen starten, diese aber nicht dauerhaft speichern.",
"rewindondesync-tooltip": "Zum Wiederherstellen der Synchronität in der Zeit zurückspringen (empfohlen)",
"fastforwardondesync-tooltip": "Nach vorne springen, wenn asynchron zum Raumleiter (oder deine vorgetäuschte Position, falls 'Niemals verlangsamen oder andere zurückspulen' aktiviert ist).",
"showosd-tooltip": "Syncplay-Nachrichten auf dem OSD (= OnScreenDisplay, ein eingeblendetes Textfeld) des Players anzeigen.",
"showosdwarnings-tooltip": "Warnungen bei Unterschiedlichen Dateien oder Alleinsein im Raum anzeigen.",
"showsameroomosd-tooltip": "OSD-Meldungen über Ereignisse im selben Raum anzeigen.",
"shownoncontrollerosd-tooltip": "OSD-Meldungen bei Ereignissen verursacht durch nicht-Raumleiter in zentral gesteuerten Räumen anzeigen.",
"showdifferentroomosd-tooltip": "OSD-Meldungen zu anderen Räumen als dem aktuell betretenen anzeigen.",
"showslowdownosd-tooltip": "Meldungen bei Geschwindigkeitsänderung anzeigen.",
"showdurationnotification-tooltip": "Nützlich, wenn z.B. ein Teil eines mehrteiligen Videos fehlt, kann jedoch auch fehlerhaft anschlagen.",
"language-tooltip": "Die verwendete Sprache von Syncplay",
"unpause-always-tooltip": "Wiedergabe startet immer (anstatt nur den Bereitschaftsstatus zu ändern)",
"unpause-ifalreadyready-tooltip": "Wenn du nicht bereit bist und Play drückst wirst du als bereit gesetzt - zum Starten der Wiedergabe nochmal drücken.",
"unpause-ifothersready-tooltip": "Wenn du Play drückst und nicht bereit bist, wird nur gestartet, wenn alle anderen bereit sind.",
"unpause-ifminusersready-tooltip": "Wenn du Play drückst und nicht bereit bist, wird nur gestartet, wenn die minimale Anzahl anderer Benutzer bereit ist.",
"trusteddomains-arguments-tooltip": "Domains that it is okay for Syncplay to automatically switch to when shared playlists is enabled.", # TODO: Translate into German
"chatinputenabled-tooltip": "Enable chat input in mpv (press enter to chat, enter to send, escape to cancel)", # TODO: Translate
"chatdirectinput-tooltip": "Skip having to press 'enter' to go into chat input mode in mpv. Press TAB in mpv to temporarily disable this feature.", # TODO: Translate
"font-label-tooltip": "Font used for when entering chat messages in mpv. Client-side only, so doesn't affect what other see.", # TODO: Translate
"set-input-font-tooltip": "Font family used for when entering chat messages in mpv. Client-side only, so doesn't affect what other see.", # TODO: Translate
"set-input-colour-tooltip": "Font colour used for when entering chat messages in mpv. Client-side only, so doesn't affect what other see.", # TODO: Translate
"chatinputposition-tooltip": "Location in mpv where chat input text will appear when you press enter and type.", # TODO: Translate
"chatinputposition-top-tooltip": "Place chat input at top of mpv window.", # TODO: Translate
"chatinputposition-middle-tooltip": "Place chat input in dead centre of mpv window.", # TODO: Translate
"chatinputposition-bottom-tooltip": "Place chat input at bottom of mpv window.", # TODO: Translate
"chatoutputenabled-tooltip": "Show chat messages in OSD (if supported by media player).", # TODO: Translate
"font-output-label-tooltip": "Chat output font.", # TODO: Translate
"set-output-font-tooltip": "Font used for when displaying chat messages.", # TODO: Translate
"chatoutputmode-tooltip": "How chat messages are displayed.", # TODO: Translate
"chatoutputmode-chatroom-tooltip": "Display new lines of chat directly below previous line.", # TODO: Translate
"chatoutputmode-scrolling-tooltip": "Scroll chat text from right to left.", # TODO: Translate
"help-tooltip": "Öffnet Hilfe auf syncplay.pl [Englisch]",
"reset-tooltip": "Alle Einstellungen auf Standardwerte zurücksetzen.",
"update-server-list-tooltip": "Mit syncplay.pl verbinden um die Liste öffentlicher Server zu aktualisieren.",
"sslconnection-tooltip": "Securely connected to server. Click for certificate details.", # TODO: Translate
"joinroom-tooltip": "Den aktuellen Raum verlassen und stattdessen den angegebenen betreten.",
"seektime-msgbox-label": "Springe zur angegebenen Zeit (in Sekunden oder min:sek). Verwende +/- zum relativen Springen.",
"ready-tooltip": "Zeigt an, ob du bereit zum anschauen bist",
"autoplay-tooltip": "Automatisch abspielen, wenn alle Nutzer bereit sind oder die minimale Nutzerzahl erreicht ist.",
"switch-to-file-tooltip": "Doppelklicken um zu {} zu wechseln", # Filename
"sendmessage-tooltip": "Send message to room", # TODO: Translate
# In-userlist notes (GUI)
"differentsize-note": "Verschiedene Größe!",
"differentsizeandduration-note": "Verschiedene Größe und Dauer!",
"differentduration-note": "Verschiedene Dauer!",
"nofile-note": "(keine Datei wird abgespielt)",
# Server messages to client
"new-syncplay-available-motd-message": "Du nutzt Syncplay Version {}, aber es gibt eine neuere Version auf https://syncplay.pl", # ClientVersion
# Server notifications
"welcome-server-notification": "Willkommen zum Syncplay-Server, v. {0}", # version
"client-connected-room-server-notification": "{0}({2}) hat den Raum '{1}' betreten", # username, host, room
"client-left-server-notification": "{0} hat den Server verlassen", # name
"no-salt-notification": "WICHTIGER HINWEIS: Damit von dem Server generierte Passwörter für geführte Räume auch nach einem Serverneustart funktionieren, starte den Server mit dem folgenden Parameter: --salt {}", # Salt
# Server arguments
"server-argument-description": 'Anwendung, um mehrere MPlayer, MPC-HC/BE und VLC-Instanzen über das Internet zu synchronisieren. Server',
"server-argument-epilog": 'Wenn keine Optionen angegeben sind, werden die _config-Werte verwendet',
"server-port-argument": 'Server TCP-Port',
"server-password-argument": 'Server Passwort',
"server-isolate-room-argument": 'Sollen die Räume isoliert sein?',
"server-salt-argument": "zufällige Zeichenkette, die zur Erstellung von Passwörtern verwendet wird",
"server-disable-ready-argument": "Bereitschaftsfeature deaktivieren",
"server-motd-argument": "Pfad zur Datei, von der die Nachricht des Tages geladen wird",
"server-chat-argument": "Should chat be disabled?", # TODO: Translate
"server-chat-maxchars-argument": "Maximum number of characters in a chat message (default is {})", # TODO: Translate
"server-maxusernamelength-argument": "Maximum number of characters in a username (default is {})", # TODO: Translate
"server-stats-db-file-argument": "Enable server stats using the SQLite db file provided", # TODO: Translate
"server-startTLS-argument": "Enable TLS connections using the certificate files in the path provided", # TODO: Translate
"server-messed-up-motd-unescaped-placeholders": "Die Nachricht des Tages hat unmaskierte Platzhalter. Alle $-Zeichen sollten verdoppelt werden ($$).",
"server-messed-up-motd-too-long": "Die Nachricht des Tages ist zu lang - Maximal {} Zeichen, aktuell {}.",
# Server errors
"unknown-command-server-error": "Unbekannter Befehl {}", # message
"not-json-server-error": "Kein JSON-String {}", # message
"line-decode-server-error": "Not a utf-8 string", # TODO: Translate
"not-known-server-error": "Der Server muss dich kennen, bevor du diesen Befehl nutzen kannst",
"client-drop-server-error": "Client verloren: {} -- {}", # host, error
"password-required-server-error": "Passwort nötig",
"wrong-password-server-error": "Ungültiges Passwort",
"hello-server-error": "Zu wenige Hello-Argumente",
# Playlists TODO: Translate all this to German
"playlist-selection-changed-notification": "{} changed the playlist selection", # Username
"playlist-contents-changed-notification": "{} updated the playlist", # Username
"cannot-find-file-for-playlist-switch-error": "Could not find file {} in media directories for playlist switch!", # Filename
"cannot-add-duplicate-error": "Could not add second entry for '{}' to the playlist as no duplicates are allowed.", # Filename
"cannot-add-unsafe-path-error": "Could not automatically load {} because it is not on a trusted domain. You can switch to the URL manually by double clicking it in the playlist, and add trusted domains via File->Advanced->Set Trusted Domains. If you right click on a URL then you can add its domain as a trusted domain via the context menu.", # Filename
"sharedplaylistenabled-label": "Enable shared playlists",
"removefromplaylist-menu-label": "Remove from playlist",
"shuffleremainingplaylist-menu-label": "Shuffle remaining playlist",
"shuffleentireplaylist-menu-label": "Shuffle entire playlist",
"undoplaylist-menu-label": "Undo last change to playlist",
"addfilestoplaylist-menu-label": "Add file(s) to bottom of playlist",
"addurlstoplaylist-menu-label": "Add URL(s) to bottom of playlist",
"editplaylist-menu-label": "Edit playlist",
"open-containing-folder": "Open folder containing this file",
"addyourfiletoplaylist-menu-label": "Add your file to playlist",
"addotherusersfiletoplaylist-menu-label": "Add {}'s file to playlist", # [Username]
"addyourstreamstoplaylist-menu-label": "Add your stream to playlist",
"addotherusersstreamstoplaylist-menu-label": "Add {}' stream to playlist", # [Username]
"openusersstream-menu-label": "Open {}'s stream", # [username]'s
"openusersfile-menu-label": "Open {}'s file", # [username]'s
"playlist-instruction-item-message": "Drag file here to add it to the shared playlist.",
"sharedplaylistenabled-tooltip": "Room operators can add files to a synced playlist to make it easy for everyone to watching the same thing. Configure media directories under 'Misc'.",
}
| apache-2.0 | 4,713,580,255,525,210,000 | 74.086957 | 650 | 0.725799 | false |
beiko-lab/gengis | bin/Lib/site-packages/numpy/lib/arraysetops.py | 1 | 12374 | """
Set operations for 1D numeric arrays based on sorting.
:Contains:
ediff1d,
unique,
intersect1d,
setxor1d,
in1d,
union1d,
setdiff1d
:Notes:
For floating point arrays, inaccurate results may appear due to usual round-off
and floating point comparison issues.
Speed could be gained in some operations by an implementation of
sort(), that can provide directly the permutation vectors, avoiding
thus calls to argsort().
To do: Optionally return indices analogously to unique for all functions.
:Author: Robert Cimrman
"""
__all__ = ['ediff1d', 'intersect1d', 'setxor1d', 'union1d', 'setdiff1d',
'unique', 'in1d']
import numpy as np
from numpy.lib.utils import deprecate
def ediff1d(ary, to_end=None, to_begin=None):
"""
The differences between consecutive elements of an array.
Parameters
----------
ary : array_like
If necessary, will be flattened before the differences are taken.
to_end : array_like, optional
Number(s) to append at the end of the returned differences.
to_begin : array_like, optional
Number(s) to prepend at the beginning of the returned differences.
Returns
-------
ediff1d : ndarray
The differences. Loosely, this is ``ary.flat[1:] - ary.flat[:-1]``.
See Also
--------
diff, gradient
Notes
-----
When applied to masked arrays, this function drops the mask information
if the `to_begin` and/or `to_end` parameters are used.
Examples
--------
>>> x = np.array([1, 2, 4, 7, 0])
>>> np.ediff1d(x)
array([ 1, 2, 3, -7])
>>> np.ediff1d(x, to_begin=-99, to_end=np.array([88, 99]))
array([-99, 1, 2, 3, -7, 88, 99])
The returned array is always 1D.
>>> y = [[1, 2, 4], [1, 6, 24]]
>>> np.ediff1d(y)
array([ 1, 2, -3, 5, 18])
"""
ary = np.asanyarray(ary).flat
ed = ary[1:] - ary[:-1]
arrays = [ed]
if to_begin is not None:
arrays.insert(0, to_begin)
if to_end is not None:
arrays.append(to_end)
if len(arrays) != 1:
# We'll save ourselves a copy of a potentially large array in
# the common case where neither to_begin or to_end was given.
ed = np.hstack(arrays)
return ed
def unique(ar, return_index=False, return_inverse=False):
"""
Find the unique elements of an array.
Returns the sorted unique elements of an array. There are two optional
outputs in addition to the unique elements: the indices of the input array
that give the unique values, and the indices of the unique array that
reconstruct the input array.
Parameters
----------
ar : array_like
Input array. This will be flattened if it is not already 1-D.
return_index : bool, optional
If True, also return the indices of `ar` that result in the unique
array.
return_inverse : bool, optional
If True, also return the indices of the unique array that can be used
to reconstruct `ar`.
Returns
-------
unique : ndarray
The sorted unique values.
unique_indices : ndarray, optional
The indices of the first occurrences of the unique values in the
(flattened) original array. Only provided if `return_index` is True.
unique_inverse : ndarray, optional
The indices to reconstruct the (flattened) original array from the
unique array. Only provided if `return_inverse` is True.
See Also
--------
numpy.lib.arraysetops : Module with a number of other functions for
performing set operations on arrays.
Examples
--------
>>> np.unique([1, 1, 2, 2, 3, 3])
array([1, 2, 3])
>>> a = np.array([[1, 1], [2, 3]])
>>> np.unique(a)
array([1, 2, 3])
Return the indices of the original array that give the unique values:
>>> a = np.array(['a', 'b', 'b', 'c', 'a'])
>>> u, indices = np.unique(a, return_index=True)
>>> u
array(['a', 'b', 'c'],
dtype='|S1')
>>> indices
array([0, 1, 3])
>>> a[indices]
array(['a', 'b', 'c'],
dtype='|S1')
Reconstruct the input array from the unique values:
>>> a = np.array([1, 2, 6, 4, 2, 3, 2])
>>> u, indices = np.unique(a, return_inverse=True)
>>> u
array([1, 2, 3, 4, 6])
>>> indices
array([0, 1, 4, 3, 1, 2, 1])
>>> u[indices]
array([1, 2, 6, 4, 2, 3, 2])
"""
try:
ar = ar.flatten()
except AttributeError:
if not return_inverse and not return_index:
items = sorted(set(ar))
return np.asarray(items)
else:
ar = np.asanyarray(ar).flatten()
if ar.size == 0:
if return_inverse and return_index:
return ar, np.empty(0, np.bool), np.empty(0, np.bool)
elif return_inverse or return_index:
return ar, np.empty(0, np.bool)
else:
return ar
if return_inverse or return_index:
if return_index:
perm = ar.argsort(kind='mergesort')
else:
perm = ar.argsort()
aux = ar[perm]
flag = np.concatenate(([True], aux[1:] != aux[:-1]))
if return_inverse:
iflag = np.cumsum(flag) - 1
iperm = perm.argsort()
if return_index:
return aux[flag], perm[flag], iflag[iperm]
else:
return aux[flag], iflag[iperm]
else:
return aux[flag], perm[flag]
else:
ar.sort()
flag = np.concatenate(([True], ar[1:] != ar[:-1]))
return ar[flag]
def intersect1d(ar1, ar2, assume_unique=False):
"""
Find the intersection of two arrays.
Return the sorted, unique values that are in both of the input arrays.
Parameters
----------
ar1, ar2 : array_like
Input arrays.
assume_unique : bool
If True, the input arrays are both assumed to be unique, which
can speed up the calculation. Default is False.
Returns
-------
intersect1d : ndarray
Sorted 1D array of common and unique elements.
See Also
--------
numpy.lib.arraysetops : Module with a number of other functions for
performing set operations on arrays.
Examples
--------
>>> np.intersect1d([1, 3, 4, 3], [3, 1, 2, 1])
array([1, 3])
"""
if not assume_unique:
# Might be faster than unique( intersect1d( ar1, ar2 ) )?
ar1 = unique(ar1)
ar2 = unique(ar2)
aux = np.concatenate( (ar1, ar2) )
aux.sort()
return aux[:-1][aux[1:] == aux[:-1]]
def setxor1d(ar1, ar2, assume_unique=False):
"""
Find the set exclusive-or of two arrays.
Return the sorted, unique values that are in only one (not both) of the
input arrays.
Parameters
----------
ar1, ar2 : array_like
Input arrays.
assume_unique : bool
If True, the input arrays are both assumed to be unique, which
can speed up the calculation. Default is False.
Returns
-------
setxor1d : ndarray
Sorted 1D array of unique values that are in only one of the input
arrays.
Examples
--------
>>> a = np.array([1, 2, 3, 2, 4])
>>> b = np.array([2, 3, 5, 7, 5])
>>> np.setxor1d(a,b)
array([1, 4, 5, 7])
"""
if not assume_unique:
ar1 = unique(ar1)
ar2 = unique(ar2)
aux = np.concatenate( (ar1, ar2) )
if aux.size == 0:
return aux
aux.sort()
# flag = ediff1d( aux, to_end = 1, to_begin = 1 ) == 0
flag = np.concatenate( ([True], aux[1:] != aux[:-1], [True] ) )
# flag2 = ediff1d( flag ) == 0
flag2 = flag[1:] == flag[:-1]
return aux[flag2]
def in1d(ar1, ar2, assume_unique=False):
"""
Test whether each element of a 1-D array is also present in a second array.
Returns a boolean array the same length as `ar1` that is True
where an element of `ar1` is in `ar2` and False otherwise.
Parameters
----------
ar1 : (M,) array_like
Input array.
ar2 : array_like
The values against which to test each value of `ar1`.
assume_unique : bool, optional
If True, the input arrays are both assumed to be unique, which
can speed up the calculation. Default is False.
Returns
-------
in1d : (M,) ndarray, bool
The values `ar1[in1d]` are in `ar2`.
See Also
--------
numpy.lib.arraysetops : Module with a number of other functions for
performing set operations on arrays.
Notes
-----
`in1d` can be considered as an element-wise function version of the
python keyword `in`, for 1-D sequences. ``in1d(a, b)`` is roughly
equivalent to ``np.array([item in b for item in a])``.
.. versionadded:: 1.4.0
Examples
--------
>>> test = np.array([0, 1, 2, 5, 0])
>>> states = [0, 2]
>>> mask = np.in1d(test, states)
>>> mask
array([ True, False, True, False, True], dtype=bool)
>>> test[mask]
array([0, 2, 0])
"""
# Ravel both arrays, behavior for the first array could be different
ar1 = np.asarray(ar1).ravel()
ar2 = np.asarray(ar2).ravel()
# This code is significantly faster when the condition is satisfied.
if len(ar2) < 10 * len(ar1) ** 0.145:
mask = np.zeros(len(ar1), dtype=np.bool)
for a in ar2:
mask |= (ar1 == a)
return mask
# Otherwise use sorting
if not assume_unique:
ar1, rev_idx = np.unique(ar1, return_inverse=True)
ar2 = np.unique(ar2)
ar = np.concatenate( (ar1, ar2) )
# We need this to be a stable sort, so always use 'mergesort'
# here. The values from the first array should always come before
# the values from the second array.
order = ar.argsort(kind='mergesort')
sar = ar[order]
equal_adj = (sar[1:] == sar[:-1])
flag = np.concatenate( (equal_adj, [False] ) )
indx = order.argsort(kind='mergesort')[:len( ar1 )]
if assume_unique:
return flag[indx]
else:
return flag[indx][rev_idx]
def union1d(ar1, ar2):
"""
Find the union of two arrays.
Return the unique, sorted array of values that are in either of the two
input arrays.
Parameters
----------
ar1, ar2 : array_like
Input arrays. They are flattened if they are not already 1D.
Returns
-------
union1d : ndarray
Unique, sorted union of the input arrays.
See Also
--------
numpy.lib.arraysetops : Module with a number of other functions for
performing set operations on arrays.
Examples
--------
>>> np.union1d([-1, 0, 1], [-2, 0, 2])
array([-2, -1, 0, 1, 2])
"""
return unique( np.concatenate( (ar1, ar2) ) )
def setdiff1d(ar1, ar2, assume_unique=False):
"""
Find the set difference of two arrays.
Return the sorted, unique values in `ar1` that are not in `ar2`.
Parameters
----------
ar1 : array_like
Input array.
ar2 : array_like
Input comparison array.
assume_unique : bool
If True, the input arrays are both assumed to be unique, which
can speed up the calculation. Default is False.
Returns
-------
setdiff1d : ndarray
Sorted 1D array of values in `ar1` that are not in `ar2`.
See Also
--------
numpy.lib.arraysetops : Module with a number of other functions for
performing set operations on arrays.
Examples
--------
>>> a = np.array([1, 2, 3, 2, 4, 1])
>>> b = np.array([3, 4, 5, 6])
>>> np.setdiff1d(a, b)
array([1, 2])
"""
if not assume_unique:
ar1 = unique(ar1)
ar2 = unique(ar2)
aux = in1d(ar1, ar2, assume_unique=True)
if aux.size == 0:
return aux
else:
return np.asarray(ar1)[aux == 0]
| gpl-3.0 | 5,377,771,176,711,761,000 | 26.843823 | 79 | 0.552772 | false |
seed2014/kraken | kraken-panel/panel/models.py | 1 | 2573 | from django.db import models
from django.utils import timezone
from datetime import timedelta
# Create your models here.
class Bot(models.Model):
computer_name = models.CharField(max_length=100)
system = models.CharField(max_length=100)
node = models.CharField(max_length=100)
release = models.CharField(max_length=100)
version = models.CharField(max_length=100)
machine = models.CharField(max_length=100)
processor = models.CharField(max_length=100)
first_checkin = models.DateTimeField('first check-in')
last_checkin = models.DateTimeField('last check-in')
ip = models.CharField(max_length=16)
def __str__(self):
return "%s (%s %s)" % (self.computer_name, self.system, self.release)
def artifact_count(self):
return self.artifact_set.count()
def is_alive(self):
# return str(timezone.now())
return self.last_checkin > timezone.now() - timedelta(hours=3) - timedelta(minutes=5)
# class Hunt(models.Model):
# date_found = models.DateTimeField('date found')
# bot = models.ForeignKey(Bot)
# def __str__(self):
# return "%s found %s matches on %s" % (self.bot.computer_name, self.artifact_set.count(), self.date_found)
class Query(models.Model):
QUERY_TYPES = (('hash', 'Cryptographic hash'), ('ctph', 'Context-triggered piecewise hash'), ('fs-regex', 'Filesystem regular expression'))
type = models.CharField(max_length=50, choices=QUERY_TYPES)
body = models.CharField(max_length=200)
def __str__(self):
return "%s (%s)" % (self.body, self.get_type_display())
class Artifact(models.Model):
data = models.CharField(max_length=200)
original_query = models.ForeignKey(Query)
bot = models.ForeignKey(Bot)
last_spotted = models.DateTimeField('last spotted')
def __str__(self):
return "%s" % (self.data)
def get_query_body(self):
return self.original_query.body
class Command(models.Model):
COMMAND_TYPES = (('regget', 'Retrieve arbitrary registry key'), ('regfind','Locate registry key'), ('ramdump', 'Dump volatile memory'), ('getfile', "Retrieve arbitrary file"), ('getfileenc', "Retrieve arbitrary file (encrypted)"))
RESULTS = ((0, 'Unknown'), (1, 'Success'), (-1, 'Error'))
type = models.CharField(max_length=50, choices=COMMAND_TYPES)
target = models.ForeignKey(Bot)
body = models.CharField(max_length=300)
done = models.BooleanField(default=False)
data = models.TextField(default="", null=True, blank=True)
def __str__(self):
return "%s on %s" % (self.get_type_display(), self.target)
class Config(models.Model):
key = models.CharField(max_length=50)
value = models.CharField(max_length=200) | gpl-2.0 | 5,171,206,282,541,833,000 | 31.175 | 231 | 0.710455 | false |
ForestClaw/forestclaw | applications/geoclaw/tohoku/make_plots.py | 1 | 11633 |
"""
Set up the plot figures, axes, and items to be done for each frame.
This module is imported by the plotting routines and then the
function setplot is called to set the plot parameters.
"""
#--------------------------
def setplot(plotdata):
#--------------------------
"""
Specify what is to be plotted at each frame.
Input: plotdata, an instance of clawpack.visclaw.data.ClawPlotData.
Output: a modified version of plotdata.
"""
from clawpack.visclaw import colormaps, geoplot
from numpy import linspace
plotdata.clearfigures() # clear any old figures,axes,items data
# To plot gauge locations on pcolor or contour plot, use this as
# an afteraxis function:
def addgauges(current_data):
from clawpack.visclaw import gaugetools
gaugetools.plot_gauge_locations(current_data.plotdata, \
gaugenos='all', format_string='ko', add_labels=False)
def fixup(current_data):
import pylab
addgauges(current_data)
t = current_data.t
t = t / 3600. # hours
pylab.title('Surface at %4.2f hours' % t, fontsize=20)
#pylab.xticks(fontsize=15)
#pylab.yticks(fontsize=15)
#-----------------------------------------
# Figure for imshow plot
#-----------------------------------------
plotfigure = plotdata.new_plotfigure(name='Domain', figno=1)
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes('imshow')
plotaxes.title = 'Surface'
plotaxes.scaled = True
plotaxes.afteraxes = fixup
# Water
plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
# plotitem.plot_var = geoplot.surface
plotitem.plot_var = geoplot.surface_or_depth
plotitem.imshow_cmap = geoplot.tsunami_colormap
plotitem.imshow_cmin = -0.5
plotitem.imshow_cmax = 0.5
plotitem.add_colorbar = True
plotitem.amr_celledges_show = [0,0,0]
plotitem.patchedges_show = 0
#plotitem.amr_patchedges_show = [1,1,1,0,0] # only coarse levels
# Land
plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
plotitem.plot_var = geoplot.land
plotitem.imshow_cmap = geoplot.land_colors
plotitem.imshow_cmin = 0.0
plotitem.imshow_cmax = 100.0
plotitem.add_colorbar = False
plotitem.amr_celledges_show = [0,0,0]
plotitem.patchedges_show = 0
#plotitem.amr_patchedges_show = [1,1,1,0,0] # only coarse levels
plotaxes.xlimits = 'auto'
plotaxes.ylimits = 'auto'
# add contour lines of bathy if desired:
plotitem = plotaxes.new_plotitem(plot_type='2d_contour')
plotitem.show = False
plotitem.plot_var = geoplot.topo
plotitem.contour_levels = linspace(-2000,0,5)
plotitem.amr_contour_colors = ['y'] # color on each level
plotitem.kwargs = {'linestyles':'solid','linewidths':2}
plotitem.amr_contour_show = [1,0,0]
plotitem.celledges_show = 0
plotitem.patchedges_show = 0
#-----------------------------------------
# Figure for zoom plot
#-----------------------------------------
plotfigure = plotdata.new_plotfigure(name='Maui', figno=2)
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes('imshow')
plotaxes.title = 'Surface'
plotaxes.scaled = True
plotaxes.afteraxes = fixup
# Water
plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
# plotitem.plot_var = geoplot.surface
plotitem.plot_var = geoplot.surface_or_depth
plotitem.imshow_cmap = geoplot.tsunami_colormap
plotitem.imshow_cmin = -1.
plotitem.imshow_cmax = 1.
plotitem.add_colorbar = True
plotitem.amr_celledges_show = [0,0,0]
plotitem.patchedges_show = 0
# Land
plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
plotitem.plot_var = geoplot.land
plotitem.imshow_cmap = geoplot.land_colors
plotitem.imshow_cmin = 0.0
plotitem.imshow_cmax = 100.0
plotitem.add_colorbar = False
plotitem.amr_celledges_show = [0,0,0]
plotitem.patchedges_show = 0
plotaxes.xlimits = [203.2, 204.1]
plotaxes.ylimits = [20.4, 21.3]
# add contour lines of bathy if desired:
plotitem = plotaxes.new_plotitem(plot_type='2d_contour')
plotitem.show = False
plotitem.plot_var = geoplot.topo
plotitem.contour_levels = linspace(-2000,0,5)
plotitem.amr_contour_colors = ['y'] # color on each level
plotitem.kwargs = {'linestyles':'solid','linewidths':2}
plotitem.amr_contour_show = [1,0,0]
plotitem.celledges_show = 0
plotitem.patchedges_show = 0
#-----------------------------------------
# Figure for zoom plot
#-----------------------------------------
plotfigure = plotdata.new_plotfigure(name='Kahului Harbor', figno=3)
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes('imshow')
plotaxes.title = 'Surface'
plotaxes.scaled = True
plotaxes.afteraxes = fixup
# Water
plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
# plotitem.plot_var = geoplot.surface
plotitem.plot_var = geoplot.surface_or_depth
plotitem.imshow_cmap = geoplot.tsunami_colormap
plotitem.imshow_cmin = -0.2
plotitem.imshow_cmax = 0.2
plotitem.add_colorbar = True
plotitem.celledges_show = 0
plotitem.patchedges_show = 0
# Land
plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
plotitem.plot_var = geoplot.land
plotitem.imshow_cmap = geoplot.land_colors
plotitem.imshow_cmin = 0.0
plotitem.imshow_cmax = 10.0
plotitem.add_colorbar = False
plotitem.celledges_show = 0
plotitem.patchedges_show = 0
plotaxes.xlimits = [203.48, 203.57]
plotaxes.ylimits = [20.88, 20.94]
# add contour lines of bathy if desired:
plotitem = plotaxes.new_plotitem(plot_type='2d_contour')
plotitem.show = False
plotitem.plot_var = geoplot.topo
#plotitem.contour_levels = linspace(-2000,0,5)
plotitem.contour_levels = linspace(0,8,9)
plotitem.amr_contour_colors = ['y'] # color on each level
plotitem.kwargs = {'linestyles':'solid','linewidths':2}
plotitem.amr_contour_show = [0,0,0,0,0,1]
plotitem.celledges_show = 0
plotitem.patchedges_show = 0
#-----------------------------------------
# Figures for gauges
#-----------------------------------------
plotfigure = plotdata.new_plotfigure(name='Surface', figno=300, \
type='each_gauge')
plotfigure.clf_each_gauge = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
#plotaxes.axescmd = 'subplot(2,1,1)'
plotaxes.title = 'Surface'
# Plot surface as blue curve:
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
plotitem.plot_var = 3
plotitem.plotstyle = 'b-'
plotitem.kwargs = {'linewidth':2}
# Plot topo as green curve:
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
plotitem.show = False
def gaugetopo(current_data):
q = current_data.q
h = q[0,:]
eta = q[3,:]
topo = eta - h
return topo
plotitem.plot_var = gaugetopo
plotitem.plotstyle = 'g-'
def add_zeroline(current_data):
from pylab import plot, legend, xticks, floor, xlim,ylim
t = current_data.t
#legend(('surface','topography'),loc='lower left')
plot(t, 0*t, 'k')
#n = int(floor(t.max()/1800.)) + 2
#xticks([1800*i for i in range(n)],[str(0.5*i) for i in range(n)])
#xlim(25000,t.max())
#ylim(-0.5,0.5)
print("+++ gaugeno = ",current_data.gaugeno)
def add_legend_eta(current_data):
from pylab import legend
legend(('Surface'),loc='lower left')
add_zeroline(current_data)
plotaxes.ylimits = [-2.5, 2.5]
plotaxes.afteraxes = add_zeroline
plotfigure = plotdata.new_plotfigure(name='Velocities', figno=301, \
type='each_gauge')
plotfigure.clf_each_gauge = True
plotaxes = plotfigure.new_plotaxes()
#plotaxes.axescmd = 'subplot(2,1,2)'
plotaxes.title = 'Velocities'
plotaxes.afteraxes = add_zeroline
# Plot velocity as red curve:
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
plotitem.show = True
def speed(current_data):
from numpy import where, sqrt
h = current_data.q[0,:]
h = where(h>0.01, h, 1.e6)
u = 100. * current_data.q[1,:] / h
v = 100. * current_data.q[2,:] / h
s = sqrt(u**2 + v**2)
return s
plotitem.plot_var = speed
plotitem.plotstyle = 'k-'
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
def uvel(current_data):
from numpy import where, sqrt
h = current_data.q[0,:]
h = where(h>0.01, h, 1.e6)
u = 100. * current_data.q[1,:] / h
return u
plotitem.plot_var = uvel
plotitem.plotstyle = 'r-'
plotitem.kwargs = {'linewidth':2}
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
def vvel(current_data):
from numpy import where, sqrt
h = current_data.q[0,:]
h = where(h>0.01, h, 1.e6)
v = 100. * current_data.q[2,:] / h
return v
plotitem.plot_var = vvel
plotitem.plotstyle = 'g-'
plotitem.kwargs = {'linewidth':2}
def add_legend_vel(current_data):
from pylab import legend
# legend(["u","v"],'upper left')
legend(['Speed','uvel','vvel'],loc='upper left')
add_zeroline(current_data)
plotaxes.ylimits = [-50,50]
plotaxes.afteraxes = add_legend_vel
#-----------------------------------------
# Plots of timing (CPU and wall time):
def make_timing_plots(plotdata):
from clawpack.visclaw import plot_timing_stats
import os,sys
try:
timing_plotdir = plotdata.plotdir + '/_timing_figures'
os.system('mkdir -p %s' % timing_plotdir)
# adjust units for plots based on problem:
units = {'comptime':'seconds', 'simtime':'hours',
'cell':'millions'}
plot_timing_stats.make_plots(outdir=plotdata.outdir,
make_pngs=True,
plotdir=timing_plotdir,
units=units)
except:
print('*** Error making timing plots')
otherfigure = plotdata.new_otherfigure(name='timing plots',
fname='_timing_figures/timing.html')
otherfigure.makefig = make_timing_plots
#-----------------------------------------
# Parameters used only when creating html and/or latex hardcopy
# e.g., via clawpack.visclaw.frametools.printframes:
plotdata.printfigs = True # print figures
plotdata.print_format = 'png' # file format
plotdata.print_framenos = 'all' # list of frames to print
plotdata.print_gaugenos = 'all' # list of gauges to print
plotdata.print_fignos = [1,2,3,300,301] # list of figures to print
plotdata.html = True # create html files of plots?
plotdata.html_homelink = '../README.html' # pointer for top of index
plotdata.latex = False # create latex file of plots?
plotdata.latex_figsperline = 2 # layout of plots
plotdata.latex_framesperline = 1 # layout of plots
plotdata.latex_makepdf = False # also run pdflatex?
plotdata.parallel = False
return plotdata
if __name__=="__main__":
from clawpack.visclaw.plotclaw import plotclaw
plotclaw(outdir='.',setplot=setplot,plotdir='_plots',format='forestclaw')
| bsd-2-clause | -9,101,027,784,213,857,000 | 32.621387 | 81 | 0.600963 | false |
better-dem/box_classify | specify_rect.py | 1 | 2299 | #!/usr/local/bin/python3.6
import tkinter as tk
from tkinter import messagebox as mb
from PIL import Image, ImageTk
class SelectRegionApp(tk.Tk):
def __init__(self, image_filename, image_resize, result):
tk.Tk.__init__(self)
self.result_dict = result
self.x = self.y = 0
im = Image.open(image_filename)
if not image_resize is None:
im = im.resize(image_resize)
self.tk_im = ImageTk.PhotoImage(im)
self.label = tk.Label(self, text="Select a Rectangle To Extract")
self.label.pack(side="top")
self.canvas = tk.Canvas(self, width=self.tk_im.width(), height=self.tk_im.height(), cursor="cross")
self.canvas.pack(side="top", fill="both", expand=True)
self.canvas.bind("<ButtonPress-1>", self.on_button_press)
self.canvas.bind("<B1-Motion>", self.on_move_press)
self.canvas.bind("<ButtonRelease-1>", self.on_button_release)
self.rect = None
self.start_x = None
self.start_y = None
self.canvas.create_image(0,0,anchor="nw",image=self.tk_im)
self.button = tk.Button(self, text="DONE", command=self.done)
self.button.pack(side="bottom")
def done(self):
if self.start_x is None:
mb.showwarning("warning","you need to drag a rectangle over the region you want to extract before continuing")
else:
self.result_dict["rect"] = self.canvas.coords(self.rect)
self.destroy()
def on_button_press(self, event):
if not self.rect is None:
self.canvas.delete(self.rect)
# save mouse drag start position
self.start_x = event.x
self.start_y = event.y
# create rectangle if not yet exist
#if not self.rect:
self.rect = self.canvas.create_rectangle(self.x, self.y, 1, 1, fill="")
def on_move_press(self, event):
curX, curY = (event.x, event.y)
# expand rectangle as you drag the mouse
self.canvas.coords(self.rect, self.start_x, self.start_y, curX, curY)
def on_button_release(self, event):
pass
def select_rectangle(image_filename, image_resize=None):
ans = dict()
app = SelectRegionApp(image_filename, image_resize, ans)
app.mainloop()
return ans['rect']
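# Hedged usage sketch (not part of the original script; the filename and the
# resize tuple below are assumed values):
#
#   if __name__ == "__main__":
#       coords = select_rectangle("example.png", image_resize=(800, 600))
#       print(coords)  # [x0, y0, x1, y1] canvas coordinates of the dragged rectangle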
| gpl-3.0 | -4,937,644,953,297,175,000 | 32.808824 | 122 | 0.618965 | false |
Vykstorm/Othello | bots.py | 1 | 3184 | #!/usr/bin/python
# -*- coding: iso8859-1 -*-
# Author: Víctor Ruiz Gómez
# Description: This script defines several bots that act as players of
# the game Othello.
from game2 import Player
from random import choice
from minmax import MinMax, MinMaxAlphaBeta
from othello import OthelloEval, OthelloEvalDiffPiezas, OthelloEvalComplex
# The following bot picks a move at random from the set of
# moves it can currently make.
class BotPlayerRandom(Player):
def play(self, game, opp_move):
		# Get the set of possible moves.
moves = game.next_moves()
if len(moves) == 0:
return None
		# Pick one at random.
return choice(moves)
def __repr__(self):
return 'Bot Aleatorio'
# The following bot selects the move that captures the most pieces.
class BotPlayerMaxFeed(Player):
def play(self, game, opp_move):
moves = game.next_moves()
if len(moves) == 0:
return None
best_move = moves[0]
max_pieces_eat = abs(game.transform(best_move).score() - game.score())
for i in range(1,len(moves)):
move = moves[i]
pieces_eat = abs(game.transform(move).score() - game.score())
if pieces_eat > max_pieces_eat:
max_pieces_eat = pieces_eat
best_move = move
return best_move
def __repr__(self):
return 'Bot mejor dif. Piezas'
# The following bot uses the MinMax algorithm to select the next move,
# using the piece difference between MIN and MAX as the static evaluation function.
class BotPlayerMinMax(Player):
	# Initializes the instance. The maximum search depth for the
	# MinMax algorithm can be passed as a parameter.
def __init__(self, max_deep, static_eval = None):
if static_eval is None:
static_eval = OthelloEvalDiffPiezas()
self.max_deep = max_deep
self.static_eval = static_eval
def get_static_eval(self):
return self.static_eval
def play(self, game, opp_move):
if len(game.next_moves()) == 0:
return None
minmax = MinMax(game, self.get_static_eval(), self.max_deep)
best_move = minmax()
return best_move
def __repr__(self):
return 'Bot min-max sin poda'
# Same as the previous bot, but using the Min-Max algorithm with alpha-beta pruning.
class BotPlayerMinMaxAlphaBeta(BotPlayerMinMax):
def __init__(self, max_deep):
BotPlayerMinMax.__init__(self, max_deep)
def play(self, game, opp_move):
if len(game.next_moves()) == 0:
return None
minmax = MinMaxAlphaBeta(game, self.get_static_eval(), self.max_deep)
best_move = minmax()
return best_move
def __repr__(self):
return 'Bot min-max con poda'
# This last bot uses the MinMax algorithm with alpha-beta pruning and
# a static evaluation function that takes stable board positions
# (edges and corners) into account.
class BotPlayerComplex(BotPlayerMinMax):
def __init__(self, max_deep):
BotPlayerMinMax.__init__(self, max_deep, OthelloEvalComplex())
def play(self, game, opp_move):
if len(game.next_moves()) == 0:
return None
minmax = MinMaxAlphaBeta(game, self.get_static_eval(), self.max_deep)
best_move = minmax()
return best_move
def __repr__(self):
return 'Bot min-max con poda y mejorado'
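# Hedged usage sketch (illustrative only; the `game` object and its construction
# come from the accompanying othello/game2 modules and are assumed here):
#
#   bot_a = BotPlayerComplex(max_deep=4)
#   bot_b = BotPlayerRandom()
#   move = bot_a.play(game, opp_move=None)  # one of game.next_moves(), or None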
| mit | -6,708,081,805,361,755,000 | 28.915094 | 85 | 0.712709 | false |
vabs/outpost-public-api | eve/endpoints.py | 1 | 3032 | # -*- coding: utf-8 -*-
"""
eve.endpoints
~~~~~~~~~~~~~
This module implements the API endpoints. Each endpoint (resource, item,
home) invokes the appropriate method handler, returning its response
to the client, properly rendered.
:copyright: (c) 2013 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve.methods import get, getitem, post, patch, delete, delete_resource, put
from eve.methods.common import ratelimit
from eve.render import send_response
from eve.auth import requires_auth
from eve.utils import resource_uri, config, request_method, \
debug_error_message
from flask import abort
def collections_endpoint(url):
""" Resource endpoint handler
:param url: the url that led here
.. versionchanged:: 0.0.7
Using 'utils.request_method' helper function now.
.. versionchanged:: 0.0.6
Support for HEAD requests
.. versionchanged:: 0.0.2
Support for DELETE resource method.
"""
resource = config.RESOURCES[url]
response = None
method = request_method()
if method in ('GET', 'HEAD'):
response = get(resource)
elif method == 'POST':
response = post(resource)
elif method == 'DELETE':
response = delete_resource(resource)
elif method == 'OPTIONS':
send_response(resource, response)
else:
abort(405)
return send_response(resource, response)
def item_endpoint(url, **lookup):
""" Item endpoint handler
:param url: the url that led here
:param lookup: the query
.. versionchanged:: 0.1.0
Support for PUT method.
.. versionchanged:: 0.0.7
Using 'utils.request_method' helper function now.
.. versionchanged:: 0.0.6
Support for HEAD requests
"""
resource = config.RESOURCES[url]
response = None
method = request_method()
if method in ('GET', 'HEAD'):
response = getitem(resource, **lookup)
elif method == 'PATCH':
response = patch(resource, **lookup)
elif method == 'PUT':
response = put(resource, **lookup)
elif method == 'DELETE':
response = delete(resource, **lookup)
elif method == 'OPTIONS':
send_response(resource, response)
else:
abort(405)
return send_response(resource, response)
@ratelimit()
@requires_auth('home')
def home_endpoint():
""" Home/API entry point. Will provide links to each available resource
.. versionchanged:: 0.1.0
Support for optional HATEOAS.
"""
if config.HATEOAS:
response = {}
links = []
for resource in config.DOMAIN.keys():
links.append({'href': '%s' % resource_uri(resource),
'title': '%s' % config.URLS[resource]})
response['_links'] = {'child': links}
return send_response(None, (response,))
else:
abort(404, debug_error_message("HATEOAS is disabled so we have no data"
" to display at the API homepage."))
| gpl-2.0 | -5,097,041,677,335,367,000 | 27.603774 | 79 | 0.620053 | false |
NetApp/manila | manila/tests/api/views/test_share_networks.py | 1 | 8974 | # Copyright (c) 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
from manila.api.views import share_networks
from manila import test
from manila.tests.api import fakes
@ddt.ddt
class ViewBuilderTestCase(test.TestCase):
def setUp(self):
super(ViewBuilderTestCase, self).setUp()
self.builder = share_networks.ViewBuilder()
def test__collection_name(self):
self.assertEqual('share_networks', self.builder._collection_name)
@ddt.data(
{'id': 'fake_sn_id', 'name': 'fake_sn_name'},
{'id': 'fake_sn_id', 'name': 'fake_sn_name', 'fake_extra_key': 'foo'},
)
def test_build_share_network_v_2_18(self, sn):
req = fakes.HTTPRequest.blank('/share-networks', version="2.18")
expected_keys = (
'id', 'name', 'project_id', 'created_at', 'updated_at',
'neutron_net_id', 'neutron_subnet_id', 'nova_net_id',
'network_type', 'segmentation_id', 'cidr', 'ip_version',
'gateway', 'description')
result = self.builder.build_share_network(req, sn)
self.assertEqual(1, len(result))
self.assertIn('share_network', result)
self.assertEqual(sn['id'], result['share_network']['id'])
self.assertEqual(sn['name'], result['share_network']['name'])
self.assertEqual(len(expected_keys), len(result['share_network']))
for key in expected_keys:
self.assertIn(key, result['share_network'])
@ddt.data(
[],
[dict(id='fake_id',
name='fake_name',
project_id='fake_project_id',
created_at='fake_created_at',
updated_at='fake_updated_at',
neutron_net_id='fake_neutron_net_id',
neutron_subnet_id='fake_neutron_subnet_id',
nova_net_id='fake_nova_net_id',
network_type='fake_network_type',
segmentation_id='fake_segmentation_id',
cidr='fake_cidr',
ip_version='fake_ip_version',
gateway='fake_gateway',
description='fake_description'),
dict(id='fake_id2', name='fake_name2')],
)
def test_build_share_networks_with_details_v_2_18(self, share_networks):
req = fakes.HTTPRequest.blank('/share-networks', version="2.18")
expected = []
for share_network in share_networks:
expected.append(dict(
id=share_network.get('id'),
name=share_network.get('name'),
project_id=share_network.get('project_id'),
created_at=share_network.get('created_at'),
updated_at=share_network.get('updated_at'),
neutron_net_id=share_network.get('neutron_net_id'),
neutron_subnet_id=share_network.get('neutron_subnet_id'),
nova_net_id=share_network.get('nova_net_id'),
network_type=share_network.get('network_type'),
segmentation_id=share_network.get('segmentation_id'),
cidr=share_network.get('cidr'),
ip_version=share_network.get('ip_version'),
gateway=share_network.get('gateway'),
description=share_network.get('description')))
expected = {'share_networks': expected}
result = self.builder.build_share_networks(
req, share_networks, True)
self.assertEqual(expected, result)
@ddt.data(
[],
[{'id': 'foo', 'name': 'bar'}],
[{'id': 'id1', 'name': 'name1'}, {'id': 'id2', 'name': 'name2'}],
[{'id': 'id1', 'name': 'name1'},
{'id': 'id2', 'name': 'name2', 'fake': 'I should not be returned'}],
)
def test_build_share_networks_without_details_v_2_18(self,
share_networks):
req = fakes.HTTPRequest.blank('/share-networks', version="2.18")
expected = []
for share_network in share_networks:
expected.append(dict(
id=share_network.get('id'), name=share_network.get('name')))
expected = {'share_networks': expected}
result = self.builder.build_share_networks(
req, share_networks, False)
self.assertEqual(expected, result)
@ddt.data(
{'id': 'fake_sn_id', 'name': 'fake_sn_name'},
{'id': 'fake_sn_id', 'name': 'fake_sn_name', 'fake_extra_key': 'foo'},
)
def test_build_share_network_v_2_20(self, sn):
req = fakes.HTTPRequest.blank('/share-networks', version="2.20")
expected_keys = (
'id', 'name', 'project_id', 'created_at', 'updated_at',
'neutron_net_id', 'neutron_subnet_id', 'nova_net_id',
'network_type', 'segmentation_id', 'cidr', 'ip_version',
'gateway', 'description', 'mtu')
result = self.builder.build_share_network(req, sn)
self.assertEqual(1, len(result))
self.assertIn('share_network', result)
self.assertEqual(sn['id'], result['share_network']['id'])
self.assertEqual(sn['name'], result['share_network']['name'])
self.assertEqual(len(expected_keys), len(result['share_network']))
for key in expected_keys:
self.assertIn(key, result['share_network'])
for key in result['share_network']:
self.assertIn(key, expected_keys)
@ddt.data(
[], [{
'id': 'fake_id',
'name': 'fake_name',
'project_id': 'fake_project_id',
'created_at': 'fake_created_at',
'updated_at': 'fake_updated_at',
'neutron_net_id': 'fake_neutron_net_id',
'neutron_subnet_id': 'fake_neutron_subnet_id',
'nova_net_id': 'fake_nova_net_id',
'network_type': 'fake_network_type',
'segmentation_id': 'fake_segmentation_id',
'cidr': 'fake_cidr',
'ip_version': 'fake_ip_version',
'gateway': 'fake_gateway',
'description': 'fake_description',
'mtu': 1509
},
{
'id': 'fake_id2',
'name': 'fake_name2'
}],
)
def test_build_share_networks_with_details_v_2_20(self, share_networks):
req = fakes.HTTPRequest.blank('/share-networks', version="2.20")
expected = []
for share_network in share_networks:
expected.append({
'id': share_network.get('id'),
'name': share_network.get('name'),
'project_id': share_network.get('project_id'),
'created_at': share_network.get('created_at'),
'updated_at': share_network.get('updated_at'),
'neutron_net_id': share_network.get('neutron_net_id'),
'neutron_subnet_id': share_network.get('neutron_subnet_id'),
'nova_net_id': share_network.get('nova_net_id'),
'network_type': share_network.get('network_type'),
'segmentation_id': share_network.get('segmentation_id'),
'cidr': share_network.get('cidr'),
'ip_version': share_network.get('ip_version'),
'gateway': share_network.get('gateway'),
'description': share_network.get('description'),
'mtu': share_network.get('mtu'),
})
expected = {'share_networks': expected}
result = self.builder.build_share_networks(
req, share_networks, True)
self.assertEqual(expected, result)
@ddt.data(
[],
[{'id': 'foo', 'name': 'bar'}],
[{'id': 'id1', 'name': 'name1'}, {'id': 'id2', 'name': 'name2'}],
[{'id': 'id1', 'name': 'name1'},
{'id': 'id2', 'name': 'name2', 'fake': 'I should not be returned'}],
)
def test_build_share_networks_without_details_v_2_20(self,
share_networks):
req = fakes.HTTPRequest.blank('/share-networks', version="2.20")
expected = []
for share_network in share_networks:
expected.append({
'id': share_network.get('id'),
'name': share_network.get('name')
})
expected = {'share_networks': expected}
result = self.builder.build_share_networks(
req, share_networks, False)
self.assertEqual(expected, result)
| apache-2.0 | -7,470,150,884,323,348,000 | 40.546296 | 78 | 0.551036 | false |
googleapis/googleapis-gen | google/cloud/osconfig/agentendpoint/v1/osconfig-agentendpoint-v1-py/scripts/fixup_agentendpoint_v1_keywords.py | 1 | 6593 | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)
def partition(
predicate: Callable[[Any], bool],
iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
"""A stable, out-of-place partition."""
results = ([], [])
for i in iterator:
results[int(predicate(i))].append(i)
# Returns trueList, falseList
return results[1], results[0]
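# For example (illustrative values, not part of the generated script):
#   odds, evens = partition(lambda n: n % 2, [1, 2, 3, 4])
#   # -> odds == [1, 3], evens == [2, 4]  (trueList first, falseList second)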
class agentendpointCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
'receive_task_notification': ('instance_id_token', 'agent_version', ),
'register_agent': ('instance_id_token', 'agent_version', 'supported_capabilities', ),
'report_inventory': ('instance_id_token', 'inventory_checksum', 'inventory', ),
'report_task_complete': ('instance_id_token', 'task_id', 'task_type', 'error_message', 'apply_patches_task_output', 'exec_step_task_output', 'apply_config_task_output', ),
'report_task_progress': ('instance_id_token', 'task_id', 'task_type', 'apply_patches_task_progress', 'exec_step_task_progress', 'apply_config_task_progress', ),
'start_next_task': ('instance_id_token', ),
}
def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
try:
key = original.func.attr.value
kword_params = self.METHOD_TO_PARAMS[key]
except (AttributeError, KeyError):
# Either not a method from the API or too convoluted to be sure.
return updated
# If the existing code is valid, keyword args come after positional args.
# Therefore, all positional args must map to the first parameters.
args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
if any(k.keyword.value == "request" for k in kwargs):
# We've already fixed this file, don't fix it again.
return updated
kwargs, ctrl_kwargs = partition(
lambda a: not a.keyword.value in self.CTRL_PARAMS,
kwargs
)
args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
request_arg = cst.Arg(
value=cst.Dict([
cst.DictElement(
cst.SimpleString("'{}'".format(name)),
cst.Element(value=arg.value)
)
# Note: the args + kwargs looks silly, but keep in mind that
# the control parameters had to be stripped out, and that
# those could have been passed positionally or by keyword.
for name, arg in zip(kword_params, args + kwargs)]),
keyword=cst.Name("request")
)
return updated.with_changes(
args=[request_arg] + ctrl_kwargs
)
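# Illustrative before/after of the rewrite performed by leave_Call (hedged
# sketch; the variable names are assumptions):
#
#   client.start_next_task(token)                                  # positional call
#   client.start_next_task(request={'instance_id_token': token})   # rewritten form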
def fix_files(
in_dir: pathlib.Path,
out_dir: pathlib.Path,
*,
transformer=agentendpointCallTransformer(),
):
"""Duplicate the input dir to the output dir, fixing file method calls.
Preconditions:
* in_dir is a real directory
* out_dir is a real, empty directory
"""
pyfile_gen = (
pathlib.Path(os.path.join(root, f))
for root, _, files in os.walk(in_dir)
for f in files if os.path.splitext(f)[1] == ".py"
)
for fpath in pyfile_gen:
with open(fpath, 'r') as f:
src = f.read()
# Parse the code and insert method call fixes.
tree = cst.parse_module(src)
updated = tree.visit(transformer)
# Create the path and directory structure for the new file.
updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
updated_path.parent.mkdir(parents=True, exist_ok=True)
# Generate the updated source file at the corresponding path.
with open(updated_path, 'w') as f:
f.write(updated.code)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="""Fix up source that uses the agentendpoint client library.
The existing sources are NOT overwritten but are copied to output_dir with changes made.
Note: This tool operates at a best-effort level at converting positional
parameters in client method calls to keyword based parameters.
Cases where it WILL FAIL include
A) * or ** expansion in a method call.
B) Calls via function or method alias (includes free function calls)
C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
These all constitute false negatives. The tool will also detect false
positives when an API method shares a name with another method.
""")
parser.add_argument(
'-d',
'--input-directory',
required=True,
dest='input_dir',
help='the input directory to walk for python files to fix up',
)
parser.add_argument(
'-o',
'--output-directory',
required=True,
dest='output_dir',
help='the directory to output files fixed via un-flattening',
)
args = parser.parse_args()
input_dir = pathlib.Path(args.input_dir)
output_dir = pathlib.Path(args.output_dir)
if not input_dir.is_dir():
print(
f"input directory '{input_dir}' does not exist or is not a directory",
file=sys.stderr,
)
sys.exit(-1)
if not output_dir.is_dir():
print(
f"output directory '{output_dir}' does not exist or is not a directory",
file=sys.stderr,
)
sys.exit(-1)
if os.listdir(output_dir):
print(
f"output directory '{output_dir}' is not empty",
file=sys.stderr,
)
sys.exit(-1)
fix_files(input_dir, output_dir)
| apache-2.0 | -4,318,478,684,794,081,000 | 35.425414 | 181 | 0.62263 | false |
SmartDeveloperHub/sdh-curator | sdh/curator/actions/ext/enrichment.py | 1 | 10998 | """
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
This file is part of the Smart Developer Hub Project:
http://www.smartdeveloperhub.org
Center for Open Middleware
http://www.centeropenmiddleware.com/
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Copyright (C) 2015 Center for Open Middleware.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
"""
import logging
import uuid
from datetime import datetime
import base64
from agora.client.execution import AGORA
from sdh.curator.actions.core.fragment import FragmentRequest, FragmentAction, FragmentResponse, FragmentSink
from sdh.curator.actions.core import CURATOR, TYPES, RDF, XSD, FOAF
from sdh.curator.actions.core.utils import CGraph
from rdflib import BNode, Literal, URIRef, RDFS
from sdh.curator.store import r
from sdh.curator.actions.core.delivery import CURATOR_UUID
from sdh.curator.daemons.fragment import FragmentPlugin
from sdh.curator.store.triples import cache
import shortuuid
__author__ = 'Fernando Serena'
log = logging.getLogger('sdh.curator.actions.enrichment')
def get_fragment_enrichments(fid):
return [EnrichmentData(eid) for eid in r.smembers('fragments:{}:enrichments'.format(fid))]
def generate_enrichment_hash(target, links):
links = '|'.join(sorted([str(pr) for (pr, _) in links]))
eid = base64.b64encode('~'.join([target, links]))
return eid
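# Hedged illustration (assumed values): for a target <http://example.org/r1> and
# links such as [(FOAF.name, v0), (RDFS.label, v1)], the id is
# base64("http://example.org/r1" + "~" + "|".join(sorted predicate URIs)),
# so the same target/link combination always maps to the same enrichment hash.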
def register_enrichment(pipe, fid, target, links):
e_hash = generate_enrichment_hash(target, links)
if not r.sismember('enrichments', e_hash):
eid = shortuuid.uuid()
enrichment_data = EnrichmentData(eid, fid, target, links)
enrichment_data.save(pipe)
pipe.sadd('enrichments', e_hash)
pipe.set('map:enrichments:{}'.format(e_hash), eid)
else:
eid = r.get('map:enrichments:{}'.format(e_hash))
return eid
class EnrichmentData(object):
def __init__(self, eid, fid=None, target=None, links=None):
if eid is None:
raise ValueError('Cannot create an enrichment data object without an identifier')
self.links = links
self.target = target
self.fragment_id = fid
self.enrichment_id = eid
self._enrichment_key = 'enrichments:{}'.format(self.enrichment_id)
if not any([fid, target, links]):
self.load()
def save(self, pipe):
pipe.hset('{}'.format(self._enrichment_key), 'target', self.target)
pipe.hset('{}'.format(self._enrichment_key), 'fragment_id', self.fragment_id)
pipe.sadd('fragments:{}:enrichments'.format(self.fragment_id), self.enrichment_id)
pipe.sadd('{}:links'.format(self._enrichment_key), *self.links)
pipe.hmset('{}:links:status'.format(self._enrichment_key),
dict((pr, False) for (pr, _) in self.links))
def load(self):
dict_fields = r.hgetall(self._enrichment_key)
self.target = URIRef(dict_fields.get('target', None))
self.fragment_id = dict_fields.get('fragment_id', None)
self.links = map(lambda (link, v): (URIRef(link), v), [eval(pair_str) for pair_str in
r.smembers('{}:links'.format(
self._enrichment_key))])
def set_link(self, link):
with r.pipeline(transaction=True) as p:
p.multi()
p.hset('{}:links:status'.format(self._enrichment_key), str(link), True)
p.execute()
@property
def completed(self):
return all([eval(value) for value in r.hgetall('{}:links:status'.format(self._enrichment_key)).values()])
class EnrichmentPlugin(FragmentPlugin):
@property
def sink_class(self):
return EnrichmentSink
def sink_aware(self):
return False
def consume(self, fid, (c, s, p, o), graph, *args):
enrichments = get_fragment_enrichments(fid)
for e in enrichments:
var_candidate = list(graph.objects(c, AGORA.subject))[0]
if (var_candidate, RDF.type, AGORA.Variable) in graph:
target = e.target
links = dict(map(lambda (l, v): (v, l), e.links))
var_label = str(list(graph.objects(var_candidate, RDFS.label))[0])
if var_label in links:
link = links[var_label]
if (target, link, s) not in cache.get_context('#enrichment'):
e.set_link(link)
cache.get_context('#enrichment').add((target, link, s))
print u'{} {} {} .'.format(target.n3(), link.n3(graph.namespace_manager), s.n3())
def complete(self, fid, *args):
# TODO: check if all links are set
pass
FragmentPlugin.register(EnrichmentPlugin)
class EnrichmentRequest(FragmentRequest):
def __init__(self):
super(EnrichmentRequest, self).__init__()
self._target_resource = None
self._target_links = set([])
def _extract_content(self):
super(EnrichmentRequest, self)._extract_content()
q_res = self._graph.query("""SELECT ?node ?t WHERE {
?node a curator:EnrichmentRequest;
curator:targetResource ?t
}""")
q_res = list(q_res)
if len(q_res) != 1:
raise SyntaxError('Invalid enrichment request')
request_fields = q_res.pop()
if not all(request_fields):
raise ValueError('Missing fields for enrichment request')
if request_fields[0] != self._request_node:
raise SyntaxError('Request node does not match')
(self._target_resource,) = request_fields[1:]
log.debug("""Parsed attributes of an enrichment request:
-target resource: {}""".format(self._target_resource))
target_pattern = self._graph.predicate_objects(self._target_resource)
for (pr, req_object) in target_pattern:
if (req_object, RDF.type, CURATOR.Variable) in self._graph:
self._target_links.add((pr, req_object))
enrich_properties = set([pr for (pr, _) in self._target_links])
if not enrich_properties:
raise ValueError('There is nothing to enrich')
log.debug(
'<{}> is requested to be enriched with values for the following properties:\n{}'.format(
self._target_resource,
'\n'.join(enrich_properties)))
@property
def target_resource(self):
return self._target_resource
@property
def target_links(self):
return self._target_links.copy()
class EnrichmentAction(FragmentAction):
def __init__(self, message):
self.__request = EnrichmentRequest()
self.__sink = EnrichmentSink()
super(EnrichmentAction, self).__init__(message)
@property
def sink(self):
return self.__sink
@classmethod
def response_class(cls):
return EnrichmentResponse
@property
def request(self):
return self.__request
def submit(self):
try:
super(EnrichmentAction, self).submit()
except Exception as e:
log.debug('Bad request: {}'.format(e.message))
self._reply_failure(e.message)
class EnrichmentSink(FragmentSink):
def _remove(self, pipe):
pipe.srem('enrichments', self._request_id)
super(FragmentSink, self)._remove(pipe)
def __init__(self):
super(EnrichmentSink, self).__init__()
self.__target_links = None
self.__target_resource = None
self._enrichment_id = None
self._enrichment_data = None
def _save(self, action):
super(EnrichmentSink, self)._save(action)
variable_links = [(str(pr), self.map(self._variables_dict[v])) for (pr, v) in action.request.target_links]
enrichment_id = register_enrichment(self._pipe, self._fragment_id, action.request.target_resource,
variable_links)
self._pipe.hset('{}'.format(self._request_key), 'enrichment_id', enrichment_id)
self._dict_fields['enrichment_id'] = enrichment_id
def _load(self):
super(EnrichmentSink, self)._load()
@property
def enrichment_data(self):
if self._enrichment_data is None:
self._enrichment_data = EnrichmentData(self.enrichment_id)
return self._enrichment_data
@property
def backed(self):
return self.fragment_updated_on is not None and EnrichmentData(
self.enrichment_id).completed
class EnrichmentResponse(FragmentResponse):
def __init__(self, rid):
self.__sink = EnrichmentSink()
self.__sink.load(rid)
super(EnrichmentResponse, self).__init__(rid)
@property
def sink(self):
return self.__sink
def _build(self):
log.debug('Building a response to request number {}'.format(self._request_id))
graph = CGraph()
resp_node = BNode('#response')
graph.add((resp_node, RDF.type, CURATOR.EnrichmentResponse))
graph.add((resp_node, CURATOR.messageId, Literal(str(uuid.uuid4()), datatype=TYPES.UUID)))
graph.add((resp_node, CURATOR.responseTo, Literal(self.sink.message_id, datatype=TYPES.UUID)))
graph.add((resp_node, CURATOR.responseNumber, Literal("1", datatype=XSD.unsignedLong)))
graph.add((resp_node, CURATOR.targetResource, self.sink.enrichment_data.target))
graph.add((resp_node, CURATOR.submittedOn, Literal(datetime.now())))
curator_node = BNode('#curator')
graph.add((resp_node, CURATOR.submittedBy, curator_node))
graph.add((curator_node, RDF.type, FOAF.Agent))
graph.add((curator_node, CURATOR.agentId, CURATOR_UUID))
addition_node = BNode('#addition')
graph.add((resp_node, CURATOR.additionTarget, addition_node))
graph.add((addition_node, RDF.type, CURATOR.Variable))
for link, v in self.sink.enrichment_data.links:
trs = self.graph().triples((self.sink.enrichment_data.target, link, None))
for (_, _, o) in trs:
graph.add((addition_node, link, o))
yield graph.serialize(format='turtle'), {}
| apache-2.0 | 7,141,783,065,942,430,000 | 37.725352 | 114 | 0.601655 | false |
Ziqi-Li/bknqgis | bokeh/bokeh/plotting/helpers.py | 1 | 24268 | from __future__ import absolute_import
from collections import Iterable, OrderedDict, Sequence
import difflib
import itertools
import re
import textwrap
import warnings
import numpy as np
import sys
from six import string_types, reraise
from ..models import (
BoxSelectTool, BoxZoomTool, CategoricalAxis,
TapTool, CrosshairTool, DataRange1d, DatetimeAxis,
FactorRange, Grid, HelpTool, HoverTool, LassoSelectTool, Legend, LegendItem, LinearAxis,
LogAxis, PanTool, ZoomInTool, ZoomOutTool, PolySelectTool, ContinuousTicker,
SaveTool, Range, Range1d, UndoTool, RedoTool, ResetTool, ResizeTool, Tool,
WheelPanTool, WheelZoomTool, ColumnarDataSource, ColumnDataSource, GlyphRenderer,
LogScale, LinearScale, CategoricalScale)
from ..core.properties import ColorSpec, Datetime, value, field
from ..transform import stack
from ..util.dependencies import import_optional
from ..util.deprecation import deprecated
from ..util.string import nice_join
pd = import_optional('pandas')
DEFAULT_PALETTE = ["#f22c40", "#5ab738", "#407ee7", "#df5320", "#00ad9c", "#c33ff3"]
def _stack(stackers, spec0, spec1, **kw):
for name in (spec0, spec1):
if name in kw:
raise ValueError("Stack property '%s' cannot appear in keyword args" % name)
lengths = { len(x) for x in kw.values() if isinstance(x, (list, tuple)) }
# lengths will be empty if there are no kwargs supplied at all
if len(lengths) > 0:
if len(lengths) != 1:
raise ValueError("Keyword argument sequences for broadcasting must all be the same lengths. Got lengths: %r" % sorted(list(lengths)))
if lengths.pop() != len(stackers):
raise ValueError("Keyword argument sequences for broadcasting must be the same length as stackers")
s0 = []
s1 = []
_kw = []
for i, val in enumerate(stackers):
d = {}
s0 = list(s1)
s1.append(val)
d[spec0] = stack(*s0)
d[spec1] = stack(*s1)
for k, v in kw.items():
if isinstance(v, (list, tuple)):
d[k] = v[i]
else:
d[k] = v
_kw.append(d)
return _kw
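# Hedged sketch of how _stack broadcasts (assumed field names and colors):
#   _stack(["y1", "y2"], "bottom", "top", color=["blue", "red"])
#   -> [{'bottom': stack(),     'top': stack('y1'),       'color': 'blue'},
#       {'bottom': stack('y1'), 'top': stack('y1', 'y2'), 'color': 'red'}]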
def get_default_color(plot=None):
colors = [
"#1f77b4",
"#ff7f0e", "#ffbb78",
"#2ca02c", "#98df8a",
"#d62728", "#ff9896",
"#9467bd", "#c5b0d5",
"#8c564b", "#c49c94",
"#e377c2", "#f7b6d2",
"#7f7f7f",
"#bcbd22", "#dbdb8d",
"#17becf", "#9edae5"
]
if plot:
renderers = plot.renderers
renderers = [x for x in renderers if x.__view_model__ == "GlyphRenderer"]
num_renderers = len(renderers)
return colors[num_renderers]
else:
return colors[0]
def get_default_alpha(plot=None):
return 1.0
def _pop_renderer_args(kwargs):
result = dict(data_source=kwargs.pop('source', ColumnDataSource()))
for attr in ['name', 'x_range_name', 'y_range_name', 'level', 'view', 'visible', 'muted']:
val = kwargs.pop(attr, None)
if val:
result[attr] = val
return result
def _pop_colors_and_alpha(glyphclass, kwargs, prefix="", default_alpha=1.0):
"""
Given a kwargs dict, a prefix, and a default value, looks for different
color and alpha fields of the given prefix, and fills in the default value
if it doesn't exist.
"""
result = dict()
# TODO: The need to do this and the complexity of managing this kind of
# thing throughout the codebase really suggests that we need to have
# a real stylesheet class, where defaults and Types can declaratively
# substitute for this kind of imperative logic.
color = kwargs.pop(prefix + "color", get_default_color())
for argname in ("fill_color", "line_color"):
if argname not in glyphclass.properties():
continue
result[argname] = kwargs.pop(prefix + argname, color)
# NOTE: text fill color should really always default to black, hard coding
# this here now until the stylesheet solution exists
if "text_color" in glyphclass.properties():
result["text_color"] = kwargs.pop(prefix + "text_color", "black")
alpha = kwargs.pop(prefix + "alpha", default_alpha)
for argname in ("fill_alpha", "line_alpha", "text_alpha"):
if argname not in glyphclass.properties():
continue
result[argname] = kwargs.pop(prefix + argname, alpha)
return result
def _get_legend_item_label(kwargs):
legend = kwargs.pop('legend', None)
source = kwargs.get('source')
legend_item_label = None
if legend:
if isinstance(legend, string_types):
# Do the simple thing first
legend_item_label = value(legend)
# But if there's a source - try and do something smart
if source and hasattr(source, 'column_names'):
if legend in source.column_names:
legend_item_label = field(legend)
else:
legend_item_label = legend
return legend_item_label
_GLYPH_SOURCE_MSG = """
Supplying a user-defined data source AND iterable values to glyph methods is deprecated.
See https://github.com/bokeh/bokeh/issues/2056 for more information.
"""
def _process_sequence_literals(glyphclass, kwargs, source, is_user_source):
dataspecs = glyphclass.dataspecs_with_props()
for var, val in kwargs.items():
# ignore things that are not iterable
if not isinstance(val, Iterable):
continue
# pass dicts (i.e., values or fields) on as-is
if isinstance(val, dict):
continue
# let any non-dataspecs do their own validation (e.g., line_dash properties)
if var not in dataspecs:
continue
# strings sequences are handled by the dataspec as-is
if isinstance(val, string_types):
continue
# similarly colorspecs handle color tuple sequences as-is
if (isinstance(dataspecs[var].property, ColorSpec) and isinstance(val, tuple)):
continue
if isinstance(val, np.ndarray) and val.ndim != 1:
raise RuntimeError("Columns need to be 1D (%s is not)" % var)
if is_user_source:
deprecated(_GLYPH_SOURCE_MSG)
source.add(val, name=var)
kwargs[var] = var
def _make_glyph(glyphclass, kws, extra):
if extra is None:
return None
kws = kws.copy()
kws.update(extra)
return glyphclass(**kws)
def _update_legend(plot, legend_item_label, glyph_renderer):
# Get the plot's legend
legends = plot.select(type=Legend)
if not legends:
legend = Legend()
plot.add_layout(legend)
elif len(legends) == 1:
legend = legends[0]
else:
raise RuntimeError("Plot %s configured with more than one legend renderer" % plot)
# If there is an existing legend with a matching label, then put the
# renderer on that (if the source matches). Otherwise add a new one.
added = False
for item in legend.items:
if item.label == legend_item_label:
if item.label.get('value'):
item.renderers.append(glyph_renderer)
added = True
break
if item.label.get('field') and \
glyph_renderer.data_source is item.renderers[0].data_source:
item.renderers.append(glyph_renderer)
added = True
break
if not added:
new_item = LegendItem(label=legend_item_label, renderers=[glyph_renderer])
legend.items.append(new_item)
def _get_range(range_input):
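    # Accepts None, a pandas GroupBy, an existing Range model, a sequence of
    # factor strings (e.g. ["a", "b", "c"]), or a two-element (start, end) pair
    # such as (0, 10); anything else raises ValueError below.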
if range_input is None:
return DataRange1d()
if pd and isinstance(range_input, pd.core.groupby.GroupBy):
return FactorRange(factors=sorted(list(range_input.groups.keys())))
if isinstance(range_input, Range):
return range_input
if isinstance(range_input, Sequence):
if all(isinstance(x, string_types) for x in range_input):
return FactorRange(factors=list(range_input))
if len(range_input) == 2:
try:
return Range1d(start=range_input[0], end=range_input[1])
except ValueError: # @mattpap suggests ValidationError instead
pass
raise ValueError("Unrecognized range input: '%s'" % str(range_input))
def _get_scale(range_input, axis_type):
if isinstance(range_input, (DataRange1d, Range1d)) and axis_type in ["linear", "datetime", "auto", None]:
return LinearScale()
elif isinstance(range_input, (DataRange1d, Range1d)) and axis_type == "log":
return LogScale()
elif isinstance(range_input, FactorRange):
return CategoricalScale()
else:
raise ValueError("Unable to determine proper scale for: '%s'" % str(range_input))
def _get_axis_class(axis_type, range_input):
if axis_type is None:
return None
elif axis_type == "linear":
return LinearAxis
elif axis_type == "log":
return LogAxis
elif axis_type == "datetime":
return DatetimeAxis
elif axis_type == "auto":
if isinstance(range_input, FactorRange):
return CategoricalAxis
elif isinstance(range_input, Range1d):
try:
# Easier way to validate type of Range1d parameters
Datetime.validate(Datetime(), range_input.start)
return DatetimeAxis
except ValueError:
pass
return LinearAxis
else:
raise ValueError("Unrecognized axis_type: '%r'" % axis_type)
def _get_num_minor_ticks(axis_class, num_minor_ticks):
if isinstance(num_minor_ticks, int):
if num_minor_ticks <= 1:
raise ValueError("num_minor_ticks must be > 1")
return num_minor_ticks
if num_minor_ticks is None:
return 0
if num_minor_ticks == 'auto':
if axis_class is LogAxis:
return 10
return 5
_known_tools = {
"pan": lambda: PanTool(dimensions='both'),
"xpan": lambda: PanTool(dimensions='width'),
"ypan": lambda: PanTool(dimensions='height'),
"wheel_zoom": lambda: WheelZoomTool(dimensions='both'),
"xwheel_zoom": lambda: WheelZoomTool(dimensions='width'),
"ywheel_zoom": lambda: WheelZoomTool(dimensions='height'),
"zoom_in": lambda: ZoomInTool(dimensions='both'),
"xzoom_in": lambda: ZoomInTool(dimensions='width'),
"yzoom_in": lambda: ZoomInTool(dimensions='height'),
"zoom_out": lambda: ZoomOutTool(dimensions='both'),
"xzoom_out": lambda: ZoomOutTool(dimensions='width'),
"yzoom_out": lambda: ZoomOutTool(dimensions='height'),
"xwheel_pan": lambda: WheelPanTool(dimension="width"),
"ywheel_pan": lambda: WheelPanTool(dimension="height"),
"resize": lambda: ResizeTool(),
"click": lambda: TapTool(behavior="inspect"),
"tap": lambda: TapTool(),
"crosshair": lambda: CrosshairTool(),
"box_select": lambda: BoxSelectTool(),
"xbox_select": lambda: BoxSelectTool(dimensions='width'),
"ybox_select": lambda: BoxSelectTool(dimensions='height'),
"poly_select": lambda: PolySelectTool(),
"lasso_select": lambda: LassoSelectTool(),
"box_zoom": lambda: BoxZoomTool(dimensions='both'),
"xbox_zoom": lambda: BoxZoomTool(dimensions='width'),
"ybox_zoom": lambda: BoxZoomTool(dimensions='height'),
"hover": lambda: HoverTool(tooltips=[
("index", "$index"),
("data (x, y)", "($x, $y)"),
("canvas (x, y)", "($sx, $sy)"),
]),
"save": lambda: SaveTool(),
"previewsave": "save",
"undo": lambda: UndoTool(),
"redo": lambda: RedoTool(),
"reset": lambda: ResetTool(),
"help": lambda: HelpTool(),
}
def _tool_from_string(name):
""" Takes a string and returns a corresponding `Tool` instance. """
known_tools = sorted(_known_tools.keys())
if name in known_tools:
tool_fn = _known_tools[name]
if isinstance(tool_fn, string_types):
tool_fn = _known_tools[tool_fn]
return tool_fn()
else:
matches, text = difflib.get_close_matches(name.lower(), known_tools), "similar"
if not matches:
matches, text = known_tools, "possible"
raise ValueError("unexpected tool name '%s', %s tools are %s" % (name, text, nice_join(matches)))
def _process_axis_and_grid(plot, axis_type, axis_location, minor_ticks, axis_label, rng, dim):
axiscls = _get_axis_class(axis_type, rng)
if axiscls:
if axiscls is LogAxis:
if dim == 0:
plot.x_scale = LogScale()
elif dim == 1:
plot.y_scale = LogScale()
else:
raise ValueError("received invalid dimension value: %r" % dim)
# this is so we can get a ticker off the axis, even if we discard it
axis = axiscls(plot=plot if axis_location else None)
if isinstance(axis.ticker, ContinuousTicker):
axis.ticker.num_minor_ticks = _get_num_minor_ticks(axiscls, minor_ticks)
axis_label = axis_label
if axis_label:
axis.axis_label = axis_label
grid = Grid(plot=plot, dimension=dim, ticker=axis.ticker); grid
if axis_location is not None:
getattr(plot, axis_location).append(axis)
def _process_tools_arg(plot, tools):
""" Adds tools to the plot object
Args:
plot (Plot): instance of a plot object
tools (seq[Tool or str]|str): list of tool types or string listing the
tool names. Those are converted using the _tool_from_string
function. I.e.: `wheel_zoom,box_zoom,reset`.
Returns:
list of Tools objects added to plot, map of supplied string names to tools
"""
tool_objs = []
tool_map = {}
temp_tool_str = ""
repeated_tools = []
if isinstance(tools, (list, tuple)):
for tool in tools:
if isinstance(tool, Tool):
tool_objs.append(tool)
elif isinstance(tool, string_types):
temp_tool_str += tool + ','
else:
raise ValueError("tool should be a string or an instance of Tool class")
tools = temp_tool_str
for tool in re.split(r"\s*,\s*", tools.strip()):
# re.split will return empty strings; ignore them.
if tool == "":
continue
tool_obj = _tool_from_string(tool)
tool_objs.append(tool_obj)
tool_map[tool] = tool_obj
for typename, group in itertools.groupby(
sorted([tool.__class__.__name__ for tool in tool_objs])):
if len(list(group)) > 1:
repeated_tools.append(typename)
if repeated_tools:
warnings.warn("%s are being repeated" % ",".join(repeated_tools))
return tool_objs, tool_map
def _process_active_tools(toolbar, tool_map, active_drag, active_inspect, active_scroll, active_tap):
""" Adds tools to the plot object
Args:
toolbar (Toolbar): instance of a Toolbar object
tools_map (dict[str]|Tool): tool_map from _process_tools_arg
active_drag (str or Tool): the tool to set active for drag
active_inspect (str or Tool): the tool to set active for inspect
active_scroll (str or Tool): the tool to set active for scroll
active_tap (str or Tool): the tool to set active for tap
Returns:
None
Note:
This function sets properties on Toolbar
"""
if active_drag in ['auto', None] or isinstance(active_drag, Tool):
toolbar.active_drag = active_drag
elif active_drag in tool_map:
toolbar.active_drag = tool_map[active_drag]
else:
raise ValueError("Got unknown %r for 'active_drag', which was not a string supplied in 'tools' argument" % active_drag)
if active_inspect in ['auto', None] or isinstance(active_inspect, Tool) or all([isinstance(t, Tool) for t in active_inspect]):
toolbar.active_inspect = active_inspect
elif active_inspect in tool_map:
toolbar.active_inspect = tool_map[active_inspect]
else:
raise ValueError("Got unknown %r for 'active_inspect', which was not a string supplied in 'tools' argument" % active_scroll)
if active_scroll in ['auto', None] or isinstance(active_scroll, Tool):
toolbar.active_scroll = active_scroll
elif active_scroll in tool_map:
toolbar.active_scroll = tool_map[active_scroll]
else:
raise ValueError("Got unknown %r for 'active_scroll', which was not a string supplied in 'tools' argument" % active_scroll)
if active_tap in ['auto', None] or isinstance(active_tap, Tool):
toolbar.active_tap = active_tap
elif active_tap in tool_map:
toolbar.active_tap = tool_map[active_tap]
else:
raise ValueError("Got unknown %r for 'active_tap', which was not a string supplied in 'tools' argument" % active_tap)
def _get_argspecs(glyphclass):
argspecs = OrderedDict()
for arg in glyphclass._args:
spec = {}
descriptor = getattr(glyphclass, arg)
# running python with -OO will discard docstrings -> __doc__ is None
if descriptor.__doc__:
spec['desc'] = "\n ".join(textwrap.dedent(descriptor.__doc__).split("\n"))
else:
spec['desc'] = ""
spec['default'] = descriptor.class_default(glyphclass)
spec['type'] = descriptor.property._sphinx_type()
argspecs[arg] = spec
return argspecs
# This template generates the following:
#
# def foo(self, x, y=10, **kwargs):
# kwargs['x'] = x
# kwargs['y'] = y
# return func(self, **kwargs)
_sigfunc_template = """
def %s(self, %s, **kwargs):
%s
return func(self, **kwargs)
"""
def _get_sigfunc(func_name, func, argspecs):
# This code is to wrap the generic func(*args, **kw) glyph method so that
# a much better signature is available to users. E.g., for ``square`` we have:
#
# Signature: p.square(x, y, size=4, angle=0.0, **kwargs)
#
# which provides descriptive names for positional args, as well as any defaults
func_args_with_defaults = []
for arg, spec in argspecs.items():
if spec['default'] is None:
func_args_with_defaults.append(arg)
else:
func_args_with_defaults.append("%s=%r" % (arg, spec['default']))
args_text = ", ".join(func_args_with_defaults)
kwargs_assign_text = "\n".join(" kwargs[%r] = %s" % (x, x) for x in argspecs)
func_text = _sigfunc_template % (func_name, args_text, kwargs_assign_text)
func_code = compile(func_text, "fakesource", "exec")
func_globals = {}
eval(func_code, {"func": func}, func_globals)
return func_globals[func_name]
_arg_template = """ %s (%s) : %s
(default: %r)
"""
_doc_template = """ Configure and add %s glyphs to this Figure.
Args:
%s
Keyword Args:
%s
Other Parameters:
alpha (float) : an alias to set all alpha keyword args at once
color (Color) : an alias to set all color keyword args at once
source (ColumnDataSource) : a user supplied data source
legend (str) : a legend tag for this glyph
x_range_name (str) : name an extra range to use for mapping x-coordinates
y_range_name (str) : name an extra range to use for mapping y-coordinates
level (Enum) : control the render level order for this glyph
It is also possible to set the color and alpha parameters of a "nonselection"
glyph. To do so, prefix any visual parameter with ``'nonselection_'``.
For example, pass ``nonselection_alpha`` or ``nonselection_fill_alpha``.
Returns:
GlyphRenderer
"""
def _add_sigfunc_info(func, argspecs, glyphclass, extra_docs):
func.__name__ = glyphclass.__name__.lower()
omissions = {'js_event_callbacks', 'js_property_callbacks', 'subscribed_events'}
kwlines = []
kws = glyphclass.properties() - set(argspecs)
for kw in kws:
# these are not really useful, and should also really be private, just skip them
if kw in omissions: continue
descriptor = getattr(glyphclass, kw)
typ = descriptor.property._sphinx_type()
if descriptor.__doc__:
desc = "\n ".join(textwrap.dedent(descriptor.__doc__).split("\n"))
else:
desc = ""
kwlines.append(_arg_template % (kw, typ, desc, descriptor.class_default(glyphclass)))
extra_kws = getattr(glyphclass, '_extra_kws', {})
for kw, (typ, desc) in extra_kws.items():
kwlines.append(" %s (%s) : %s" % (kw, typ, desc))
kwlines.sort()
arglines = []
for arg, spec in argspecs.items():
arglines.append(_arg_template % (arg, spec['type'], spec['desc'], spec['default']))
func.__doc__ = _doc_template % (func.__name__, "\n".join(arglines), "\n".join(kwlines))
if extra_docs:
func.__doc__ += extra_docs
def _glyph_function(glyphclass, extra_docs=None):
def func(self, **kwargs):
# Process legend kwargs and remove legend before we get going
legend_item_label = _get_legend_item_label(kwargs)
# Need to check if user source is present before _pop_renderer_args
is_user_source = kwargs.get('source', None) is not None
renderer_kws = _pop_renderer_args(kwargs)
source = renderer_kws['data_source']
if not isinstance(source, ColumnarDataSource):
try:
                # try converting the source to ColumnDataSource
source = ColumnDataSource(source)
except ValueError as err:
msg = "Failed to auto-convert {curr_type} to ColumnDataSource.\n Original error: {err}".format(
curr_type=str(type(source)),
err=err.message
)
reraise(ValueError, ValueError(msg), sys.exc_info()[2])
        # update renderer_kws so that others can use the new source
renderer_kws['data_source'] = source
# handle the main glyph, need to process literals
glyph_ca = _pop_colors_and_alpha(glyphclass, kwargs)
_process_sequence_literals(glyphclass, kwargs, source, is_user_source)
_process_sequence_literals(glyphclass, glyph_ca, source, is_user_source)
# handle the nonselection glyph, we always set one
nsglyph_ca = _pop_colors_and_alpha(glyphclass, kwargs, prefix='nonselection_', default_alpha=0.1)
# handle the selection glyph, if any properties were given
if any(x.startswith('selection_') for x in kwargs):
sglyph_ca = _pop_colors_and_alpha(glyphclass, kwargs, prefix='selection_')
else:
sglyph_ca = None
# handle the hover glyph, if any properties were given
if any(x.startswith('hover_') for x in kwargs):
hglyph_ca = _pop_colors_and_alpha(glyphclass, kwargs, prefix='hover_')
else:
hglyph_ca = None
# handle the mute glyph, if any properties were given
if any(x.startswith('muted_') for x in kwargs):
mglyph_ca = _pop_colors_and_alpha(glyphclass, kwargs, prefix='muted_')
else:
mglyph_ca = None
glyph = _make_glyph(glyphclass, kwargs, glyph_ca)
nsglyph = _make_glyph(glyphclass, kwargs, nsglyph_ca)
sglyph = _make_glyph(glyphclass, kwargs, sglyph_ca)
hglyph = _make_glyph(glyphclass, kwargs, hglyph_ca)
mglyph = _make_glyph(glyphclass, kwargs, mglyph_ca)
glyph_renderer = GlyphRenderer(glyph=glyph,
nonselection_glyph=nsglyph,
selection_glyph=sglyph,
hover_glyph=hglyph,
muted_glyph=mglyph,
**renderer_kws)
if legend_item_label:
_update_legend(self, legend_item_label, glyph_renderer)
for tool in self.select(type=BoxSelectTool):
tool.renderers.append(glyph_renderer)
self.renderers.append(glyph_renderer)
return glyph_renderer
argspecs = _get_argspecs(glyphclass)
sigfunc = _get_sigfunc(glyphclass.__name__.lower(), func, argspecs)
sigfunc.glyph_method = True
_add_sigfunc_info(sigfunc, argspecs, glyphclass, extra_docs)
return sigfunc
| gpl-2.0 | -7,109,149,979,966,638,000 | 35.438438 | 145 | 0.618963 | false |
asajeffrey/servo | tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/executors/executorwebdriver.py | 4 | 24283 | from __future__ import absolute_import
import json
import os
import socket
import threading
import time
import traceback
import uuid
from six.moves.urllib.parse import urljoin
from .base import (CallbackHandler,
CrashtestExecutor,
RefTestExecutor,
RefTestImplementation,
TestharnessExecutor,
TimedRunner,
strip_server)
from .protocol import (BaseProtocolPart,
TestharnessProtocolPart,
Protocol,
SelectorProtocolPart,
ClickProtocolPart,
SendKeysProtocolPart,
ActionSequenceProtocolPart,
TestDriverProtocolPart,
GenerateTestReportProtocolPart,
SetPermissionProtocolPart,
VirtualAuthenticatorProtocolPart)
from ..testrunner import Stop
import webdriver as client
from webdriver import error
here = os.path.dirname(__file__)
class WebDriverCallbackHandler(CallbackHandler):
unimplemented_exc = (NotImplementedError, client.UnknownCommandException)
class WebDriverBaseProtocolPart(BaseProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def execute_script(self, script, asynchronous=False):
method = self.webdriver.execute_async_script if asynchronous else self.webdriver.execute_script
return method(script)
def set_timeout(self, timeout):
try:
self.webdriver.timeouts.script = timeout
except client.WebDriverException:
# workaround https://bugs.chromium.org/p/chromedriver/issues/detail?id=2057
body = {"type": "script", "ms": timeout * 1000}
self.webdriver.send_session_command("POST", "timeouts", body)
@property
def current_window(self):
return self.webdriver.window_handle
def set_window(self, handle):
self.webdriver.window_handle = handle
def window_handles(self):
return self.webdriver.handles
def load(self, url):
self.webdriver.url = url
def wait(self):
while True:
try:
self.webdriver.execute_async_script("")
except (client.TimeoutException,
client.ScriptTimeoutException,
client.JavascriptErrorException):
# A JavascriptErrorException will happen when we navigate;
                # by ignoring it, it's possible to reload the test whilst the
# harness remains paused
pass
except (socket.timeout,
client.NoSuchWindowException,
client.UnknownErrorException,
IOError):
break
except Exception:
self.logger.error(traceback.format_exc())
break
class WebDriverTestharnessProtocolPart(TestharnessProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
self.runner_handle = None
with open(os.path.join(here, "runner.js")) as f:
self.runner_script = f.read()
with open(os.path.join(here, "window-loaded.js")) as f:
self.window_loaded_script = f.read()
def load_runner(self, url_protocol):
if self.runner_handle:
self.webdriver.window_handle = self.runner_handle
url = urljoin(self.parent.executor.server_url(url_protocol),
"/testharness_runner.html")
self.logger.debug("Loading %s" % url)
self.webdriver.url = url
self.runner_handle = self.webdriver.window_handle
format_map = {"title": threading.current_thread().name.replace("'", '"')}
self.parent.base.execute_script(self.runner_script % format_map)
def close_old_windows(self):
self.webdriver.actions.release()
handles = [item for item in self.webdriver.handles if item != self.runner_handle]
for handle in handles:
try:
self.webdriver.window_handle = handle
self.webdriver.window.close()
except client.NoSuchWindowException:
pass
self.webdriver.window_handle = self.runner_handle
return self.runner_handle
def get_test_window(self, window_id, parent, timeout=5):
"""Find the test window amongst all the open windows.
This is assumed to be either the named window or the one after the parent in the list of
window handles
:param window_id: The DOM name of the Window
:param parent: The handle of the runner window
:param timeout: The time in seconds to wait for the window to appear. This is because in
some implementations there's a race between calling window.open and the
window being added to the list of WebDriver accessible windows."""
test_window = None
end_time = time.time() + timeout
while time.time() < end_time:
try:
# Try using the JSON serialization of the WindowProxy object,
# it's in Level 1 but nothing supports it yet
win_s = self.webdriver.execute_script("return window['%s'];" % window_id)
win_obj = json.loads(win_s)
test_window = win_obj["window-fcc6-11e5-b4f8-330a88ab9d7f"]
except Exception:
pass
if test_window is None:
after = self.webdriver.handles
if len(after) == 2:
test_window = next(iter(set(after) - {parent}))
elif after[0] == parent and len(after) > 2:
# Hope the first one here is the test window
test_window = after[1]
if test_window is not None:
assert test_window != parent
return test_window
time.sleep(0.1)
raise Exception("unable to find test window")
def test_window_loaded(self):
"""Wait until the page in the new window has been loaded.
        While doing so, ignore JavaScript exceptions that are thrown when
the document has been unloaded due to a process change.
"""
while True:
try:
self.webdriver.execute_script(self.window_loaded_script, asynchronous=True)
break
except error.JavascriptErrorException:
pass
class WebDriverSelectorProtocolPart(SelectorProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def elements_by_selector(self, selector):
return self.webdriver.find.css(selector)
class WebDriverClickProtocolPart(ClickProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def element(self, element):
self.logger.info("click " + repr(element))
return element.click()
class WebDriverSendKeysProtocolPart(SendKeysProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def send_keys(self, element, keys):
try:
return element.send_keys(keys)
except client.UnknownErrorException as e:
# workaround https://bugs.chromium.org/p/chromedriver/issues/detail?id=1999
if (e.http_status != 500 or
e.status_code != "unknown error"):
raise
return element.send_element_command("POST", "value", {"value": list(keys)})
class WebDriverActionSequenceProtocolPart(ActionSequenceProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def send_actions(self, actions):
self.webdriver.actions.perform(actions['actions'])
class WebDriverTestDriverProtocolPart(TestDriverProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def send_message(self, cmd_id, message_type, status, message=None):
obj = {
"cmd_id": cmd_id,
"type": "testdriver-%s" % str(message_type),
"status": str(status)
}
if message:
obj["message"] = str(message)
self.webdriver.execute_script("window.postMessage(%s, '*')" % json.dumps(obj))
def _switch_to_frame(self, frame_number):
self.webdriver.switch_frame(frame_number)
def _switch_to_parent_frame(self):
self.webdriver.switch_frame("parent")
class WebDriverGenerateTestReportProtocolPart(GenerateTestReportProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def generate_test_report(self, message):
json_message = {"message": message}
self.webdriver.send_session_command("POST", "reporting/generate_test_report", json_message)
class WebDriverSetPermissionProtocolPart(SetPermissionProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def set_permission(self, descriptor, state, one_realm):
permission_params_dict = {
"descriptor": descriptor,
"state": state,
}
if one_realm is not None:
permission_params_dict["oneRealm"] = one_realm
self.webdriver.send_session_command("POST", "permissions", permission_params_dict)
class WebDriverVirtualAuthenticatorProtocolPart(VirtualAuthenticatorProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def add_virtual_authenticator(self, config):
return self.webdriver.send_session_command("POST", "webauthn/authenticator", config)
def remove_virtual_authenticator(self, authenticator_id):
return self.webdriver.send_session_command("DELETE", "webauthn/authenticator/%s" % authenticator_id)
def add_credential(self, authenticator_id, credential):
return self.webdriver.send_session_command("POST", "webauthn/authenticator/%s/credential" % authenticator_id, credential)
def get_credentials(self, authenticator_id):
return self.webdriver.send_session_command("GET", "webauthn/authenticator/%s/credentials" % authenticator_id)
def remove_credential(self, authenticator_id, credential_id):
return self.webdriver.send_session_command("DELETE", "webauthn/authenticator/%s/credentials/%s" % (authenticator_id, credential_id))
def remove_all_credentials(self, authenticator_id):
return self.webdriver.send_session_command("DELETE", "webauthn/authenticator/%s/credentials" % authenticator_id)
def set_user_verified(self, authenticator_id, uv):
return self.webdriver.send_session_command("POST", "webauthn/authenticator/%s/uv" % authenticator_id, uv)
class WebDriverProtocol(Protocol):
implements = [WebDriverBaseProtocolPart,
WebDriverTestharnessProtocolPart,
WebDriverSelectorProtocolPart,
WebDriverClickProtocolPart,
WebDriverSendKeysProtocolPart,
WebDriverActionSequenceProtocolPart,
WebDriverTestDriverProtocolPart,
WebDriverGenerateTestReportProtocolPart,
WebDriverSetPermissionProtocolPart,
WebDriverVirtualAuthenticatorProtocolPart]
def __init__(self, executor, browser, capabilities, **kwargs):
super(WebDriverProtocol, self).__init__(executor, browser)
self.capabilities = capabilities
self.url = browser.webdriver_url
self.webdriver = None
def connect(self):
"""Connect to browser via WebDriver."""
self.logger.debug("Connecting to WebDriver on URL: %s" % self.url)
host, port = self.url.split(":")[1].strip("/"), self.url.split(':')[-1].strip("/")
capabilities = {"alwaysMatch": self.capabilities}
self.webdriver = client.Session(host, port, capabilities=capabilities)
self.webdriver.start()
def teardown(self):
self.logger.debug("Hanging up on WebDriver session")
try:
self.webdriver.end()
except Exception as e:
message = str(getattr(e, "message", ""))
if message:
message += "\n"
message += traceback.format_exc()
self.logger.debug(message)
self.webdriver = None
def is_alive(self):
try:
# Get a simple property over the connection, with 2 seconds of timeout
            # that should be more than enough to check whether the WebDriver is
            # still alive, and allows the check to complete within the
            # testrunner's 5 seconds of extra_timeout, which is all we have before
# the external timeout from testrunner triggers.
self.webdriver.send_session_command("GET", "window", timeout=2)
except (socket.timeout, client.UnknownErrorException, client.InvalidSessionIdException):
return False
return True
def after_connect(self):
self.testharness.load_runner(self.executor.last_environment["protocol"])
class WebDriverRun(TimedRunner):
def set_timeout(self):
try:
self.protocol.base.set_timeout(self.timeout + self.extra_timeout)
except client.UnknownErrorException:
self.logger.error("Lost WebDriver connection")
return Stop
def run_func(self):
try:
self.result = True, self.func(self.protocol, self.url, self.timeout)
except (client.TimeoutException, client.ScriptTimeoutException):
self.result = False, ("EXTERNAL-TIMEOUT", None)
except (socket.timeout, client.UnknownErrorException):
self.result = False, ("CRASH", None)
except Exception as e:
if (isinstance(e, client.WebDriverException) and
e.http_status == 408 and
e.status_code == "asynchronous script timeout"):
# workaround for https://bugs.chromium.org/p/chromedriver/issues/detail?id=2001
self.result = False, ("EXTERNAL-TIMEOUT", None)
else:
message = str(getattr(e, "message", ""))
if message:
message += "\n"
message += traceback.format_exc()
self.result = False, ("INTERNAL-ERROR", message)
finally:
self.result_flag.set()
class WebDriverTestharnessExecutor(TestharnessExecutor):
supports_testdriver = True
protocol_cls = WebDriverProtocol
def __init__(self, logger, browser, server_config, timeout_multiplier=1,
close_after_done=True, capabilities=None, debug_info=None,
supports_eager_pageload=True, cleanup_after_test=True,
**kwargs):
"""WebDriver-based executor for testharness.js tests"""
TestharnessExecutor.__init__(self, logger, browser, server_config,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.protocol = self.protocol_cls(self, browser, capabilities)
with open(os.path.join(here, "testharness_webdriver_resume.js")) as f:
self.script_resume = f.read()
with open(os.path.join(here, "window-loaded.js")) as f:
self.window_loaded_script = f.read()
self.close_after_done = close_after_done
self.window_id = str(uuid.uuid4())
self.supports_eager_pageload = supports_eager_pageload
self.cleanup_after_test = cleanup_after_test
def is_alive(self):
return self.protocol.is_alive()
def on_environment_change(self, new_environment):
if new_environment["protocol"] != self.last_environment["protocol"]:
self.protocol.testharness.load_runner(new_environment["protocol"])
def do_test(self, test):
url = self.test_url(test)
success, data = WebDriverRun(self.logger,
self.do_testharness,
self.protocol,
url,
test.timeout * self.timeout_multiplier,
self.extra_timeout).run()
if success:
return self.convert_result(test, data)
return (test.result_cls(*data), [])
def do_testharness(self, protocol, url, timeout):
format_map = {"url": strip_server(url)}
# The previous test may not have closed its old windows (if something
# went wrong or if cleanup_after_test was False), so clean up here.
parent_window = protocol.testharness.close_old_windows()
# Now start the test harness
protocol.base.execute_script("window.open('about:blank', '%s', 'noopener')" % self.window_id)
test_window = protocol.testharness.get_test_window(self.window_id,
parent_window,
timeout=5*self.timeout_multiplier)
self.protocol.base.set_window(test_window)
# Wait until about:blank has been loaded
protocol.base.execute_script(self.window_loaded_script, asynchronous=True)
handler = WebDriverCallbackHandler(self.logger, protocol, test_window)
protocol.webdriver.url = url
if not self.supports_eager_pageload:
self.wait_for_load(protocol)
while True:
result = protocol.base.execute_script(
self.script_resume % format_map, asynchronous=True)
# As of 2019-03-29, WebDriver does not define expected behavior for
# cases where the browser crashes during script execution:
#
# https://github.com/w3c/webdriver/issues/1308
if not isinstance(result, list) or len(result) != 2:
try:
is_alive = self.is_alive()
except client.WebDriverException:
is_alive = False
if not is_alive:
raise Exception("Browser crashed during script execution.")
done, rv = handler(result)
if done:
break
# Attempt to cleanup any leftover windows, if allowed. This is
# preferable as it will blame the correct test if something goes wrong
# closing windows, but if the user wants to see the test results we
# have to leave the window(s) open.
if self.cleanup_after_test:
protocol.testharness.close_old_windows()
return rv
def wait_for_load(self, protocol):
# pageLoadStrategy=eager doesn't work in Chrome so try to emulate in user script
loaded = False
seen_error = False
while not loaded:
try:
loaded = protocol.base.execute_script("""
var callback = arguments[arguments.length - 1];
if (location.href === "about:blank") {
callback(false);
} else if (document.readyState !== "loading") {
callback(true);
} else {
document.addEventListener("readystatechange", () => {if (document.readyState !== "loading") {callback(true)}});
}""", asynchronous=True)
except client.JavascriptErrorException:
# We can get an error here if the script runs in the initial about:blank
# document before it has navigated, with the driver returning an error
# indicating that the document was unloaded
if seen_error:
raise
seen_error = True
class WebDriverRefTestExecutor(RefTestExecutor):
protocol_cls = WebDriverProtocol
def __init__(self, logger, browser, server_config, timeout_multiplier=1,
screenshot_cache=None, close_after_done=True,
debug_info=None, capabilities=None, **kwargs):
"""WebDriver-based executor for reftests"""
RefTestExecutor.__init__(self,
logger,
browser,
server_config,
screenshot_cache=screenshot_cache,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.protocol = self.protocol_cls(self,
browser,
capabilities=capabilities)
self.implementation = RefTestImplementation(self)
self.close_after_done = close_after_done
self.has_window = False
with open(os.path.join(here, "test-wait.js")) as f:
self.wait_script = f.read() % {"classname": "reftest-wait"}
def reset(self):
self.implementation.reset()
def is_alive(self):
return self.protocol.is_alive()
def do_test(self, test):
width_offset, height_offset = self.protocol.webdriver.execute_script(
"""return [window.outerWidth - window.innerWidth,
window.outerHeight - window.innerHeight];"""
)
try:
self.protocol.webdriver.window.position = (0, 0)
except client.InvalidArgumentException:
# Safari 12 throws with 0 or 1, treating them as bools; fixed in STP
self.protocol.webdriver.window.position = (2, 2)
self.protocol.webdriver.window.size = (800 + width_offset, 600 + height_offset)
result = self.implementation.run_test(test)
return self.convert_result(test, result)
def screenshot(self, test, viewport_size, dpi, page_ranges):
# https://github.com/web-platform-tests/wpt/issues/7135
assert viewport_size is None
assert dpi is None
return WebDriverRun(self.logger,
self._screenshot,
self.protocol,
self.test_url(test),
test.timeout,
self.extra_timeout).run()
def _screenshot(self, protocol, url, timeout):
self.protocol.base.load(url)
self.protocol.base.execute_script(self.wait_script, True)
screenshot = self.protocol.webdriver.screenshot()
        # strip off the "data:image/png;base64," prefix of the url
if screenshot.startswith("data:image/png;base64,"):
screenshot = screenshot.split(",", 1)[1]
return screenshot
class WebDriverCrashtestExecutor(CrashtestExecutor):
protocol_cls = WebDriverProtocol
def __init__(self, logger, browser, server_config, timeout_multiplier=1,
screenshot_cache=None, close_after_done=True,
debug_info=None, capabilities=None, **kwargs):
"""WebDriver-based executor for reftests"""
CrashtestExecutor.__init__(self,
logger,
browser,
server_config,
screenshot_cache=screenshot_cache,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.protocol = self.protocol_cls(self,
browser,
capabilities=capabilities)
with open(os.path.join(here, "test-wait.js")) as f:
self.wait_script = f.read() % {"classname": "test-wait"}
def do_test(self, test):
timeout = (test.timeout * self.timeout_multiplier if self.debug_info is None
else None)
success, data = WebDriverRun(self.logger,
self.do_crashtest,
self.protocol,
self.test_url(test),
timeout,
self.extra_timeout).run()
if success:
return self.convert_result(test, data)
return (test.result_cls(*data), [])
def do_crashtest(self, protocol, url, timeout):
protocol.base.load(url)
protocol.base.execute_script(self.wait_script, asynchronous=True)
return {"status": "PASS",
"message": None}
| mpl-2.0 | 4,533,225,023,122,860,000 | 38.873563 | 140 | 0.59692 | false |
google/makani | config/m600/control/hover_controllers.py | 1 | 15735 | # Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Automatically generated hover controllers.
This file was generated by:
analysis/control/generate_hover_controllers.m
"""
from makani.control import control_types as m
def GetHoverControllers(wing_serial):
"""Returns the hover controller gains."""
if wing_serial == m.kWingSerial01:
low_altitude = {
'kp': 1.79e+03,
'ki': 127.,
'kd': 6.33e+03
}
high_altitude = {
'kp': 687.,
'ki': 40.6,
'kd': 2.91e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.52e+04,
'ki': 1.07e+03,
'kd': 2.06e+04
}
pitch = {
'kp': 5.23e+04,
'ki': 3.31e+03,
'kd': 3.09e+04
}
yaw = {
'kp': 3.42e+05,
'ki': 2.70e+04,
'kd': 1.73e+05
}
tangential_short_tether = {
'kp': 0.0115,
'ki': 0.000166,
'kd': 0.0870
}
tangential_low_altitude_long_tether = {
'kp': 0.0469,
'ki': 0.00169,
'kd': 0.193
}
tangential_high_altitude_long_tether = {
'kp': 0.00713,
'ki': 4.56e-05,
'kd': 0.0331
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0581
}
tension_hard = {
'kp': 0.00,
'ki': 1.08e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.08e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.06e+04,
'ki': 536.,
'kd': 0.00
}
int_yaw = {
'kp': 4.65e+04,
'ki': 9.25e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial04Hover:
low_altitude = {
'kp': 1.90e+03,
'ki': 134.,
'kd': 6.68e+03
}
high_altitude = {
'kp': 729.,
'ki': 43.1,
'kd': 3.08e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.74e+04,
'ki': 1.16e+03,
'kd': 2.23e+04
}
pitch = {
'kp': 5.71e+04,
'ki': 3.62e+03,
'kd': 3.38e+04
}
yaw = {
'kp': 3.33e+05,
'ki': 2.63e+04,
'kd': 1.69e+05
}
tangential_short_tether = {
'kp': 0.0115,
'ki': 0.000166,
'kd': 0.0870
}
tangential_low_altitude_long_tether = {
'kp': 0.0469,
'ki': 0.00169,
'kd': 0.193
}
tangential_high_altitude_long_tether = {
'kp': 0.00713,
'ki': 4.56e-05,
'kd': 0.0331
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0574
}
tension_hard = {
'kp': 0.00,
'ki': 1.04e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.04e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.46e+04,
'ki': 588.,
'kd': 0.00
}
int_yaw = {
'kp': 4.52e+04,
'ki': 9.01e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial04Crosswind:
low_altitude = {
'kp': 1.81e+03,
'ki': 128.,
'kd': 6.39e+03
}
high_altitude = {
'kp': 694.,
'ki': 41.0,
'kd': 2.94e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.84e+04,
'ki': 1.21e+03,
'kd': 2.32e+04
}
pitch = {
'kp': 5.91e+04,
'ki': 3.75e+03,
'kd': 3.50e+04
}
yaw = {
'kp': 3.45e+05,
'ki': 2.72e+04,
'kd': 1.75e+05
}
tangential_short_tether = {
'kp': 0.00937,
'ki': 0.000135,
'kd': 0.0710
}
tangential_low_altitude_long_tether = {
'kp': 0.0382,
'ki': 0.00138,
'kd': 0.157
}
tangential_high_altitude_long_tether = {
'kp': 0.00582,
'ki': 3.72e-05,
'kd': 0.0270
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0498
}
tension_hard = {
'kp': 0.00,
'ki': 1.08e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.08e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.59e+04,
'ki': 606.,
'kd': 0.00
}
int_yaw = {
'kp': 4.68e+04,
'ki': 9.32e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial05Hover:
low_altitude = {
'kp': 1.86e+03,
'ki': 132.,
'kd': 6.55e+03
}
high_altitude = {
'kp': 713.,
'ki': 42.2,
'kd': 3.02e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.69e+04,
'ki': 1.14e+03,
'kd': 2.19e+04
}
pitch = {
'kp': 5.60e+04,
'ki': 3.55e+03,
'kd': 3.31e+04
}
yaw = {
'kp': 3.27e+05,
'ki': 2.58e+04,
'kd': 1.65e+05
}
tangential_short_tether = {
'kp': 0.0115,
'ki': 0.000166,
'kd': 0.0870
}
tangential_low_altitude_long_tether = {
'kp': 0.0469,
'ki': 0.00169,
'kd': 0.193
}
tangential_high_altitude_long_tether = {
'kp': 0.00713,
'ki': 4.56e-05,
'kd': 0.0331
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0577
}
tension_hard = {
'kp': 0.00,
'ki': 1.06e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.06e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.38e+04,
'ki': 577.,
'kd': 0.00
}
int_yaw = {
'kp': 4.44e+04,
'ki': 8.83e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial05Crosswind:
low_altitude = {
'kp': 1.77e+03,
'ki': 126.,
'kd': 6.24e+03
}
high_altitude = {
'kp': 677.,
'ki': 40.0,
'kd': 2.86e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.78e+04,
'ki': 1.18e+03,
'kd': 2.27e+04
}
pitch = {
'kp': 5.78e+04,
'ki': 3.67e+03,
'kd': 3.42e+04
}
yaw = {
'kp': 3.37e+05,
'ki': 2.66e+04,
'kd': 1.71e+05
}
tangential_short_tether = {
'kp': 0.00933,
'ki': 0.000135,
'kd': 0.0707
}
tangential_low_altitude_long_tether = {
'kp': 0.0381,
'ki': 0.00137,
'kd': 0.157
}
tangential_high_altitude_long_tether = {
'kp': 0.00579,
'ki': 3.71e-05,
'kd': 0.0269
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0500
}
tension_hard = {
'kp': 0.00,
'ki': 1.10e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.10e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.49e+04,
'ki': 593.,
'kd': 0.00
}
int_yaw = {
'kp': 4.58e+04,
'ki': 9.11e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial06Hover:
low_altitude = {
'kp': 1.90e+03,
'ki': 135.,
'kd': 6.70e+03
}
high_altitude = {
'kp': 730.,
'ki': 43.2,
'kd': 3.09e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.74e+04,
'ki': 1.16e+03,
'kd': 2.24e+04
}
pitch = {
'kp': 5.71e+04,
'ki': 3.62e+03,
'kd': 3.38e+04
}
yaw = {
'kp': 3.34e+05,
'ki': 2.64e+04,
'kd': 1.69e+05
}
tangential_short_tether = {
'kp': 0.0115,
'ki': 0.000166,
'kd': 0.0870
}
tangential_low_altitude_long_tether = {
'kp': 0.0469,
'ki': 0.00169,
'kd': 0.193
}
tangential_high_altitude_long_tether = {
'kp': 0.00713,
'ki': 4.56e-05,
'kd': 0.0331
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0574
}
tension_hard = {
'kp': 0.00,
'ki': 1.04e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.04e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.47e+04,
'ki': 590.,
'kd': 0.00
}
int_yaw = {
'kp': 4.53e+04,
'ki': 9.02e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial06Crosswind:
low_altitude = {
'kp': 1.81e+03,
'ki': 128.,
'kd': 6.39e+03
}
high_altitude = {
'kp': 694.,
'ki': 41.0,
'kd': 2.94e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.84e+04,
'ki': 1.21e+03,
'kd': 2.32e+04
}
pitch = {
'kp': 5.91e+04,
'ki': 3.75e+03,
'kd': 3.50e+04
}
yaw = {
'kp': 3.45e+05,
'ki': 2.72e+04,
'kd': 1.75e+05
}
tangential_short_tether = {
'kp': 0.00937,
'ki': 0.000135,
'kd': 0.0709
}
tangential_low_altitude_long_tether = {
'kp': 0.0382,
'ki': 0.00138,
'kd': 0.157
}
tangential_high_altitude_long_tether = {
'kp': 0.00582,
'ki': 3.72e-05,
'kd': 0.0270
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0498
}
tension_hard = {
'kp': 0.00,
'ki': 1.08e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.08e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.59e+04,
'ki': 606.,
'kd': 0.00
}
int_yaw = {
'kp': 4.68e+04,
'ki': 9.32e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial07Hover:
low_altitude = {
'kp': 1.90e+03,
'ki': 134.,
'kd': 6.68e+03
}
high_altitude = {
'kp': 729.,
'ki': 43.1,
'kd': 3.08e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.74e+04,
'ki': 1.16e+03,
'kd': 2.23e+04
}
pitch = {
'kp': 5.71e+04,
'ki': 3.62e+03,
'kd': 3.38e+04
}
yaw = {
'kp': 3.33e+05,
'ki': 2.63e+04,
'kd': 1.69e+05
}
tangential_short_tether = {
'kp': 0.0115,
'ki': 0.000166,
'kd': 0.0870
}
tangential_low_altitude_long_tether = {
'kp': 0.0469,
'ki': 0.00169,
'kd': 0.193
}
tangential_high_altitude_long_tether = {
'kp': 0.00713,
'ki': 4.56e-05,
'kd': 0.0331
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0574
}
tension_hard = {
'kp': 0.00,
'ki': 1.04e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.04e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.46e+04,
'ki': 588.,
'kd': 0.00
}
int_yaw = {
'kp': 4.52e+04,
'ki': 9.01e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial07Crosswind:
low_altitude = {
'kp': 1.81e+03,
'ki': 128.,
'kd': 6.39e+03
}
high_altitude = {
'kp': 694.,
'ki': 41.0,
'kd': 2.94e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.84e+04,
'ki': 1.21e+03,
'kd': 2.32e+04
}
pitch = {
'kp': 5.91e+04,
'ki': 3.75e+03,
'kd': 3.50e+04
}
yaw = {
'kp': 3.45e+05,
'ki': 2.72e+04,
'kd': 1.75e+05
}
tangential_short_tether = {
'kp': 0.00937,
'ki': 0.000135,
'kd': 0.0710
}
tangential_low_altitude_long_tether = {
'kp': 0.0382,
'ki': 0.00138,
'kd': 0.157
}
tangential_high_altitude_long_tether = {
'kp': 0.00582,
'ki': 3.72e-05,
'kd': 0.0270
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0498
}
tension_hard = {
'kp': 0.00,
'ki': 1.08e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.08e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.59e+04,
'ki': 606.,
'kd': 0.00
}
int_yaw = {
'kp': 4.68e+04,
'ki': 9.32e+03,
'kd': 0.00
}
else:
assert False, 'wing_serial %d was not recognized' % wing_serial
return {
'low_altitude': low_altitude,
'high_altitude': high_altitude,
'transform_tether_elevation': transform_tether_elevation,
'reel_tether_elevation': reel_tether_elevation,
'roll': roll,
'low_thrust_pitch': low_thrust_pitch,
'pitch': pitch,
'yaw': yaw,
'tangential_short_tether': tangential_short_tether,
'tangential_low_altitude_long_tether': (
tangential_low_altitude_long_tether),
'tangential_high_altitude_long_tether': (
tangential_high_altitude_long_tether),
'radial': radial,
'tension_hard': tension_hard,
'tension_soft': tension_soft,
'int_pitch': int_pitch,
'int_yaw': int_yaw,
}
| apache-2.0 | -702,106,154,424,924,200 | 19.250965 | 74 | 0.398411 | false |
locationlabs/wolphin | wolphin/config.py | 1 | 6869 | from os.path import expanduser, abspath, exists, join
import re
from wolphin.exceptions import InvalidWolphinConfiguration
class Configuration(object):
"""Configuration for any Wolphin project"""
DEFAULT_REGION = 'us-west-1'
DEFAULT_AMI_ID = 'ami-87712ac2'
DEFAULT_INSTANCE_TYPE = 't1.micro'
DEFAULT_USER = 'ubuntu'
DEFAULT_INSTANCE_AVAILABILITYZONE = 'us-west-1b'
DEFAULT_SECURITYGROUP = 'default'
DEFAULT_MIN_INSTANCE_COUNT = 1
DEFAULT_MAX_INSTANCE_COUNT = 1
DEFAULT_MAX_WAIT_TRIES = 12
DEFAULT_MAX_WAIT_DURATION = 10
def __init__(self,
project=None,
email=None,
region=DEFAULT_REGION,
ami_id=DEFAULT_AMI_ID,
instance_type=DEFAULT_INSTANCE_TYPE,
user=DEFAULT_USER,
instance_availabilityzone=DEFAULT_INSTANCE_AVAILABILITYZONE,
instance_securitygroup=DEFAULT_SECURITYGROUP,
min_instance_count=DEFAULT_MIN_INSTANCE_COUNT,
max_instance_count=DEFAULT_MAX_INSTANCE_COUNT,
amazon_keypair_name=None,
pem_file=None,
pem_path=None,
aws_access_key_id=None,
aws_secret_key=None,
max_wait_tries=DEFAULT_MAX_WAIT_TRIES,
max_wait_duration=DEFAULT_MAX_WAIT_DURATION):
"""
Initialize a wolphin configuration from defaults and any provided parameters.
:param project: wolphin project name.
:param email: email address of the project owner.
:param region: region to spawn the ec2 instances in.
        :param ami_id: a suitable AMI Id (Amazon Machine Image Id) of the base image to be used.
Don't forget to find the right ID for your region.
        :param instance_type: ec2 instance type, should match the AMI.
:param user: a valid account username which can access the ec2 instances, should match
the AMI.
:param instance_availabilityzone: the zone to make the ec2 instances available in.
:param instance_securitygroup: the security group for the ec2 instances, this should be
the name of *your* security group in *your* Amazon account.
:param min_instance_count: minimum number of ec2 instances to request.
        :param max_instance_count: maximum number of ec2 instances to request.
:param amazon_keypair_name: the key pair name in use for ec2 instances.
:param pem_file: name of the .pem file.
:param pem_path: path to the .pem file.
:param aws_access_key_id: amazon web services access key id.
:param aws_secret_key: amazon web services secret key.
:param max_wait_tries: maximum number of retries to make.
:param max_wait_duration: maximum duration in seconds, to wait during instance state
transition, for each try.
"""
self.project = project
self.email = email
self.region = region
self.ami_id = ami_id
self.instance_type = instance_type
self.user = user
self.instance_availabilityzone = instance_availabilityzone
self.instance_securitygroup = instance_securitygroup
self.min_instance_count = min_instance_count
self.max_instance_count = max_instance_count
self.amazon_keypair_name = amazon_keypair_name
self.pem_file = pem_file
self.pem_path = pem_path
self.aws_access_key_id = aws_access_key_id
self.aws_secret_key = aws_secret_key
self.max_wait_tries = max_wait_tries
self.max_wait_duration = max_wait_duration
@classmethod
def create(cls, *config_files):
"""
Factory Method to create a config from ``config_files``.
:param config_files: files containing overrides for config.
"""
config = cls()
for config_file in config_files:
config.parse_config_file(config_file)
return config
def parse_config_file(self, property_file):
"""
Reads the ``property_file`` to extract properties and updates the ``config`` with them.
The format of properties should be:
k = v or k = "v" or k = 'v'
All comments (anything after a '#') are ignored from the ``property_file``. Moreover, all
comments should be on a separate line and not as a continuation of the property, e.g.:
k = v # comment - is not considered valid.
        :param property_file: the file containing the properties to override the ``config`` with.
"""
_unquote = lambda word: (word[1:-1]
if ((word.startswith('"') and word.endswith('"')) or
(word.startswith("'") and word.endswith("'")))
else word)
_as_dict = lambda lines: (dict(map(lambda x: _unquote(x.strip()), l.split('='))
for l in lines if not l.startswith("#") and "=" in l))
if property_file:
self.__dict__.update(_as_dict(property_file))
# convert the values that must be numeric from string to int.
for integer_attribute in ['min_instance_count',
'max_instance_count',
'max_wait_tries',
'max_wait_duration']:
setattr(self, integer_attribute, int(getattr(self, integer_attribute)))
@property
def ssh_key_file(self):
"""returns the absolute location (with the filename) of the configured .pem file."""
return abspath(expanduser(join(self.pem_path, self.pem_file)))
def update(self, **kwargs):
for key, value in kwargs.iteritems():
setattr(self, key, value)
def validate(self):
"""Validates this configuration object"""
for k, v in self.__dict__.iteritems():
if not v:
raise InvalidWolphinConfiguration("{} is missing or None.".format(k))
# some basic email validation.
if not re.compile(".+@.+[.].+").match(self.email):
raise InvalidWolphinConfiguration("email: '{}' is not valid.".format(self.email))
# min and max instance count validation.
if not 0 < self.min_instance_count <= self.max_instance_count:
raise InvalidWolphinConfiguration("min_instance_count and max_instance_count should be"
" such that 0 < min_instance_count <="
" max_instance_count.")
# is the .pem available?
if not exists(self.ssh_key_file):
raise InvalidWolphinConfiguration(".pem file {} could not be found."
.format(self.ssh_key_file))
| apache-2.0 | -1,130,192,145,868,009,600 | 42.201258 | 99 | 0.595574 | false |
codeforamerica/typeseam | typeseam/form_filler/front.py | 1 | 3914 | from datetime import datetime, timezone
from pytz import timezone as ptimezone
import re
import json
import requests
class Front:
def __init__(self, token):
self.headers = {
'Authorization': 'Bearer {}'.format(token),
'Accept': 'application/json'
}
self.event_types = 'q[types][]=inbound&q[types][]=outbound'
self.root_url = 'https://api2.frontapp.com/events?'
self.payload = []
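    # A minimal usage sketch (the token and timestamp are hypothetical):
    #   front = Front('front_api_token')
    #   events = front.get_events(after=1461000000)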
def get_events(self, after=None):
self.payload = []
request_url = self.root_url + self.event_types
if after:
request_url += '&q[after]={}'.format(after)
self.pull_payload(request_url)
return self.parse_events()
def pull_payload(self, url):
next_page = url
while next_page:
response = requests.get(
next_page, headers=self.headers)
data = response.json()
self.payload.extend(data['_results'])
next_page = data["_pagination"]["next"]
def parse_events(self):
events = []
for event in self.payload:
data = event["conversation"]
message = data["last_message"]
if message["type"] == "email":
message["subject"] = data["subject"]
if is_referral(message):
events.append(get_referral_info(message))
elif is_submission(message):
events.append(get_submission_info(message))
elif is_opening(message):
events.append(get_opening_info(message))
return events
def get_opening_info(msg):
return {
"type": "opened",
"time": get_datetime(msg),
"by": get_opener(msg),
"key": is_opening(msg)
}
def is_from_cmr(msg):
for entity in msg["recipients"]:
if entity["handle"] == "[email protected]":
return entity["role"] == "from"
return False
def is_to_louise(msg):
for entity in msg["recipients"]:
if entity["handle"] == "[email protected]":
return entity["role"] == "to"
return False
def is_from_server(msg):
for entity in msg["recipients"]:
if entity["handle"] == "[email protected]":
return entity["role"] == "from"
return False
def get_referral_author(msg):
return msg["author"]["username"]
def get_datetime(msg):
return msg["created_at"]
def get_referral_key(msg):
pattern = re.compile(
"\.org/sanfrancisco/(?P<key>[0-9a-f]+)/"
)
results = pattern.findall(msg["text"])
if results and len(results) == 1:
return results[0]
else:
raise Exception(
"Couldn't find a uuid in {}".format(
json.dumps(msg, indent=2)
))
def utc_to_cali(timestamp, fmt="%c"):
PDT = ptimezone('US/Pacific')
dt = datetime.fromtimestamp(timestamp, timezone.utc)
return dt.astimezone(PDT).strftime(fmt)
def is_referral(msg):
return is_from_cmr(msg) and is_to_louise(msg)
def get_referral_info(msg):
return {
"type": "referred",
"by": get_referral_author(msg),
"time": get_datetime(msg),
"key": get_referral_key(msg)
}
def is_submission(msg):
srch = "New application to http://clearmyrecord.codeforamerica.org/"
return srch in msg["subject"]
def get_submission_info(msg):
return {
"type": "received",
"time": get_datetime(msg),
"key": get_referral_key(msg)
}
def get_opener(msg):
srch = "viewed by "
idx = msg["subject"].rfind(srch)
email = msg["subject"][idx + len(srch):]
return email
def is_opening(msg):
pattern = re.compile("Application (?P<key>[0-9a-f]+) viewed by")
results = pattern.findall(msg["subject"])
if results:
return results[0]
return False
| bsd-3-clause | 1,931,358,864,134,680,600 | 26.180556 | 72 | 0.569239 | false |
pgodel/rdiff-backup | rdiff_backup/Globals.py | 1 | 11031 | # Copyright 2002 Ben Escoto
#
# This file is part of rdiff-backup.
#
# rdiff-backup is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# rdiff-backup is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with rdiff-backup; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
"""Hold a variety of constants usually set at initialization."""
import re, os
# The current version of rdiff-backup
version = "1.3.3"
# If this is set, use this value in seconds as the current time
# instead of reading it from the clock.
current_time = None
# This determines how many bytes to read at a time when copying
blocksize = 131072
# This is used by the BufferedRead class to determine how many
# bytes to request from the underlying file per read(). Larger
# values may save on connection overhead and latency.
conn_bufsize = 393216
# This is used in the CacheCollatedPostProcess and MiscIterToFile
# classes. The number represents the number of rpaths which may be
# stuck in buffers when moving over a remote connection.
pipeline_max_length = 500
# True if script is running as a server
server = None
# uid and gid of the owner of the rdiff-backup process. This can
# vary depending on the connection.
try:
process_uid = os.getuid()
process_gid = os.getgid()
process_groups = [process_gid] + os.getgroups()
except AttributeError:
process_uid = 0
process_gid = 0
process_groups = [0]
# If true, when copying attributes, also change target's uid/gid
change_ownership = None
# If true, change the permissions of unwriteable mirror files
# (such as directories) so that they can be written, and then
# change them back. This defaults to 1 just in case the process
# is not running as root (root doesn't need to change
# permissions).
change_mirror_perms = (process_uid != 0)
# If true, try to reset the atimes of the source partition.
preserve_atime = None
# The following three attributes represent whether extended attributes
# are supported. If eas_active is true, then the current session
# supports them. If eas_write is true, then the extended attributes
# should also be written to the destination side. Finally, eas_conn
# is relative to the current connection, and should be true iff that
# particular connection supports extended attributes.
eas_active = None
eas_write = None
eas_conn = None
# The following settings are like the extended attribute settings, but
# apply to access control lists instead.
acls_active = None
acls_write = None
acls_conn = None
# Like the above, but applies to support of Windows
# access control lists.
win_acls_active = None
win_acls_write = None
win_acls_conn = None
# Like above two setting groups, but applies to support of Mac OS X
# style resource forks.
resource_forks_active = None
resource_forks_write = None
resource_forks_conn = None
# Like the above, but applies to MacOS Carbon Finder creator/type info.
# As of 1.0.2 this has defaulted to off because of bugs
carbonfile_active = None
carbonfile_write = None
carbonfile_conn = None
# This will be set as soon as the LocalConnection class loads
local_connection = None
# All connections should be added to the following list, so
# further global changes can be propagated to the remote systems.
# The first element should be Globals.local_connection. For a
# server, the second is the connection to the client.
connections = []
# Each process should have a connection number unique to the
# session. The client has connection number 0.
connection_number = 0
# Dictionary pairing connection numbers with connections. Set in
# SetConnections for all connections.
connection_dict = {}
# True if the script is the end that reads the source directory
# for backups. It is true for purely local sessions.
isbackup_reader = None
# Connection of the real backup reader (for which isbackup_reader
# is true)
backup_reader = None
# True if the script is the end that writes to the increment and
# mirror directories. True for purely local sessions.
isbackup_writer = None
# Connection of the backup writer
backup_writer = None
# Connection of the client
client_conn = None
# When backing up, issource should be true on the reader and isdest on
# the writer. When restoring, issource should be true on the mirror
# and isdest should be true on the target.
issource = None
isdest = None
# This list is used by the set function below. When a new
# connection is created with init_connection, its Globals class
# will match this one for all the variables mentioned in this
# list.
changed_settings = []
# The RPath or QuotedRPath of the rdiff-backup-data directory.
rbdir = None
# chars_to_quote is a string whose characters should be quoted. It
# should be true if certain characters in filenames on the source side
# should be escaped (see FilenameMapping for more info).
chars_to_quote = None
quoting_char = ';'
# If true, the timestamps use the following format: "2008-09-01T04-49-04-07-00"
# (instead of "2008-09-01T04:49:04-07:00"). This creates timestamps which
# don't need to be escaped on Windows.
use_compatible_timestamps = 0
# If true, emit output intended to be easily readable by a
# computer. False means output is intended for humans.
parsable_output = None
# If true, then hardlinks will be preserved to mirror and recorded
# in the increments directory. There is also a difference here
# between None and 0. When restoring, None or 1 means to preserve
# hardlinks iff can find a hardlink dictionary. 0 means ignore
# hardlink information regardless.
preserve_hardlinks = 1
# If this is false, then rdiff-backup will not compress any
# increments. Default is to compress based on regexp below.
compression = 1
# Increments based on files whose names match this
# case-insensitive regular expression won't be compressed (applies
# to .snapshots and .diffs). The second below will be the
# compiled version of the first.
no_compression_regexp_string = ("(?i).*\\.(gz|z|bz|bz2|tgz|zip|rpm|deb|"
"jpg|jpeg|gif|png|jp2|mp3|ogg|avi|wmv|mpeg|mpg|rm|mov|flac|shn|pgp|"
"gpg|rz|lzh|zoo|lharc|rar|arj|asc)$")
no_compression_regexp = None
# If true, filelists and directory statistics will be split on
# nulls instead of newlines.
null_separator = None
# Determines whether or not ssh will be run with the -C switch
ssh_compression = 1
# If true, print statistics after successful backup
print_statistics = None
# Controls whether file_statistics file is written in
# rdiff-backup-data dir. These can sometimes take up a lot of space.
file_statistics = 1
# On the writer connection, the following will be set to the mirror
# Select iterator.
select_mirror = None
# On the backup writer connection, holds the root incrementing branch
# object. Access is provided to increment error counts.
ITRB = None
# security_level has 4 values and controls which requests from remote
# systems will be honored. "all" means anything goes. "read-only"
# means that the requests must not write to disk. "update-only" means
# that requests shouldn't destructively update the disk (but normal
# incremental updates are OK). "minimal" means only listen to a few
# basic requests.
security_level = "all"
# If this is set, it indicates that the remote connection should only
# deal with paths inside of restrict_path.
restrict_path = None
# If set, a file will be marked as changed if its inode changes. See
# the man page under --no-compare-inode for more information.
compare_inode = 1
# If set, directories can be fsync'd just like normal files, to
# guarantee that any changes have been committed to disk.
fsync_directories = None
# If set, exit with error instead of dropping ACLs or ACL entries.
never_drop_acls = None
# Apply this mask to permissions before chmoding. (Set to 0777 to
# prevent highbit permissions on systems which don't support them.)
permission_mask = 07777
# If true, symlinks permissions are affected by the process umask, and
# we should change the umask when creating them in order to preserve
# the original permissions
symlink_perms = None
# If set, the path that should be used instead of the default Python
# tempfile.tempdir value on remote connections
remote_tempdir = None
def get(name):
"""Return the value of something in this module"""
return globals()[name]
def is_not_None(name):
"""Returns true if value is not None"""
return globals()[name] is not None
def set(name, val):
"""Set the value of something in this module
Use this instead of writing the values directly if the setting
matters to remote sides. This function updates the
changed_settings list, so other connections know to copy the
changes.
"""
changed_settings.append(name)
globals()[name] = val
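# Typical use from other modules (the value is chosen arbitrarily for the example):
#   Globals.set('preserve_hardlinks', 0)
# Recording the name in changed_settings lets newly created connections copy the
# same value into their own Globals module.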
def set_local(name, val):
"""Like set above, but only set current connection"""
globals()[name] = val
def set_integer(name, val):
"""Like set, but make sure val is an integer"""
try: intval = int(val)
except ValueError:
Log.FatalError("Variable %s must be set to an integer -\n"
"received %s instead." % (name, val))
set(name, intval)
def set_float(name, val, min = None, max = None, inclusive = 1):
"""Like set, but make sure val is float within given bounds"""
def error():
s = "Variable %s must be set to a float" % (name,)
if min is not None and max is not None:
s += " between %s and %s " % (min, max)
if inclusive: s += "inclusive"
else: s += "not inclusive"
elif min is not None or max is not None:
if inclusive: inclusive_string = "or equal to "
else: inclusive_string = ""
if min is not None:
s += " greater than %s%s" % (inclusive_string, min)
else: s+= " less than %s%s" % (inclusive_string, max)
Log.FatalError(s)
try: f = float(val)
except ValueError: error()
if min is not None:
if inclusive and f < min: error()
elif not inclusive and f <= min: error()
if max is not None:
if inclusive and f > max: error()
elif not inclusive and f >= max: error()
set(name, f)
def get_dict_val(name, key):
"""Return val from dictionary in this class"""
return globals()[name][key]
def set_dict_val(name, key, val):
"""Set value for dictionary in this class"""
globals()[name][key] = val
def postset_regexp(name, re_string, flags = None):
"""Compile re_string on all existing connections, set to name"""
for conn in connections:
conn.Globals.postset_regexp_local(name, re_string, flags)
def postset_regexp_local(name, re_string, flags):
"""Set name to compiled re_string locally"""
if flags: globals()[name] = re.compile(re_string, flags)
else: globals()[name] = re.compile(re_string)
| gpl-2.0 | -367,777,364,650,201,340 | 32.941538 | 79 | 0.741728 | false |
tensorflow/examples | tensorflow_examples/lite/model_maker/third_party/recommendation/ml/model/context_encoder_test.py | 1 | 6092 | # Lint as: python3
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for context_encoder."""
import tensorflow as tf
from tensorflow_examples.lite.model_maker.third_party.recommendation.ml.configs import input_config_pb2
from tensorflow_examples.lite.model_maker.third_party.recommendation.ml.configs import model_config as model_config_class
from tensorflow_examples.lite.model_maker.third_party.recommendation.ml.model import context_encoder
class ContextEncoderTest(tf.test.TestCase):
def _create_test_feature_group(self,
encoder_type: input_config_pb2.EncoderType):
"""Prepare test feature group."""
feature_context_movie_id = input_config_pb2.Feature(
feature_name='context_movie_id',
feature_type=input_config_pb2.FeatureType.INT,
vocab_size=3952,
embedding_dim=4)
feature_context_movie_rating = input_config_pb2.Feature(
feature_name='context_movie_rating',
feature_type=input_config_pb2.FeatureType.FLOAT)
return input_config_pb2.FeatureGroup(
features=[feature_context_movie_id, feature_context_movie_rating],
encoder_type=encoder_type)
def _create_test_input_config(self):
"""Generate test input_config_pb2.InputConfig proto."""
feature_group_1 = self._create_test_feature_group(
encoder_type=input_config_pb2.EncoderType.BOW)
feature_context_movie_genre = input_config_pb2.Feature(
feature_name='context_movie_genre',
feature_type=input_config_pb2.FeatureType.STRING,
vocab_name='movie_genre_vocab.txt',
vocab_size=19,
embedding_dim=3)
feature_group_2 = input_config_pb2.FeatureGroup(
features=[feature_context_movie_genre],
encoder_type=input_config_pb2.EncoderType.BOW)
feature_label = input_config_pb2.Feature(
feature_name='label_movie_id',
feature_type=input_config_pb2.FeatureType.INT,
vocab_size=3952,
embedding_dim=4)
input_config = input_config_pb2.InputConfig(
activity_feature_groups=[feature_group_1, feature_group_2],
label_feature=feature_label)
return input_config
def _create_test_model_config(self):
return model_config_class.ModelConfig(
hidden_layer_dims=[8, 4],
eval_top_k=[1, 5],
conv_num_filter_ratios=[1, 2],
conv_kernel_size=2,
lstm_num_units=16)
def test_feature_group_encoder_bow(self):
feature_group = self._create_test_feature_group(
encoder_type=input_config_pb2.EncoderType.BOW)
model_config = self._create_test_model_config()
feature_group_encoder = context_encoder.FeatureGroupEncoder(
feature_group, model_config, final_embedding_dim=4)
input_context_movie_id = tf.constant([[1, 0, 0], [1, 2, 0]])
input_context_movie_rating = tf.constant([[1.0, 0.0, 0.0], [2.0, 3.0, 0.0]])
input_context = {
'context_movie_id': input_context_movie_id,
'context_movie_rating': input_context_movie_rating
}
feature_group_embedding = feature_group_encoder(input_context)
self.assertAllEqual([2, 5], list(feature_group_embedding.shape))
def test_feature_group_encoder_cnn(self):
feature_group = self._create_test_feature_group(
encoder_type=input_config_pb2.EncoderType.CNN)
model_config = self._create_test_model_config()
feature_group_encoder = context_encoder.FeatureGroupEncoder(
feature_group, model_config, final_embedding_dim=4)
input_context_movie_id = tf.constant([[1, 0, 0], [1, 2, 0]])
input_context_movie_rating = tf.constant([[1.0, 0.0, 0.0], [2.0, 3.0, 0.0]])
input_context = {
'context_movie_id': input_context_movie_id,
'context_movie_rating': input_context_movie_rating
}
feature_group_embedding = feature_group_encoder(input_context)
self.assertAllEqual([2, 8], list(feature_group_embedding.shape))
def test_feature_group_encoder_lstm(self):
feature_group = self._create_test_feature_group(
encoder_type=input_config_pb2.EncoderType.LSTM)
model_config = self._create_test_model_config()
feature_group_encoder = context_encoder.FeatureGroupEncoder(
feature_group, model_config, final_embedding_dim=4)
input_context_movie_id = tf.constant([[1, 0, 0], [1, 2, 0]])
input_context_movie_rating = tf.constant([[1.0, 0.0, 0.0], [2.0, 3.0, 0.0]])
input_context = {
'context_movie_id': input_context_movie_id,
'context_movie_rating': input_context_movie_rating
}
feature_group_embedding = feature_group_encoder(input_context)
self.assertAllEqual([2, 16], list(feature_group_embedding.shape))
def test_context_encoder(self):
input_config = self._create_test_input_config()
model_config = self._create_test_model_config()
input_context_encoder = context_encoder.ContextEncoder(
input_config=input_config, model_config=model_config)
input_context_movie_id = tf.constant([[1, 0, 0], [1, 2, 0]])
input_context_movie_rating = tf.constant([[1.0, 0.0, 0.0], [2.0, 3.0, 0.0]])
input_context_movie_genre = tf.constant([[1, 2, 2, 4, 3], [1, 1, 2, 2, 3]])
input_context = {
'context_movie_id': input_context_movie_id,
'context_movie_rating': input_context_movie_rating,
'context_movie_genre': input_context_movie_genre
}
context_embedding = input_context_encoder(input_context)
self.assertAllEqual([2, 4], list(context_embedding.shape))
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | 9,103,877,208,249,666,000 | 43.794118 | 121 | 0.682863 | false |
jtoppins/beaker | Client/src/bkr/client/commands/cmd_user_modify.py | 1 | 2506 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""
bkr user-modify: Modify Beaker users
====================================
.. program:: bkr user-modify
Synopsis
--------
| :program:`bkr user-modify` [*options*] [:option:`--add-submission-delegate` <user>]
| [:option:`--remove-submission-delegate` <user>]
Description
-----------
Modify a Beaker user.
Allows the adding or removing of submission delegates of the currently
logged in user.
.. _user-modify-options:
Options
-------
.. option:: --add-submission-delegate=<user>
Adds a new submission delegate
.. option:: --remove-submission-delegate=<user>
Removes an existing submission delegate
Common :program:`bkr` options are described in the :ref:`Options
<common-options>` section of :manpage:`bkr(1)`.
Exit status
-----------
Non-zero on error, otherwise zero.
Examples
--------
Add a new submission delegate:
bkr user-modify --add-submission-delegate=mydelegate
Remove an existing delegate:
bkr user-modify --remove-submission-delegate=mydelegate
See also
--------
:manpage:`bkr(1)`
"""
from bkr.client import BeakerCommand
from xmlrpclib import Fault
from sys import exit
class User_Modify(BeakerCommand):
"""Modify certain user properties"""
enabled=True
def options(self):
self.parser.usage = "%%prog %s [options]" % self.normalized_name
self.parser.add_option(
"-a",
"--add-submission-delegate",
help="Add a new submission delegate"
)
self.parser.add_option(
"-r",
"--remove-submission-delegate",
help="Remove an existing submission delegate"
)
def run(self, *args, **kwargs):
delegate_to_add = kwargs.get('add_submission_delegate', None)
delegate_to_remove = kwargs.get('remove_submission_delegate', None)
self.set_hub(**kwargs)
if delegate_to_remove:
self.hub.prefs. \
remove_submission_delegate_by_name(delegate_to_remove)
print 'Removed submission delegate %s' % delegate_to_remove
if delegate_to_add:
self.hub.prefs. \
add_submission_delegate_by_name(delegate_to_add)
print 'Added submission delegate %s' % delegate_to_add
exit(0)
| gpl-2.0 | 2,265,340,603,310,640,000 | 24.06 | 85 | 0.638867 | false |
lanhel/viperaccept | setup.py | 1 | 1637 | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
#----------------------------------------------------------------------------
"""HTTP content negotiation application."""
__author__ = ('Lance Finn Helsten',)
__version__ = '0.0'
__copyright__ = """Copyright (C) 2014 Lance Finn Helsten"""
__license__ = """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
import setuptools
setuptools.setup(
name = "viperaccept",
version = __version__,
author = 'Lance Finn Helsten',
author_email = '[email protected]',
description = __doc__,
long_description = open('README.rst').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
],
packages = [
'viperaccept'
],
# scripts = [
# ],
)
| apache-2.0 | -5,658,288,544,415,330,000 | 31.098039 | 77 | 0.609041 | false |
FlorisHoogenboom/sklearn-helpers | tests/test_preprocessing.py | 1 | 3095 | import unittest
import numpy as np
import pandas as pd
from sklearn_helpers.preprocessing import \
EnhancedLabelEncoder, MultiColumnLabelEncoder
class EnhancedLabelEncoderTest(unittest.TestCase):
def test_accepts_only_1d(self):
"""It should only accept only a 1d array"""
ehe = EnhancedLabelEncoder()
train = np.array([
[1,2],
[2,1]
])
self.assertRaises(ValueError, lambda: ehe.fit(train))
# If it is flattened, it should not raise.
train = train.flatten()
ehe.fit(train)
def test_handle_unknown_error(self):
"""If handle_unkown is 'error' it should throw on unseen labels"""
ehe = EnhancedLabelEncoder(handle_unknown='error')
train = np.array(['a', 'b', 'a'])
test = np.array(['a','c'])
ehe.fit(train)
# Check that a ValueError is raised on transform
self.assertRaises(ValueError, lambda: ehe.transform(test))
def test_handle_unknown_ignore(self):
"""If handle_unknown is 'ignore' it should map unseen labels to a new value"""
ehe = EnhancedLabelEncoder(handle_unknown='ignore')
train = np.array(['a', 'b', 'a'])
test = np.array(['a','c'])
ehe.fit(train)
# Check that the new label is mapped to the next value
self.assertTrue(
(np.array([0,2]) == ehe.transform(test)).all()
)
class MultiColumnLabelEncoderTest(unittest.TestCase):
def test_handle_ignore(self):
"""If handle_unknown is 'ignore' it should map unseen labels to a new value"""
mce = MultiColumnLabelEncoder(handle_unknown='ignore')
train = np.array([
['a', 'b'],
['c', 'a']
])
test = np.array([
['a', 'd'],
['c', 'd']
])
mce.fit(train)
test_transformed = np.array([
[0.,2.],
[1.,2.]
])
self.assertTrue(
(mce.transform(test) == test_transformed).all()
)
def test_accepts_pandas(self):
"""It shouold accept a Pandas dataframe"""
mce = MultiColumnLabelEncoder(handle_unknown='ignore')
train = pd.DataFrame(
np.array([
['a', 'b'],
['c', 'a']
]),
columns=['col1', 'col2']
)
# This should not throw
mce.fit_transform(train, np.array([1,2]))
def test_classes(self):
"""It should return classes for each column"""
def test_accepts_pandas(self):
"""It shouold accept a Pandas dataframe"""
mce = MultiColumnLabelEncoder(
handle_unknown='ignore'
)
train = pd.DataFrame(
np.array([
['a', 'b'],
['c', 'a']
]),
columns=['col1', 'col2']
)
mce.fit(train, np.array([1,2]))
self.assertEqual(
mce.classes_[0][0],
'a'
)
self.assertEqual(
mce.classes_[1][1],
'b'
)
| mit | 4,735,568,165,528,348,000 | 26.149123 | 86 | 0.519871 | false |
jkibele/benthic_photo_survey | bps_package/ui_pref_help.py | 1 | 1950 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'pref_help.ui'
#
# Created: Sun Mar 8 18:17:55 2015
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_PrefHelpDialog(object):
def setupUi(self, PrefHelpDialog):
PrefHelpDialog.setObjectName(_fromUtf8("PrefHelpDialog"))
PrefHelpDialog.resize(447, 326)
self.verticalLayout = QtGui.QVBoxLayout(PrefHelpDialog)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.textBrowser = QtGui.QTextBrowser(PrefHelpDialog)
self.textBrowser.setObjectName(_fromUtf8("textBrowser"))
self.verticalLayout.addWidget(self.textBrowser)
self.buttonBox = QtGui.QDialogButtonBox(PrefHelpDialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(PrefHelpDialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), PrefHelpDialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), PrefHelpDialog.reject)
QtCore.QMetaObject.connectSlotsByName(PrefHelpDialog)
def retranslateUi(self, PrefHelpDialog):
PrefHelpDialog.setWindowTitle(_translate("PrefHelpDialog", "BPS Help", None))
| bsd-3-clause | 8,490,062,574,565,492,000 | 39.625 | 109 | 0.726667 | false |
zabracks/sshuttle | src/server.py | 1 | 10287 | import re
import struct
import socket
import traceback
import time
import sys
import os
if not globals().get('skip_imports'):
import ssnet
import helpers
import hostwatch
import compat.ssubprocess as ssubprocess
from ssnet import Handler, Proxy, Mux, MuxWrapper
from helpers import log, debug1, debug2, debug3, Fatal, \
resolvconf_random_nameserver
if not globals().get('latency_control'):
latency_control = None
def _ipmatch(ipstr):
if ipstr == 'default':
ipstr = '0.0.0.0/0'
m = re.match(r'^(\d+(\.\d+(\.\d+(\.\d+)?)?)?)(?:/(\d+))?$', ipstr)
if m:
g = m.groups()
ips = g[0]
width = int(g[4] or 32)
if g[1] is None:
ips += '.0.0.0'
width = min(width, 8)
elif g[2] is None:
ips += '.0.0'
width = min(width, 16)
elif g[3] is None:
ips += '.0'
width = min(width, 24)
return (struct.unpack('!I', socket.inet_aton(ips))[0], width)
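# A few hand-checked examples (illustrative, not from the original source):
#   _ipmatch('default')    -> (0, 0)            # shorthand for 0.0.0.0/0
#   _ipmatch('10.0.0.0/8') -> (167772160, 8)
#   _ipmatch('192.168')    -> (3232235520, 16)  # missing octets padded, width clipped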
def _ipstr(ip, width):
if width >= 32:
return ip
else:
return "%s/%d" % (ip, width)
def _maskbits(netmask):
if not netmask:
return 32
for i in range(32):
if netmask[0] & _shl(1, i):
return 32 - i
return 0
def _shl(n, bits):
return n * int(2 ** bits)
def _list_routes():
argv = ['netstat', '-rn']
p = ssubprocess.Popen(argv, stdout=ssubprocess.PIPE)
routes = []
for line in p.stdout:
cols = re.split(r'\s+', line)
ipw = _ipmatch(cols[0])
if not ipw:
continue # some lines won't be parseable; never mind
maskw = _ipmatch(cols[2]) # linux only
mask = _maskbits(maskw) # returns 32 if maskw is null
width = min(ipw[1], mask)
ip = ipw[0] & _shl(_shl(1, width) - 1, 32 - width)
routes.append(
(socket.AF_INET, socket.inet_ntoa(struct.pack('!I', ip)), width))
rv = p.wait()
if rv != 0:
log('WARNING: %r returned %d\n' % (argv, rv))
log('WARNING: That prevents --auto-nets from working.\n')
return routes
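# Each entry appended above is a (family, ip, width) tuple; for example a
# hypothetical netstat row "10.0.0.0  0.0.0.0  255.0.0.0 ..." becomes
#   (socket.AF_INET, '10.0.0.0', 8)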
def list_routes():
for (family, ip, width) in _list_routes():
if not ip.startswith('0.') and not ip.startswith('127.'):
yield (family, ip, width)
def _exc_dump():
exc_info = sys.exc_info()
return ''.join(traceback.format_exception(*exc_info))
def start_hostwatch(seed_hosts):
s1, s2 = socket.socketpair()
pid = os.fork()
if not pid:
# child
rv = 99
try:
try:
s2.close()
os.dup2(s1.fileno(), 1)
os.dup2(s1.fileno(), 0)
s1.close()
rv = hostwatch.hw_main(seed_hosts) or 0
except Exception:
log('%s\n' % _exc_dump())
rv = 98
finally:
os._exit(rv)
s1.close()
return pid, s2
class Hostwatch:
def __init__(self):
self.pid = 0
self.sock = None
class DnsProxy(Handler):
def __init__(self, mux, chan, request):
# FIXME! IPv4 specific
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
Handler.__init__(self, [sock])
self.timeout = time.time() + 30
self.mux = mux
self.chan = chan
self.tries = 0
self.peer = None
self.request = request
self.sock = sock
# FIXME! IPv4 specific
self.sock.setsockopt(socket.SOL_IP, socket.IP_TTL, 42)
self.try_send()
def try_send(self):
if self.tries >= 3:
return
self.tries += 1
# FIXME! Support IPv6 nameservers
self.peer = resolvconf_random_nameserver()[1]
self.sock.connect((self.peer, 53))
debug2('DNS: sending to %r\n' % self.peer)
try:
self.sock.send(self.request)
except socket.error, e:
if e.args[0] in ssnet.NET_ERRS:
# might have been spurious; try again.
# Note: these errors sometimes are reported by recv(),
# and sometimes by send(). We have to catch both.
debug2('DNS send to %r: %s\n' % (self.peer, e))
self.try_send()
return
else:
log('DNS send to %r: %s\n' % (self.peer, e))
return
def callback(self):
try:
data = self.sock.recv(4096)
except socket.error, e:
if e.args[0] in ssnet.NET_ERRS:
# might have been spurious; try again.
# Note: these errors sometimes are reported by recv(),
# and sometimes by send(). We have to catch both.
debug2('DNS recv from %r: %s\n' % (self.peer, e))
self.try_send()
return
else:
log('DNS recv from %r: %s\n' % (self.peer, e))
return
debug2('DNS response: %d bytes\n' % len(data))
self.mux.send(self.chan, ssnet.CMD_DNS_RESPONSE, data)
self.ok = False
class UdpProxy(Handler):
def __init__(self, mux, chan, family):
sock = socket.socket(family, socket.SOCK_DGRAM)
Handler.__init__(self, [sock])
self.timeout = time.time() + 30
self.mux = mux
self.chan = chan
self.sock = sock
if family == socket.AF_INET:
self.sock.setsockopt(socket.SOL_IP, socket.IP_TTL, 42)
def send(self, dstip, data):
debug2('UDP: sending to %r port %d\n' % dstip)
try:
self.sock.sendto(data, dstip)
except socket.error, e:
log('UDP send to %r port %d: %s\n' % (dstip[0], dstip[1], e))
return
def callback(self):
try:
data, peer = self.sock.recvfrom(4096)
except socket.error, e:
log('UDP recv from %r port %d: %s\n' % (peer[0], peer[1], e))
return
debug2('UDP response: %d bytes\n' % len(data))
hdr = "%s,%r," % (peer[0], peer[1])
self.mux.send(self.chan, ssnet.CMD_UDP_DATA, hdr + data)
def main():
if helpers.verbose >= 1:
helpers.logprefix = ' s: '
else:
helpers.logprefix = 'server: '
assert latency_control is not None
debug1('latency control setting = %r\n' % latency_control)
routes = list(list_routes())
debug1('available routes:\n')
for r in routes:
debug1(' %d/%s/%d\n' % r)
# synchronization header
sys.stdout.write('\0\0SSHUTTLE0001')
sys.stdout.flush()
handlers = []
mux = Mux(socket.fromfd(sys.stdin.fileno(),
socket.AF_INET, socket.SOCK_STREAM),
socket.fromfd(sys.stdout.fileno(),
socket.AF_INET, socket.SOCK_STREAM))
handlers.append(mux)
routepkt = ''
for r in routes:
routepkt += '%d,%s,%d\n' % r
mux.send(0, ssnet.CMD_ROUTES, routepkt)
hw = Hostwatch()
hw.leftover = ''
def hostwatch_ready():
assert(hw.pid)
content = hw.sock.recv(4096)
if content:
lines = (hw.leftover + content).split('\n')
if lines[-1]:
# no terminating newline: entry isn't complete yet!
hw.leftover = lines.pop()
lines.append('')
else:
hw.leftover = ''
mux.send(0, ssnet.CMD_HOST_LIST, '\n'.join(lines))
else:
raise Fatal('hostwatch process died')
def got_host_req(data):
if not hw.pid:
(hw.pid, hw.sock) = start_hostwatch(data.strip().split())
handlers.append(Handler(socks=[hw.sock],
callback=hostwatch_ready))
mux.got_host_req = got_host_req
def new_channel(channel, data):
(family, dstip, dstport) = data.split(',', 2)
family = int(family)
dstport = int(dstport)
outwrap = ssnet.connect_dst(family, dstip, dstport)
handlers.append(Proxy(MuxWrapper(mux, channel), outwrap))
mux.new_channel = new_channel
dnshandlers = {}
def dns_req(channel, data):
debug2('Incoming DNS request channel=%d.\n' % channel)
h = DnsProxy(mux, channel, data)
handlers.append(h)
dnshandlers[channel] = h
mux.got_dns_req = dns_req
udphandlers = {}
def udp_req(channel, cmd, data):
debug2('Incoming UDP request channel=%d, cmd=%d\n' % (channel, cmd))
if cmd == ssnet.CMD_UDP_DATA:
(dstip, dstport, data) = data.split(",", 2)
dstport = int(dstport)
debug2('is incoming UDP data. %r %d.\n' % (dstip, dstport))
h = udphandlers[channel]
h.send((dstip, dstport), data)
elif cmd == ssnet.CMD_UDP_CLOSE:
debug2('is incoming UDP close\n')
h = udphandlers[channel]
h.ok = False
del mux.channels[channel]
def udp_open(channel, data):
debug2('Incoming UDP open.\n')
family = int(data)
mux.channels[channel] = lambda cmd, data: udp_req(channel, cmd, data)
if channel in udphandlers:
raise Fatal('UDP connection channel %d already open' % channel)
else:
h = UdpProxy(mux, channel, family)
handlers.append(h)
udphandlers[channel] = h
mux.got_udp_open = udp_open
while mux.ok:
if hw.pid:
assert(hw.pid > 0)
(rpid, rv) = os.waitpid(hw.pid, os.WNOHANG)
if rpid:
raise Fatal(
'hostwatch exited unexpectedly: code 0x%04x\n' % rv)
ssnet.runonce(handlers, mux)
if latency_control:
mux.check_fullness()
mux.callback()
if dnshandlers:
now = time.time()
for channel, h in dnshandlers.items():
if h.timeout < now or not h.ok:
debug3('expiring dnsreqs channel=%d\n' % channel)
del dnshandlers[channel]
h.ok = False
for channel, h in udphandlers.items():
if not h.ok:
debug3('expiring UDP channel=%d\n' % channel)
del udphandlers[channel]
h.ok = False
| lgpl-2.1 | -5,990,303,340,717,896,000 | 29.707463 | 77 | 0.521532 | false |
Neopallium/mailinabox | management/backup.py | 1 | 17961 | #!/usr/bin/python3
# This script performs a backup of all user data:
# 1) System services are stopped.
# 2) An incremental encrypted backup is made using duplicity.
# 3) The stopped services are restarted.
# 4) STORAGE_ROOT/backup/after-backup is executed if it exists.
import os, os.path, shutil, glob, re, datetime, sys
import dateutil.parser, dateutil.relativedelta, dateutil.tz
import rtyaml
from utils import exclusive_process, load_environment, shell, wait_for_service, fix_boto
def backup_status(env):
# Root folder
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
# What is the current status of backups?
# Query duplicity to get a list of all backups.
# Use the number of volumes to estimate the size.
config = get_backup_config(env)
now = datetime.datetime.now(dateutil.tz.tzlocal())
# Are backups disabled?
if config["target"] == "off":
return { }
backups = { }
backup_cache_dir = os.path.join(backup_root, 'cache')
def reldate(date, ref, clip):
if ref < date: return clip
rd = dateutil.relativedelta.relativedelta(ref, date)
if rd.months > 1: return "%d months, %d days" % (rd.months, rd.days)
if rd.months == 1: return "%d month, %d days" % (rd.months, rd.days)
if rd.days >= 7: return "%d days" % rd.days
if rd.days > 1: return "%d days, %d hours" % (rd.days, rd.hours)
if rd.days == 1: return "%d day, %d hours" % (rd.days, rd.hours)
return "%d hours, %d minutes" % (rd.hours, rd.minutes)
# Get duplicity collection status and parse for a list of backups.
def parse_line(line):
keys = line.strip().split()
date = dateutil.parser.parse(keys[1]).astimezone(dateutil.tz.tzlocal())
return {
"date": keys[1],
"date_str": date.strftime("%x %X") + " " + now.tzname(),
"date_delta": reldate(date, now, "the future?"),
"full": keys[0] == "full",
"size": 0, # collection-status doesn't give us the size
"volumes": keys[2], # number of archive volumes for this backup (not really helpful)
}
code, collection_status = shell('check_output', [
"/usr/bin/duplicity",
"collection-status",
"--archive-dir", backup_cache_dir,
"--gpg-options", "--cipher-algo=AES256",
"--log-fd", "1",
config["target"],
],
get_env(env),
trap=True)
if code != 0:
# Command failed. This is likely due to an improperly configured remote
# destination for the backups or the last backup job terminated unexpectedly.
raise Exception("Something is wrong with the backup: " + collection_status)
for line in collection_status.split('\n'):
if line.startswith(" full") or line.startswith(" inc"):
backup = parse_line(line)
backups[backup["date"]] = backup
# Look at the target to get the sizes of each of the backups. There is more than one file per backup.
for fn, size in list_target_files(config):
m = re.match(r"duplicity-(full|full-signatures|(inc|new-signatures)\.(?P<incbase>\d+T\d+Z)\.to)\.(?P<date>\d+T\d+Z)\.", fn)
if not m: continue # not a part of a current backup chain
key = m.group("date")
backups[key]["size"] += size
# Ensure the rows are sorted reverse chronologically.
# This is relied on by should_force_full() and the next step.
backups = sorted(backups.values(), key = lambda b : b["date"], reverse=True)
# Get the average size of incremental backups, the size of the
# most recent full backup, and the date of the most recent
# backup and the most recent full backup.
incremental_count = 0
incremental_size = 0
first_date = None
first_full_size = None
first_full_date = None
for bak in backups:
if first_date is None:
first_date = dateutil.parser.parse(bak["date"])
if bak["full"]:
first_full_size = bak["size"]
first_full_date = dateutil.parser.parse(bak["date"])
break
incremental_count += 1
incremental_size += bak["size"]
# When will the most recent backup be deleted? It won't be deleted if the next
# backup is incremental, because the increments rely on all past increments.
# So first guess how many more incremental backups will occur until the next
# full backup. That full backup frees up this one to be deleted. But, the backup
# must also be at least min_age_in_days old too.
deleted_in = None
if incremental_count > 0 and first_full_size is not None:
# How many days until the next incremental backup? First, the part of
# the algorithm based on increment sizes:
est_days_to_next_full = (.5 * first_full_size - incremental_size) / (incremental_size/incremental_count)
est_time_of_next_full = first_date + datetime.timedelta(days=est_days_to_next_full)
# ...And then the part of the algorithm based on full backup age:
est_time_of_next_full = min(est_time_of_next_full, first_full_date + datetime.timedelta(days=config["min_age_in_days"]*10+1))
# It still can't be deleted until it's old enough.
est_deleted_on = max(est_time_of_next_full, first_date + datetime.timedelta(days=config["min_age_in_days"]))
deleted_in = "approx. %d days" % round((est_deleted_on-now).total_seconds()/60/60/24 + .5)
# When will a backup be deleted? Set the deleted_in field of each backup.
saw_full = False
for bak in backups:
if deleted_in:
# The most recent increment in a chain and all of the previous backups
# it relies on are deleted at the same time.
bak["deleted_in"] = deleted_in
if bak["full"]:
# Reset when we get to a full backup. A new chain start *next*.
saw_full = True
deleted_in = None
elif saw_full and not deleted_in:
# We're now on backups prior to the most recent full backup. These are
# free to be deleted as soon as they are min_age_in_days old.
deleted_in = reldate(now, dateutil.parser.parse(bak["date"]) + datetime.timedelta(days=config["min_age_in_days"]), "on next daily backup")
bak["deleted_in"] = deleted_in
return {
"backups": backups,
}
def should_force_full(config, env):
# Force a full backup when the total size of the increments
# since the last full backup is greater than half the size
# of that full backup.
inc_size = 0
for bak in backup_status(env)["backups"]:
if not bak["full"]:
# Scan through the incremental backups cumulating
# size...
inc_size += bak["size"]
else:
# ...until we reach the most recent full backup.
# Return if we should to a full backup, which is based
# on the size of the increments relative to the full
# backup, as well as the age of the full backup.
if inc_size > .5*bak["size"]:
return True
if dateutil.parser.parse(bak["date"]) + datetime.timedelta(days=config["min_age_in_days"]*10+1) < datetime.datetime.now(dateutil.tz.tzlocal()):
return True
return False
else:
# If we got here there are no (full) backups, so make one.
# (I love for/else blocks. Here it's just to show off.)
return True
def get_passphrase(env):
# Get the encryption passphrase. secret_key.txt is 2048 random
# bits base64-encoded and with line breaks every 65 characters.
# gpg will only take the first line of text, so sanity check that
# that line is long enough to be a reasonable passphrase. It
# only needs to be 43 base64-characters to match AES256's key
# length of 32 bytes.
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
with open(os.path.join(backup_root, 'secret_key.txt')) as f:
passphrase = f.readline().strip()
if len(passphrase) < 43: raise Exception("secret_key.txt's first line is too short!")
return passphrase
def get_env(env):
config = get_backup_config(env)
env = { "PASSPHRASE" : get_passphrase(env) }
if get_target_type(config) == 's3':
env["AWS_ACCESS_KEY_ID"] = config["target_user"]
env["AWS_SECRET_ACCESS_KEY"] = config["target_pass"]
return env
def get_target_type(config):
protocol = config["target"].split(":")[0]
return protocol
def perform_backup(full_backup):
env = load_environment()
exclusive_process("backup")
config = get_backup_config(env)
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
backup_cache_dir = os.path.join(backup_root, 'cache')
backup_dir = os.path.join(backup_root, 'encrypted')
# Are backups disabled?
if config["target"] == "off":
return
# In an older version of this script, duplicity was called
# such that it did not encrypt the backups it created (in
# backup/duplicity), and instead openssl was called separately
# after each backup run, creating AES256 encrypted copies of
# each file created by duplicity in backup/encrypted.
#
# We detect the transition by the presence of backup/duplicity
# and handle it by 'dupliception': we move all the old *un*encrypted
# duplicity files up out of the backup/duplicity directory (as
# backup/ is excluded from duplicity runs) in order that it is
# included in the next run, and we delete backup/encrypted (which
# duplicity will output files directly to, post-transition).
old_backup_dir = os.path.join(backup_root, 'duplicity')
migrated_unencrypted_backup_dir = os.path.join(env["STORAGE_ROOT"], "migrated_unencrypted_backup")
if os.path.isdir(old_backup_dir):
# Move the old unencrypted files to a new location outside of
# the backup root so they get included in the next (new) backup.
# Then we'll delete them. Also so that they do not get in the
# way of duplicity doing a full backup on the first run after
# we take care of this.
shutil.move(old_backup_dir, migrated_unencrypted_backup_dir)
# The backup_dir (backup/encrypted) now has a new purpose.
# Clear it out.
shutil.rmtree(backup_dir)
# On the first run, always do a full backup. Incremental
# will fail. Otherwise do a full backup when the size of
# the increments since the most recent full backup are
# large.
try:
full_backup = full_backup or should_force_full(config, env)
except Exception as e:
# This was the first call to duplicity, and there might
# be an error already.
print(e)
sys.exit(1)
# Stop services.
def service_command(service, command, quit=None):
# Execute silently, but if there is an error then display the output & exit.
code, ret = shell('check_output', ["/usr/sbin/service", service, command], capture_stderr=True, trap=True)
if code != 0:
print(ret)
if quit:
sys.exit(code)
service_command("php5-fpm", "stop", quit=True)
service_command("postfix", "stop", quit=True)
service_command("dovecot", "stop", quit=True)
# Run a backup of STORAGE_ROOT (but excluding the backups themselves!).
# --allow-source-mismatch is needed in case the box's hostname is changed
# after the first backup. See #396.
try:
shell('check_call', [
"/usr/bin/duplicity",
"full" if full_backup else "incr",
"--verbosity", "warning", "--no-print-statistics",
"--archive-dir", backup_cache_dir,
"--exclude", backup_root,
"--volsize", "250",
"--gpg-options", "--cipher-algo=AES256",
env["STORAGE_ROOT"],
config["target"],
"--allow-source-mismatch"
],
get_env(env))
finally:
# Start services again.
service_command("dovecot", "start", quit=False)
service_command("postfix", "start", quit=False)
service_command("php5-fpm", "start", quit=False)
# Once the migrated backup is included in a new backup, it can be deleted.
if os.path.isdir(migrated_unencrypted_backup_dir):
shutil.rmtree(migrated_unencrypted_backup_dir)
# Remove old backups. This deletes all backup data no longer needed
# from more than 3 days ago.
shell('check_call', [
"/usr/bin/duplicity",
"remove-older-than",
"%dD" % config["min_age_in_days"],
"--verbosity", "error",
"--archive-dir", backup_cache_dir,
"--force",
config["target"]
],
get_env(env))
# From duplicity's manual:
# "This should only be necessary after a duplicity session fails or is
# aborted prematurely."
# That may be unlikely here but we may as well ensure we tidy up if
# that does happen - it might just have been a poorly timed reboot.
shell('check_call', [
"/usr/bin/duplicity",
"cleanup",
"--verbosity", "error",
"--archive-dir", backup_cache_dir,
"--force",
config["target"]
],
get_env(env))
# Change ownership of backups to the user-data user, so that the after-backup
# script can access them.
if get_target_type(config) == 'file':
shell('check_call', ["/bin/chown", "-R", env["STORAGE_USER"], backup_dir])
# Execute a post-backup script that does the copying to a remote server.
# Run as the STORAGE_USER user, not as root. Pass our settings in
# environment variables so the script has access to STORAGE_ROOT.
post_script = os.path.join(backup_root, 'after-backup')
if os.path.exists(post_script):
shell('check_call',
['su', env['STORAGE_USER'], '-c', post_script, config["target"]],
env=env)
# Our nightly cron job executes system status checks immediately after this
# backup. Since it checks that dovecot and postfix are running, block for a
# bit (maximum of 10 seconds each) to give each a chance to finish restarting
# before the status checks might catch them down. See #381.
wait_for_service(25, True, env, 10)
wait_for_service(993, True, env, 10)
def run_duplicity_verification():
env = load_environment()
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
config = get_backup_config(env)
backup_cache_dir = os.path.join(backup_root, 'cache')
shell('check_call', [
"/usr/bin/duplicity",
"--verbosity", "info",
"verify",
"--compare-data",
"--archive-dir", backup_cache_dir,
"--exclude", backup_root,
config["target"],
env["STORAGE_ROOT"],
], get_env(env))
def run_duplicity_restore(args):
env = load_environment()
config = get_backup_config(env)
backup_cache_dir = os.path.join(env["STORAGE_ROOT"], 'backup', 'cache')
shell('check_call', [
"/usr/bin/duplicity",
"restore",
"--archive-dir", backup_cache_dir,
config["target"],
] + args,
get_env(env))
def list_target_files(config):
import urllib.parse
try:
p = urllib.parse.urlparse(config["target"])
except ValueError:
return "invalid target"
if p.scheme == "file":
return [(fn, os.path.getsize(os.path.join(p.path, fn))) for fn in os.listdir(p.path)]
elif p.scheme == "s3":
# match to a Region
fix_boto() # must call prior to importing boto
import boto.s3
from boto.exception import BotoServerError
for region in boto.s3.regions():
if region.endpoint == p.hostname:
break
else:
raise ValueError("Invalid S3 region/host.")
bucket = p.path[1:].split('/')[0]
path = '/'.join(p.path[1:].split('/')[1:]) + '/'
# If no prefix is specified, set the path to '', otherwise boto won't list the files
if path == '/':
path = ''
if bucket == "":
raise ValueError("Enter an S3 bucket name.")
# connect to the region & bucket
try:
conn = region.connect(aws_access_key_id=config["target_user"], aws_secret_access_key=config["target_pass"])
bucket = conn.get_bucket(bucket)
except BotoServerError as e:
if e.status == 403:
raise ValueError("Invalid S3 access key or secret access key.")
elif e.status == 404:
raise ValueError("Invalid S3 bucket name.")
elif e.status == 301:
raise ValueError("Incorrect region for this bucket.")
raise ValueError(e.reason)
return [(key.name[len(path):], key.size) for key in bucket.list(prefix=path)]
else:
raise ValueError(config["target"])
def backup_set_custom(env, target, target_user, target_pass, min_age):
config = get_backup_config(env, for_save=True)
# min_age must be an int
if isinstance(min_age, str):
min_age = int(min_age)
config["target"] = target
config["target_user"] = target_user
config["target_pass"] = target_pass
config["min_age_in_days"] = min_age
# Validate.
try:
if config["target"] not in ("off", "local"):
# these aren't supported by the following function, which expects a full url in the target key,
# which is what is there except when loading the config prior to saving
list_target_files(config)
except ValueError as e:
return str(e)
write_backup_config(env, config)
return "OK"
def get_backup_config(env, for_save=False, for_ui=False):
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
# Defaults.
config = {
"min_age_in_days": 3,
"target": "local",
}
# Merge in anything written to custom.yaml.
try:
custom_config = rtyaml.load(open(os.path.join(backup_root, 'custom.yaml')))
if not isinstance(custom_config, dict): raise ValueError() # caught below
config.update(custom_config)
except:
pass
# When updating config.yaml, don't do any further processing on what we find.
if for_save:
return config
# When passing this back to the admin to show the current settings, do not include
# authentication details. The user will have to re-enter it.
if for_ui:
for field in ("target_user", "target_pass"):
if field in config:
del config[field]
# helper fields for the admin
config["file_target_directory"] = os.path.join(backup_root, 'encrypted')
config["enc_pw_file"] = os.path.join(backup_root, 'secret_key.txt')
if config["target"] == "local":
# Expand to the full URL.
config["target"] = "file://" + config["file_target_directory"]
return config
def write_backup_config(env, newconfig):
backup_root = os.path.join(env["STORAGE_ROOT"], 'backup')
with open(os.path.join(backup_root, 'custom.yaml'), "w") as f:
f.write(rtyaml.dump(newconfig))
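# A custom.yaml written by this function might look like the following
# (illustrative values only; the S3 URL and credentials are made up):
#
# min_age_in_days: 3
# target: s3://s3.amazonaws.com/my-backup-bucket/box
# target_user: AKIAEXAMPLE
# target_pass: examplesecret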
if __name__ == "__main__":
import sys
if sys.argv[-1] == "--verify":
# Run duplicity's verification command to check a) the backup files
# are readable, and b) report if they are up to date.
run_duplicity_verification()
elif sys.argv[-1] == "--status":
# Show backup status.
ret = backup_status(load_environment())
print(rtyaml.dump(ret["backups"]))
elif len(sys.argv) >= 2 and sys.argv[1] == "--restore":
# Run duplicity restore. Rest of command line passed as arguments
# to duplicity. The restore path should be specified.
run_duplicity_restore(sys.argv[2:])
else:
# Perform a backup. Add --full to force a full backup rather than
# possibly performing an incremental backup.
full_backup = "--full" in sys.argv
perform_backup(full_backup)
| cc0-1.0 | 5,556,783,706,832,090,000 | 34.636905 | 146 | 0.691832 | false |
crazyskateface/LC | chat/admin.py | 1 | 1664 | from django.contrib import admin
from chat.models import UserProfile, Comments, Roll, Emblem
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django import forms
# Register your models here.
class UserProfileAdmin(admin.StackedInline):
model = UserProfile
can_delete = False
verbose_name_plural = 'User'
fields = ('user','ign','isMod','banned','verified','primRole','secRole','tier','division')
class MyUserCreationForm(UserCreationForm):
def clean_username(self):
# Since User.username is unique, this check is redundant,
# but it sets a nicer error message than the ORM. See #13147.
username = self.cleaned_data["username"]
try:
User._default_manager.get(username=username)
except User.DoesNotExist:
return username
raise forms.ValidationError(self.error_messages['duplicate_username'])
class Meta(UserCreationForm.Meta):
model = User
class UserAdmin(UserAdmin):
add_form = MyUserCreationForm
inlines = (UserProfileAdmin, )
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
class CommentsAdmin(admin.ModelAdmin):
fields = ('user','text','datetime')
admin.site.register(Comments,CommentsAdmin)
class RollAdmin(admin.ModelAdmin):
fields = ('name',)
admin.site.register(Roll, RollAdmin)
class EmblemAdmin(admin.ModelAdmin):
fields = ('name', 'url',)
admin.site.register(Emblem, EmblemAdmin)
# class MyUserAdmin(UserAdmin):
# add_form = MyUserCreationForm
#
# admin.site.register(UserProfile, MyUserAdmin)
| mit | -309,975,475,201,732,700 | 28.714286 | 94 | 0.709135 | false |
g-goessel/mathdoku_solve | fonctions.py | 1 | 4674 | """
Functions
"""
from itertools import permutations, product
from functools import reduce
import numpy as np
def combi_possibles(val_tot,nbr_cases,nbr_max):
"""
Return the list of possible combinations.
"""
# check whether the value is already certain (single-cell cage)
if nbr_cases==1:
return [(val_tot,)]
combi=list()
list_div=[i for i in range(1,nbr_max+1) if val_tot/i==int(val_tot/i)]
combi_max=list(product([i for i in range(1,nbr_max+1)], repeat=nbr_cases))
combi_max_multipli=list(product(list_div, repeat=nbr_cases))
if val_tot <= nbr_max**2:
#on peut avoir une addition
for i in combi_max:
soustraction = reduce(lambda x,y: x-y, i)
somme = sum(i)
division = reduce(lambda x,y: x/y, i)
if somme == val_tot:
combi.append(i)
if soustraction == val_tot:
for j in list(permutations(i)):
combi.append(j)
if division == val_tot:
for j in list(permutations(i)):
combi.append(j)
for i in combi_max_multipli:
produit = reduce(lambda x,y: x*y, i)
if produit == val_tot:
combi.append(i)
return combi
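# Illustrative call (values chosen only for the example): combi_possibles(6, 2, 4)
# returns every ordered pair (a, b) with 1 <= a, b <= 4 whose sum, difference,
# quotient or product equals 6 -- e.g. (2, 4), (4, 2), (3, 3), (2, 3) and (3, 2).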
def bonoupas(matrice):
"""
Test whether matrice is valid by checking that each digit appears
only once per row and per column.
Return True if matrice is valid and False otherwise.
"""
size = len(matrice)
# (i_ref, j_ref) are the coordinates of the cell whose value we want to check is unique on its row/column
for i_ref in range(size):
for j_ref in range(size):
# check uniqueness in the column
for i in range(size):
if (matrice[i][j_ref]==matrice[i_ref][j_ref] and i != i_ref) and matrice[i][j_ref]!=0: return False
# then in the row
for j in range(size):
if matrice[i_ref][j]==matrice[i_ref][j_ref] and j != j_ref: return False
return True
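# Hand-checked examples (illustrative only):
#   bonoupas([[1, 2], [2, 1]]) -> True   # every digit unique on its row and column
#   bonoupas([[1, 1], [2, 1]]) -> False  # 1 repeated on the first row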
# Miscellaneous optimisations
def optimize(user_data):
"""
We can remove the duplicates
"""
for i in user_data:
user_data[i][2]=list(set(user_data[i][2]))
"""
Use the blocks with a single possibility to eliminate a large number of certainly impossible cases
"""
# collect the list of blocks with a single possibility
blocs_solo=list()
for i in user_data:
if len(user_data[i][2])==1:
blocs_solo.append(i)
for bloc_solo in blocs_solo:
coord_bloc_solo=user_data[bloc_solo][1][0]
for bloc_to_clean in user_data:
if bloc_to_clean==bloc_solo: pass
else :
# build the list of the cells of bloc_to_clean we need to look at (same row or column as the solo block)
cases_to_clean=[i for i,x in enumerate(user_data[bloc_to_clean][1]) if x[0]==coord_bloc_solo[0] or x[1]==coord_bloc_solo[1]]
for case_to_clean in cases_to_clean:
for i,coord in enumerate(user_data[bloc_to_clean][2]):
if user_data[bloc_to_clean][2][i][case_to_clean] == user_data[bloc_solo][0]:
del(user_data[bloc_to_clean][2][i])
"""
Remove combinations that are impossible because the same digit appears several times on the same row/column
"""
for bloc in user_data:
# In each block, list all the positions that cannot hold the same value
emplacements=[]
liste_x=[i[0] for i in user_data[bloc][1]]
liste_x_small=list(set(liste_x))
for key,x in enumerate(liste_x_small):
if liste_x.count(x)>1:
emplacements.append([i for i,j in enumerate(liste_x) if j == x and i != key])
liste_y=[i[1] for i in user_data[bloc][1]]
liste_y_small=list(set(liste_y))
for key,y in enumerate(liste_y_small):
if liste_y.count(y)>1:
emplacements.append([i for i,j in enumerate(liste_y) if j == y and i != key])
# Then eliminate the combinations that do not satisfy this constraint
for key,combinaison in enumerate(user_data[bloc][2]):
for combinaison_limitante in emplacements:
coord_interessantes=[combinaison[i] for i in list(combinaison_limitante)]
if len(coord_interessantes)!=len(set(coord_interessantes)):
user_data[bloc][2].pop(key)
return user_data
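# Note on the expected shape of user_data (inferred from the code above, not
# documented in the original): each cage id maps to
#   [target_value, list_of_cell_coordinates, list_of_candidate_combinations]
# and optimize() prunes the candidate list in place before returning the dict.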
| mpl-2.0 | -8,464,025,482,168,778,000 | 34.124031 | 140 | 0.574893 | false |
Shawnxkwang/CS0008-f2016 | f2016_cs8_xiw69_a1/f2016_cs8_xiw69_a1.py | 1 | 2622 |
# Xiaokai Wang
# [email protected]
# CS 0008
# assignment #1
# ask the user to input preferred unit system/ distance driven/ gas usage
choice = input("Please enter your preferred unit system (USC/Metric): ")
dist = float(input("Please input your distance driven: "))
gas = float(input("Please input your gas usage during this distance: "))
# initialize distance and gas in the other unit system
dist_conv = 0.0
gas_conv = 0.0
#check which unit system the user input.
if (choice.lower() == "usc"):
dist_conv = dist * 1.60934
gas_conv = gas * 3.78541
elif(choice.lower() == "metric"):
dist_conv = dist * 0.621371
gas_conv = gas * 0.264172
else:
print("Wrong unit system!")
# initialize mpg and liters per 100 kilometers
mpg = 0.0
lpk = 0.0
# compute the mpg and liters per 100 kilometers
if (choice.lower() == "usc"):
mpg = dist / gas
lpk = 100 * gas_conv / dist_conv
elif(choice.lower() == "metric"):
mpg = dist_conv / gas_conv
lpk = 100 * gas / dist
else:
print("Wrong unit system!")
# initialize the consumption rating category
rating = ""
# check validity and assign the consumption rating category
if (lpk < 0 ):
print("not valid gas usage per distance")
elif (lpk >= 0 and lpk <= 8):
rating = "Excellent"
elif (lpk > 8 and lpk <= 10):
rating = "Good"
elif(lpk > 10 and lpk <= 15):
rating = "Average"
elif(lpk > 15 and lpk <= 20):
rating = "Poor"
else:
rating = "Extremely poor"
# check the unit system and choose which output layout to use
if (choice.lower() == "usc"):
# print the final output
print("\t\t\t\t\t\t\t\t\t" + "USC" + "\t\t\t\t\t\t" + "Metric")
print("Distance ______________:" + "\t\t ", format(dist, '.3f'), " miles", "\t\t\t", format(dist_conv, '.3f')," Km")
print("Gas ___________________:" + "\t\t ", format(gas, '.3f')," gallons", "\t\t", format(gas_conv, '.3f')," Liters")
print("Consumption ___________:" + "\t\t ", format(mpg, '.3f'), " mpg", "\t\t\t\t", format(lpk, '.3f'), " 1/100Km")
print("")
print("Gas Consumption Rating : " + rating)
elif(choice.lower() == "metric"):
# print the final output
print("\t\t\t\t\t\t\t\t\t" + "USC" + "\t\t\t\t\t\t" + "Metric")
print("Distance ______________:" + "\t\t ", format(dist_conv, '.3f'), " miles", "\t\t\t", format(dist, '.3f')," Km")
print("Gas ___________________:" + "\t\t ", format(gas_conv, '.3f'), " gallons", "\t\t\t", format(gas, '.3f')," Liters")
print("Consumption ___________:" + "\t\t ", format(mpg, '.3f'), " mpg", "\t\t\t\t", format(lpk, '.3f')," 1/100Km")
print("")
print("Gas Consumption Rating : " + rating)
else:
print("Invalid unit system")
| mit | -1,917,226,436,211,530,000 | 29.137931 | 126 | 0.569794 | false |
Microsoft/PTVS | Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/conda/core/portability.py | 1 | 7021 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from logging import getLogger
from os.path import realpath
import re
import struct
from ..base.constants import PREFIX_PLACEHOLDER
from ..common.compat import on_win
from ..exceptions import CondaIOError, BinaryPrefixReplacementError
from ..gateways.disk.update import CancelOperation, update_file_in_place_as_binary
from ..models.enums import FileMode
log = getLogger(__name__)
# three capture groups: whole_shebang, executable, options
SHEBANG_REGEX = (br'^(#!' # pretty much the whole match string
br'(?:[ ]*)' # allow spaces between #! and beginning of the executable path
br'(/(?:\\ |[^ \n\r\t])*)' # the executable is the next text block without an escaped space or non-space whitespace character # NOQA
br'(.*)' # the rest of the line can contain option flags
br')$') # end whole_shebang group
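# Illustrative match (example path, not from the conda sources): against the line
#   b'#!/opt/conda/bin/python -E'
# the three groups are whole_shebang=b'#!/opt/conda/bin/python -E',
# executable=b'/opt/conda/bin/python', and options=b' -E'.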
class _PaddingError(Exception):
pass
def update_prefix(path, new_prefix, placeholder=PREFIX_PLACEHOLDER, mode=FileMode.text):
if on_win and mode == FileMode.text:
# force all prefix replacements to forward slashes to simplify need to escape backslashes
# replace with unix-style path separators
new_prefix = new_prefix.replace('\\', '/')
def _update_prefix(original_data):
# Step 1. do all prefix replacement
data = replace_prefix(mode, original_data, placeholder, new_prefix)
# Step 2. if the shebang is too long, shorten it using /usr/bin/env trick
if not on_win:
data = replace_long_shebang(mode, data)
# Step 3. if the before and after content is the same, skip writing
if data == original_data:
raise CancelOperation()
# Step 4. if we have a binary file, make sure the byte size is the same before
# and after the update
if mode == FileMode.binary and len(data) != len(original_data):
raise BinaryPrefixReplacementError(path, placeholder, new_prefix,
len(original_data), len(data))
return data
update_file_in_place_as_binary(realpath(path), _update_prefix)
def replace_prefix(mode, data, placeholder, new_prefix):
if mode == FileMode.text:
data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
elif mode == FileMode.binary:
data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
else:
raise CondaIOError("Invalid mode: %r" % mode)
return data
def binary_replace(data, a, b):
"""
Perform a binary replacement of `data`, where the placeholder `a` is
replaced with `b` and the remaining string is padded with null characters.
All input arguments are expected to be bytes objects.
"""
if on_win:
# on Windows for binary files, we currently only replace a pyzzer-type entry point
# we skip all other prefix replacement
if has_pyzzer_entry_point(data):
return replace_pyzzer_entry_point_shebang(data, a, b)
else:
return data
def replace(match):
        occurrences = match.group().count(a)
        padding = (len(a) - len(b)) * occurrences
if padding < 0:
raise _PaddingError
return match.group().replace(a, b) + b'\0' * padding
original_data_len = len(data)
pat = re.compile(re.escape(a) + b'([^\0]*?)\0')
data = pat.sub(replace, data)
assert len(data) == original_data_len
return data
def has_pyzzer_entry_point(data):
pos = data.rfind(b'PK\x05\x06')
return pos >= 0
def replace_pyzzer_entry_point_shebang(all_data, placeholder, new_prefix):
"""Code adapted from pyzzer. This is meant to deal with entry point exe's created by distlib,
which consist of a launcher, then a shebang, then a zip archive of the entry point code to run.
We need to change the shebang.
https://bitbucket.org/vinay.sajip/pyzzer/src/5d5740cb04308f067d5844a56fbe91e7a27efccc/pyzzer/__init__.py?at=default&fileviewer=file-view-default#__init__.py-112 # NOQA
"""
# Copyright (c) 2013 Vinay Sajip.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
launcher = shebang = None
pos = all_data.rfind(b'PK\x05\x06')
if pos >= 0:
end_cdr = all_data[pos + 12:pos + 20]
cdr_size, cdr_offset = struct.unpack('<LL', end_cdr)
arc_pos = pos - cdr_size - cdr_offset
data = all_data[arc_pos:]
if arc_pos > 0:
pos = all_data.rfind(b'#!', 0, arc_pos)
if pos >= 0:
shebang = all_data[pos:arc_pos]
if pos > 0:
launcher = all_data[:pos]
if data and shebang and launcher:
if hasattr(placeholder, 'encode'):
placeholder = placeholder.encode('utf-8')
if hasattr(new_prefix, 'encode'):
new_prefix = new_prefix.encode('utf-8')
shebang = shebang.replace(placeholder, new_prefix)
all_data = b"".join([launcher, shebang, data])
return all_data
def replace_long_shebang(mode, data):
# this function only changes a shebang line if it exists and is greater than 127 characters
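    # For instance (hypothetical), a shebang whose interpreter path pushes the line past
    # 127 bytes, e.g. #!/<very long conda prefix>/bin/python -u, is rewritten to the
    # short, portable form #!/usr/bin/env python -u.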
if mode == FileMode.text:
shebang_match = re.match(SHEBANG_REGEX, data, re.MULTILINE)
if shebang_match:
whole_shebang, executable, options = shebang_match.groups()
if len(whole_shebang) > 127:
executable_name = executable.decode('utf-8').split('/')[-1]
new_shebang = '#!/usr/bin/env %s%s' % (executable_name, options.decode('utf-8'))
data = data.replace(whole_shebang, new_shebang.encode('utf-8'))
else:
# TODO: binary shebangs exist; figure this out in the future if text works well
pass
return data
| apache-2.0 | -6,387,582,113,646,030,000 | 41.041916 | 172 | 0.64478 | false |
sudhaMR/Django-Perception | imgpage/urls.py | 1 | 1204 | """perception URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import url,patterns
from imgpage import views
from perception.settings import STATIC_PATH, DEBUG
urlpatterns = patterns('',
url(r'^$',views.add_category,name='add_category'),
url(r'^about/',views.about,name='about'),
url(r'^taginfo/',views.taginfo,name='taginfo'),
url(r'^static/(.*)$', 'django.views.static.serve', {'document_root': STATIC_PATH, 'show_indexes': True}),
url(r'^static/', 'django.views.static.serve', {'document_root': STATIC_PATH, 'show_indexes': True}),
url(r'^add_category/$', views.add_category, name='add_category'))
| mit | 2,675,888,157,375,171,000 | 45.307692 | 109 | 0.689369 | false |