# Code-dataset sample. Each record carries the columns: repo_name, path,
# copies, size, content, license, hash, line_mean, line_max, alpha_frac,
# and autogenerated. The sampled source files follow below.

# =============================================================================
# Source file: RENCI/xDCIShare :: hs_geo_raster_resource/migrations/0001_initial.py
# License: bsd-3-clause
# =============================================================================
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import hs_core.models
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('pages', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0001_initial'),
('hs_core', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='BandInformation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('name', models.CharField(max_length=500, null=True)),
('variableName', models.TextField(max_length=100, null=True)),
('variableUnit', models.CharField(max_length=50, null=True)),
('method', models.TextField(null=True, blank=True)),
('comment', models.TextField(null=True, blank=True)),
('content_type', models.ForeignKey(related_name='hs_geo_raster_resource_bandinformation_related', to='contenttypes.ContentType')),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='CellInformation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('name', models.CharField(max_length=500, null=True)),
('rows', models.IntegerField(null=True)),
('columns', models.IntegerField(null=True)),
('cellSizeXValue', models.FloatField(null=True)),
('cellSizeYValue', models.FloatField(null=True)),
('cellSizeUnit', models.CharField(max_length=50, null=True)),
('cellDataType', models.CharField(max_length=50, null=True)),
('noDataValue', models.FloatField(null=True)),
('content_type', models.ForeignKey(related_name='hs_geo_raster_resource_cellinformation_related', to='contenttypes.ContentType')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OriginalCoverage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('_value', models.CharField(max_length=1024, null=True)),
('content_type', models.ForeignKey(related_name='hs_geo_raster_resource_originalcoverage_related', to='contenttypes.ContentType')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='RasterMetaData',
fields=[
('coremetadata_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='hs_core.CoreMetaData')),
],
options={
},
bases=('hs_core.coremetadata',),
),
migrations.CreateModel(
name='RasterResource',
fields=[
('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='pages.Page')),
('comments_count', models.IntegerField(default=0, editable=False)),
('rating_count', models.IntegerField(default=0, editable=False)),
('rating_sum', models.IntegerField(default=0, editable=False)),
('rating_average', models.FloatField(default=0, editable=False)),
('public', models.BooleanField(default=True, help_text=b'If this is true, the resource is viewable and downloadable by anyone')),
('frozen', models.BooleanField(default=False, help_text=b'If this is true, the resource should not be modified')),
('do_not_distribute', models.BooleanField(default=False, help_text=b'If this is true, the resource owner has to designate viewers')),
('discoverable', models.BooleanField(default=True, help_text=b'If this is true, it will turn up in searches.')),
('published_and_frozen', models.BooleanField(default=False, help_text=b'Once this is true, no changes can be made to the resource')),
('content', models.TextField()),
('short_id', models.CharField(default=hs_core.models.short_id, max_length=32, db_index=True)),
('doi', models.CharField(help_text=b"Permanent identifier. Never changes once it's been set.", max_length=1024, null=True, db_index=True, blank=True)),
('object_id', models.PositiveIntegerField(null=True, blank=True)),
('content_type', models.ForeignKey(blank=True, to='contenttypes.ContentType', null=True)),
('creator', models.ForeignKey(related_name='creator_of_hs_geo_raster_resource_rasterresource', to=settings.AUTH_USER_MODEL, help_text=b'This is the person who first uploaded the resource')),
('edit_groups', models.ManyToManyField(help_text=b'This is the set of xDCIShare Groups who can edit the resource', related_name='group_editable_hs_geo_raster_resource_rasterresource', null=True, to=b'auth.Group', blank=True)),
('edit_users', models.ManyToManyField(help_text=b'This is the set of xDCIShare Users who can edit the resource', related_name='user_editable_hs_geo_raster_resource_rasterresource', null=True, to=settings.AUTH_USER_MODEL, blank=True)),
('last_changed_by', models.ForeignKey(related_name='last_changed_hs_geo_raster_resource_rasterresource', to=settings.AUTH_USER_MODEL, help_text=b'The person who last changed the resource', null=True)),
('owners', models.ManyToManyField(help_text=b'The person who has total ownership of the resource', related_name='owns_hs_geo_raster_resource_rasterresource', to=settings.AUTH_USER_MODEL)),
('user', models.ForeignKey(related_name='rasterresources', verbose_name='Author', to=settings.AUTH_USER_MODEL)),
('view_groups', models.ManyToManyField(help_text=b'This is the set of xDCIShare Groups who can view the resource', related_name='group_viewable_hs_geo_raster_resource_rasterresource', null=True, to=b'auth.Group', blank=True)),
('view_users', models.ManyToManyField(help_text=b'This is the set of xDCIShare Users who can view the resource', related_name='user_viewable_hs_geo_raster_resource_rasterresource', null=True, to=settings.AUTH_USER_MODEL, blank=True)),
],
options={
'ordering': ('_order',),
'verbose_name': 'Geographic Raster',
},
bases=('pages.page', models.Model),
),
migrations.AlterUniqueTogether(
name='originalcoverage',
unique_together=set([('content_type', 'object_id')]),
),
migrations.AlterUniqueTogether(
name='cellinformation',
unique_together=set([('content_type', 'object_id')]),
),
migrations.RemoveField(
model_name='cellinformation',
name='cellSizeUnit',
),
]

# =============================================================================
# Source file: jhavstad/model_runner :: src/Dnd.py
# License: gpl-2.0
# =============================================================================
"""Drag-and-drop support for Tkinter.
This is very preliminary. I currently only support dnd *within* one
application, between different windows (or within the same window).
I am trying to make this as generic as possible -- not dependent on
the use of a particular widget or icon type, etc. I also hope that
this will work with Pmw.
To enable an object to be dragged, you must create an event binding
for it that starts the drag-and-drop process. Typically, you should
bind <ButtonPress> to a callback function that you write. The function
should call Tkdnd.dnd_start(source, event), where 'source' is the
object to be dragged, and 'event' is the event that invoked the call
(the argument to your callback function). Even though this is a class
instantiation, the returned instance should not be stored -- it will
be kept alive automatically for the duration of the drag-and-drop.
When a drag-and-drop is already in process for the Tk interpreter, the
call is *ignored*; this normally averts starting multiple simultaneous
dnd processes, e.g. because different button callbacks all
dnd_start().
The object is *not* necessarily a widget -- it can be any
application-specific object that is meaningful to potential
drag-and-drop targets.
Potential drag-and-drop targets are discovered as follows. Whenever
the mouse moves, and at the start and end of a drag-and-drop move, the
Tk widget directly under the mouse is inspected. This is the target
widget (not to be confused with the target object, yet to be
determined). If there is no target widget, there is no dnd target
object. If there is a target widget, and it has an attribute
dnd_accept, this should be a function (or any callable object). The
function is called as dnd_accept(source, event), where 'source' is the
object being dragged (the object passed to dnd_start() above), and
'event' is the most recent event object (generally a <Motion> event;
it can also be <ButtonPress> or <ButtonRelease>). If the dnd_accept()
function returns something other than None, this is the new dnd target
object. If dnd_accept() returns None, or if the target widget has no
dnd_accept attribute, the target widget's parent is considered as the
target widget, and the search for a target object is repeated from
there. If necessary, the search is repeated all the way up to the
root widget. If none of the target widgets can produce a target
object, there is no target object (the target object is None).
The target object thus produced, if any, is called the new target
object. It is compared with the old target object (or None, if there
was no old target widget). There are several cases ('source' is the
source object, and 'event' is the most recent event object):
- Both the old and new target objects are None. Nothing happens.
- The old and new target objects are the same object. Its method
dnd_motion(source, event) is called.
- The old target object was None, and the new target object is not
None. The new target object's method dnd_enter(source, event) is
called.
- The new target object is None, and the old target object is not
None. The old target object's method dnd_leave(source, event) is
called.
- The old and new target objects differ and neither is None. The old
target object's method dnd_leave(source, event), and then the new
target object's method dnd_enter(source, event) is called.
Once this is done, the new target object replaces the old one, and the
Tk mainloop proceeds. The return value of the methods mentioned above
is ignored; if they raise an exception, the normal exception handling
mechanisms take over.
The drag-and-drop processes can end in two ways: a final target object
is selected, or no final target object is selected. When a final
target object is selected, it will always have been notified of the
potential drop by a call to its dnd_enter() method, as described
above, and possibly one or more calls to its dnd_motion() method; its
dnd_leave() method has not been called since the last call to
dnd_enter(). The target is notified of the drop by a call to its
method dnd_commit(source, event).
If no final target object is selected, and there was an old target
object, its dnd_leave(source, event) method is called to complete the
dnd sequence.
Finally, the source object is notified that the drag-and-drop process
is over, by a call to source.dnd_end(target, event), specifying either
the selected target object, or None if no target object was selected.
The source object can use this to implement the commit action; this is
sometimes simpler than to do it in the target's dnd_commit(). The
target's dnd_commit() method could then simply be aliased to
dnd_leave().
At any time during a dnd sequence, the application can cancel the
sequence by calling the cancel() method on the object returned by
dnd_start(). This will call dnd_leave() if a target is currently
active; it will never call dnd_commit().
"""
import sys
import Tkinter
import ttk
import MyPmwScrolledFrame
from PIL import Image
from PIL import ImageTk
class DrawCanvas(Tkinter.Toplevel):
def __init__(self, root, geometry, parent, alpha=0.50):
Tkinter.Toplevel.__init__(self)
#geometry = geometry[geometry.find('+'):]
#print 'Geometry ' + geometry
#x1, y1, x2, y2 = root.bbox()
#print 'X ' + str(root.winfo_rootx())
#print 'Y ' + str(root.winfo_rooty())
geometry = geometry[0:geometry.rindex('+')] + '+' + str(root.winfo_rooty())
self.geometry(geometry)
self.overrideredirect(1)
self.transient(root)
self.attributes('-alpha', alpha)
self.canvas = Tkinter.Canvas(self)
self.canvas.pack(expand=True, fill=Tkinter.BOTH)
self.canvas.name = 'Draw Canvas'
self.parent = parent
self.dnd_accept = parent.dnd_accept
# The factory function
def dnd_start(source, event, parent):
h = DndHandler(source, event, parent)
#print 'Created new DnD Handler'
if h.root:
return h
else:
return None
# The class that does the work
class DndHandler:
root = None
def __init__(self, source, event, parent):
if event.num > 5:
return
root = event.widget._root()
try:
root.__dnd
return # Don't start recursive dnd
except AttributeError:
root.__dnd = self
self.root = root
self.source = source
self.target = None
self.initial_button = button = event.num
self.initial_widget = widget = event.widget
self.source_widget = source.parent.scrolled_frame
self.release_pattern = "<B%d-ButtonRelease-%d>" % (button, button)
self.save_cursor = widget['cursor'] or ""
widget.dnd_bind(self.release_pattern, self.on_release, self.on_motion, "hand2")
self.parent = parent
#self.draw_canvas = DrawCanvas(self.root.winfo_toplevel(), self.root.winfo_toplevel().winfo_geometry(), parent)
self.draw_canvas = DrawCanvas(self.parent.root, self.parent.root.winfo_geometry(), self.parent)
def __del__(self):
root = self.root
self.root = None
if root:
try:
del root.__dnd
except AttributeError:
pass
def on_motion(self, event):
#print 'On motion'
x, y = event.x_root, event.y_root
target_widget = self.source_widget.winfo_containing(x, y)
source = self.source
def on_motion(self, event):
#print 'On motion'
x, y = event.x_root, event.y_root
#target_widget = self.initial_widget.winfo_containing(x, y)
#self.parent.dnd_lift()
#target_widget = self.source_widget.winfo_containing(x, y)
source = self.source
target_widget = self.parent.dnd_lift(x, y)
if target_widget == None:
self.parent.dnd_enter(source, event, self.draw_canvas.canvas, self.draw_canvas)
self.target = self.parent
return
#print 'Target widget class: ' + target_widget.winfo_class()
new_target = None
while target_widget:
try:
attr = target_widget.dnd_accept
except AttributeError:
pass
else:
new_target = attr(source, event)
target_widget = target_widget.master
old_target = self.target
if old_target:
pass #print 'Old Target: ' + old_target.name
if new_target:
pass #print 'New target: ' + new_target.name
if old_target is new_target:
if old_target:
old_target.dnd_motion(source, event, self.draw_canvas.canvas)
else:
if old_target:
#print 'On Leave'
self.target = None
old_target.dnd_leave(source, event, self.draw_canvas.canvas)
source.moved = True
if new_target:
#print 'On Enter'
new_target.dnd_enter(source, event, self.draw_canvas.canvas, self.draw_canvas)
self.target = new_target
#print 'Finished On motion\n'
def on_release(self, event):
self.finish(event, 1)
def cancel(self, event=None):
self.finish(event, 0)
def finish(self, event, commit=0):
x, y = event.x, event.y
target = self.target
source = self.source
widget = self.initial_widget
root = self.root
try:
del root.__dnd
self.initial_widget.unbind(self.release_pattern)
self.initial_widget.unbind("<Motion>")
widget['cursor'] = self.save_cursor
self.target = self.source = self.initial_widget = self.root = None
if target:
if commit:
target.dnd_commit(source, event, self.draw_canvas.canvas)
else:
target.dnd_leave(source, event, self.draw_canvas.canvas)
finally:
source.dnd_end(target, event)
self.draw_canvas.canvas.delete(source.id)
self.draw_canvas.destroy()
class IconLabel(ttk.Frame):
def __init__(self, parent, text, borderwidth, relief, icon_fname, root):
ttk.Frame.__init__(self, parent, borderwidth=borderwidth, relief=relief)
self.label_text = Tkinter.StringVar(value=text)
#max_word_length = self.longest_word_length(text)
        # If the text is longer than 12 characters, truncate it for display
if len(text) > 12:
text = text[0:12] + '...'
self.label = ttk.Label(self, text=text, wraplength=60)
image = Image.open(icon_fname)
#icon = ImageTk.PhotoImage(image=image, master=root)
icon = ImageTk.PhotoImage(image=image)
#print 'Icon is: ' + icon_fname
self.button = ttk.Button(master=self, image=icon)
self.image = icon
self.button.config(state=Tkinter.DISABLED)
self.release_pattern = None
self.btn_press_pattern = None
self.hint_text = self.label_text
self.label.hint_text = self.hint_text
self.button.hint_text = self.hint_text
def longest_word_length(self, text):
words = text.split(' ')
max_length = 0
for word in words:
if len(word) > max_length:
                max_length = len(word)
return max_length
def set_grid(self, row, column):
self.grid(row=row, column=column, padx=2, pady=2)
self.button.grid(row=0)
self.label.grid(row=1)
def unset_grid(self):
self.button.grid_forget()
self.label.grid_forget()
self.grid_forget()
def btn_press_bind(self, btn_press_pattern, on_press):
self.bind(btn_press_pattern, on_press)
#self.label.bind(btn_press_pattern, on_press)
#self.button.bind(btn_press_pattern, on_press)
self.label.bind(btn_press_pattern, self.gen_on_btn_press)
self.button.bind(btn_press_pattern, self.gen_on_btn_press)
self.btn_press_pattern = btn_press_pattern
def dnd_bind(self, release_pattern, on_release, on_motion, cursor_icon):
#print release_pattern
self.bind(release_pattern, on_release)
self.bind("<Motion>", on_motion)
self.label.bind(release_pattern, self.gen_on_release)
self.label.bind("<Motion>", self.gen_on_motion)
self.button.bind(release_pattern, self.gen_on_release)
self.button.bind("<Motion>", self.gen_on_motion)
self['cursor'] = cursor_icon
self.release_pattern = release_pattern
def gen_on_btn_press(self, event):
#print 'gen on btn press'
self.event_generate(self.btn_press_pattern, button=event.num, when='tail')
def gen_on_release(self, event):
#print 'gen on release'
self.event_generate(self.release_pattern, when='tail')
def gen_on_motion(self, event):
#print 'gen on motion'
self.event_generate("<Motion>", x=event.x, y=event.y, when='tail')
def create_window(self, canvas, x, y):
#print 'Create window'
self.button.grid(row=0)
self.label.grid(row=1)
id = canvas.create_window(x, y, window=self)
return id
def get_label_text(self):
return self.label_text.get()
# ----------------------------------------------------------------------
# The rest is here for testing and demonstration purposes only!
class InteractiveFile:
def __init__(self, name, icon_fname, root=None):
self.name = name
self.icon_fname = icon_fname
#self.canvas = self.label = self.id = self.parent = None
self.scrolled_frame = self.label = self.id = self.parent = None
self.moved = False
self.root = root
def attach(self, parent, x=10, y=10, grid_all=False):
if parent.scrolled_frame is self.scrolled_frame:
#self.canvas.coords(self.id, x, y)
return
if self.scrolled_frame:
self.detach()
if not parent.scrolled_frame:
return
label = IconLabel(parent.scrolled_frame.interior(), text=self.name,
borderwidth=2, relief='flat', icon_fname=self.icon_fname, root=self.root)
id = None
#label.grid(row=self.row, column=self.column)
self.scrolled_frame = parent.scrolled_frame
#self.canvas = parent.canvas
self.label = label
self.id = id
self.parent = parent
label.btn_press_bind("<ButtonPress>", self.press)
parent.attach(window=label)
def detach(self):
self.parent.detach(self.label)
#canvas = self.canvas
#if not canvas:
#return
scrolled_frame = self.scrolled_frame
if not scrolled_frame:
return
id = self.id
label = self.label
#self.canvas = self.label = self.id = self.scrolled_frame = None
self.label = self.id = self.scrolled_frame = self.parent = None
#canvas.delete(id)
label.destroy()
def press(self, event):
h = dnd_start(self, event, self.parent)
if h:
# where the pointer is relative to the label widget:
self.x_off = event.x
self.y_off = event.y
# where the widget is relative to the canvas:
#self.x_orig, self.y_orig = self.canvas.coords(self.id)
self.x_orig, self.y_orig = self.label.winfo_rootx(), self.label.winfo_rooty()
def move(self, event, canvas):
x, y = self.where(canvas, event)
canvas.coords(self.id, x, y)
def putback(self, canvas):
canvas.coords(self.id, self.x_orig, self.y_orig)
def where(self, canvas, event):
# where the corner of the canvas is relative to the screen:
x_org = canvas.winfo_rootx()
y_org = canvas.winfo_rooty()
# where the pointer is relative to the canvas widget:
x = event.x_root - x_org
y = event.y_root - y_org
# compensate for initial pointer offset
return x - self.x_off, y - self.y_off
def dnd_end(self, target, event):
pass
class DnDFilePane:
def __init__(self, root, pane_name):
#self.top = Tkinter.Toplevel(root)
self.top = ttk.LabelFrame(root, borderwidth=1, text=pane_name, relief='sunken')
self.name = pane_name
#self.top.dnd_accept = self.dnd_accept
#self.canvas_xscroll = ttk.Scrollbar(self.top, orient=Tkinter.HORIZONTAL)
#self.canvas_yscroll = ttk.Scrollbar(self.top, orient=Tkinter.VERTICAL)
self.scrolled_frame = MyPmwScrolledFrame.ScrolledFrame(self.top, horizflex='expand', vertflex='expand')
self.scrolled_frame.interior().config(borderwidth=1, relief='sunken')
#self.canvas = Tkinter.Canvas(self.scrolled_frame.interior())
#self.canvas.config(xscrollcommand=self.canvas_xscroll.set, yscrollcommand=self.canvas_yscroll.set)
#self.canvas_xscroll.config(command=self.canvas.xview)
#self.canvas_yscroll.config(command=self.canvas.yview)
#self.id = self.canvas.create_window(10, 10, window=self.top)
#self.canvas.pack(fill="both", expand=1)
#self.canvas.dnd_accept = self.dnd_accept
self.scrolled_frame.master = None
self.scrolled_frame.interior().root = self
self.scrolled_frame.component('hull').root = self
self.scrolled_frame.dnd_accept = self.dnd_accept
self.scrolled_frame.component('hull').dnd_accept = self.dnd_accept
self.scrolled_frame.interior().dnd_accept = self.dnd_accept
self.row = -1
self.column = -1
self.padx = -1
self.pady = -1
self.sticky = None
self.root = root
self.moved = False
self.children = list()
self.current_width = 0
def dnd_lift(self, x, y):
parent = self.root.winfo_toplevel()
#print '\n'
#print 'Parent: ' + parent.winfo_class()
find = self.dnd_find(parent, x, y)
if not find:
pass
#print 'Target not found'
else:
pass
#print 'Target found: ' + find.winfo_class()
#print '\n'
return find
def dnd_find(self, target_candidate, x, y):
#print 'Target: ' + target_candidate.winfo_class()
if target_candidate.winfo_class() != 'ScrolledFrame':
children = target_candidate.winfo_children()
for child in children:
#print 'Calling find'
find = self.dnd_find(child, x, y)
#print 'Return from find'
if find:
return find
# If the target_candidate is of the same type as the target type
# then determine if it is the actual target
try:
x1, y1, x2, y2 = target_candidate.bbox()
except Tkinter.TclError as tcle:
#print 'TclError: ' + str(tcle)
return None
#x += target_candidate.winfo_rootx()
x1 += target_candidate.winfo_rootx()
x2 += target_candidate.winfo_rootx()
#y += target_candidate.winfo_rooty()
y1 += target_candidate.winfo_rooty()
y2 += target_candidate.winfo_rooty()
#print 'x1 = ' + str(x1) + ' x2 = ' + str(x2) + ' y1 = ' + str(y1) + ' y2 = ' + str(y2)
#print 'x = ' + str(x) + ' y = ' + str(y)
if x >= x1 and x <= x2 and y >= y1 and y <= y2:
return target_candidate
return None
def dnd_accept(self, source, event):
return self
def dnd_enter(self, source, event, canvas, root):
#print 'Dnd Enter'
#canvas.focus_set() # Show highlight border
source.scrolled_frame.interior().focus_set()
x, y = source.where(canvas, event)
if source.id == None:
label = IconLabel(canvas, text=source.name,
borderwidth=2, relief='raised',
icon_fname=source.icon_fname, root=root)
#id = canvas.create_window(x, y, window=label)
id = label.create_window(canvas, x, y)
source.id = id
x1, y1, x2, y2 = canvas.bbox(source.id)
dx, dy = x2-x1, y2-y1
#print 'dx ' + str(dx)
#print 'dy ' + str(dy)
#self.dndid = canvas.create_rectangle(x, y, x+dx, y+dy)
self.dnd_motion(source, event, canvas)
def dnd_motion(self, source, event, canvas):
#print 'Dnd motion'
x, y = source.where(canvas, event)
#x1, y1, x2, y2 = canvas.bbox(self.dndid)
#print source.id
x1, y1, x2, y2 = canvas.bbox(source.id)
#canvas.move(self.dndid, x-x1, y-y1)
if source.moved:
x_center = source.label.winfo_width()
y_center = source.label.winfo_height()
x1 -= int(float(x_center)/2.0)
y1 -= int(float(y_center)/2.0)
source.moved = False
canvas.move(source.id, x-x1, y-y1)
def dnd_leave(self, source, event, canvas):
#print 'Dnd leave'
self.top.focus_set() # Hide highlight border
canvas.delete(source.id)
source.id = None
def dnd_commit(self, source, event, canvas):
#print 'Dnd commit'
self.dnd_leave(source, event, canvas)
x, y = source.where(canvas, event)
source.attach(self, x, y, True)
def set_grid(self, row, column, sticky, padx, pady):
self.row = row
self.column = column
self.sticky = sticky
self.padx = padx
self.pady = pady
self.regrid()
def attach(self, window):
self.children.append(window)
self.regrid()
def detach(self, window):
#window.grid_forget()
window.unset_grid()
self.children.remove(window)
self.regrid()
def regrid(self):
if self.row == 0:
self.top.pack(expand=True, fill=Tkinter.BOTH, side=Tkinter.LEFT)
else:
self.top.pack(expand=True, fill=Tkinter.BOTH, side=Tkinter.RIGHT)
#self.scrolled_frame.place(relx=0.0, rely=0.0, relwidth=1.0, relheight=1.0)
self.scrolled_frame.pack(expand=True, fill=Tkinter.BOTH)
# Re-grid children
children = self.children
current_row = 0
current_column = 0
self.current_width = 0
for child in children:
child.set_grid(row=current_row, column=current_column)
current_column += 1
window_width = self.scrolled_frame.component('hull').winfo_width()
child_width = child.winfo_width()
#print 'Child width=' + str(child_width) + ', Interior width=' + str(window_width)
#print self.scrolled_frame.components()
self.current_width += child_width
if self.current_width > window_width:
current_column = 0
current_row += 1
self.current_width = 0
self.refresh()
def ungrid(self):
children = self.children
for child in children:
child.label.grid_forget()
child.grid_forget()
self.scrolled_frame.pack_forget()
self.top.pack_forget()
def resize(self, width, height):
self.top.grid_remove()
self.regrid()
def update_dims(self):
self.width = self.top.winfo_width()
self.height = self.top.winfo_height()
def refresh(self):
#self.scrolled_frame.component('hull').update()
self.scrolled_frame.component('horizscrollbar').propagate()
self.scrolled_frame.component('vertscrollbar').propagate()
def get_children_labels(self):
children_labels = list()
for child in self.children:
children_labels.append(child.get_label_text())
return children_labels
def clear(self):
self.ungrid()
self.children = list()
class MainContainer:
def __init__(self, main, left_frame, right_frame, finish_creating_plot_gui):
self.main = main
self.left_frame = left_frame
self.right_frame = right_frame
self.width = main.winfo_width()
self.height = main.winfo_height()
self.release_pattern = None
self.on_button = False
self.finish_creating_plot_gui = finish_creating_plot_gui
self.hint_label = None
#self.main.bind('<Motion>', self.on_label_hint)
#self.draw_canvas = DrawCanvas(self.main, self.main.winfo_geometry(), self, alpha=0.0)
def on_resize(self, event):
width = event.width
height = event.height
if isinstance(event.widget, Tkinter.Tk):
#print 'Event resize: ' + event.widget.winfo_class() + ' ' + str(event.num)
self.left_frame.regrid()
self.right_frame.regrid()
def on_press(self, event):
self.on_button = True
self.release_pattern = "<B%d-ButtonRelease-%d>" % (event.num, event.num)
self.main.bind(self.release_pattern, self.on_release)
def on_release(self, event):
self.on_button = False
def clear_windows(self):
self.main.withdraw()
self.left_frame.ungrid()
self.right_frame.ungrid()
def on_continue(self):
self.clear_windows()
# TODO: Iterate through all the items in RIGHT pane to get the plots the user wants and then pass them
# back to the caller
self.finish_creating_plot_gui(self.right_frame.get_children_labels())
def on_close(self):
self.clear_windows()
self.finish_creating_plot_gui(list())
def on_label_hint(self, event):
x, y = event.x_root, event.y_root
target_widget = self.dnd_lift(x, y)
if isinstance(target_widget, ttk.Label):
print 'The underlying widget is a ttk.Label'
elif isinstance(target_widget, ttk.Button):
print 'The underlying widget is a ttk.Button'
elif isinstance(target_widget, ttk.LabelFrame):
print 'The underlying widget is a ttk.LabelFrame'
elif isinstance(target_widget, ttk.Frame):
print 'The underlying widget is a ttk.Frame'
else:
print 'The underlying widget is of an unknown widget type'
label_text = 'Graph file'
try:
label_text = target_widget.hint_text
except AttributeError:
print 'Could not find hint label'
if self.hint_label != None:
label = self.hint_label
label.destroy()
self.hint_label = None
self.hint_label = ttk.Label(target_widget, text=label_text)
self.id = self.draw_canvas.canvas.create_window(x, y, window=self.hint_label)
print 'Created hint text'
def dnd_lift(self, x, y):
parent = self.main.winfo_toplevel()
#print '\n'
#print 'Parent: ' + parent.winfo_class()
find = self.dnd_find(parent, x, y)
if not find:
pass
#print 'Target not found'
else:
pass
#print 'Target found: ' + find.winfo_class()
#print '\n'
return find
def dnd_find(self, target_candidate, x, y):
#print 'Target: ' + target_candidate.winfo_class()
if target_candidate.winfo_class() != 'TopLevel':
children = target_candidate.winfo_children()
for child in children:
#print 'Calling find'
find = self.dnd_find(child, x, y)
#print 'Return from find'
if find:
return find
# If the target_candidate is of the same type as the target type
# then determine if it is the actual target
try:
x1, y1, x2, y2 = target_candidate.bbox()
except Tkinter.TclError as tcle:
#print 'TclError: ' + str(tcle)
return None
#x += target_candidate.winfo_rootx()
x1 += target_candidate.winfo_rootx()
x2 += target_candidate.winfo_rootx()
#y += target_candidate.winfo_rooty()
y1 += target_candidate.winfo_rooty()
y2 += target_candidate.winfo_rooty()
#print 'x1 = ' + str(x1) + ' x2 = ' + str(x2) + ' y1 = ' + str(y1) + ' y2 = ' + str(y2)
#print 'x = ' + str(x) + ' y = ' + str(y)
if x >= x1 and x <= x2 and y >= y1 and y <= y2:
return target_candidate
return None
def dnd_accept(self):
pass
def printChildren(parent, depth=0):
#print 'Widget ' + parent.winfo_class() + ' width = ' + str(parent.winfo_width()) + ' height = ' + str(parent.winfo_height())
if depth < 100:
children = parent.winfo_children()
for child in children:
printChildren(child, depth)
def createFrame(parent=None, plot_title=None, graphs=list(), finish_creating_plot_gui=None):
main = parent
if main == None:
main = Tkinter.Tk()
else:
main = Tkinter.Toplevel(parent)
if plot_title == None:
main.title('Graphs')
else:
main.title(plot_title)
btn_frame_outer = ttk.Frame(main)
btn_frame_outer.pack(expand=True, fill=Tkinter.X, side=Tkinter.BOTTOM)
btn_frame_inner = ttk.Frame(btn_frame_outer)
btn_frame_inner.pack(side=Tkinter.RIGHT)
left_frame = DnDFilePane(main, pane_name='Output Graphs')
left_frame.set_grid(row=0, column=0, sticky=Tkinter.E + Tkinter.W + Tkinter.N + Tkinter.S, padx=2, pady=2)
right_frame = DnDFilePane(main, pane_name='Graphs To Display')
right_frame.set_grid(row=0, column=1, sticky=Tkinter.E + Tkinter.W + Tkinter.N + Tkinter.S, padx=2, pady=2)
filename = 'file_icon.png'
for graph in graphs:
interactive_file = InteractiveFile(name=graph, icon_fname=filename, root=main)
interactive_file.attach(left_frame)
left_frame_children = left_frame.scrolled_frame.interior().winfo_children()
child_index = 0
for child in left_frame_children:
#print 'Child ' + str(child_index) + ' width: ' + str(child.winfo_reqwidth())
#print 'Child ' + str(child_index) + ' height: ' + str(child.winfo_reqheight())
child_index += 1
#main.minsize(width=10, height=10)
#main.maxsize(width=100, height=100)
main_container = MainContainer(left_frame=left_frame, right_frame=right_frame, main=main, finish_creating_plot_gui=finish_creating_plot_gui)
main.bind('<ButtonPress>', main_container.on_press)
main.bind('<Configure>', main_container.on_resize)
ok_btn = ttk.Button(btn_frame_inner, text='OK', command=main_container.on_continue)
cancel_btn = ttk.Button(btn_frame_inner, text='Cancel', command=main_container.on_close)
cancel_btn.grid(row=1, column=0, sticky=Tkinter.E + Tkinter.W + Tkinter.N + Tkinter.S, padx=2, pady=2)
ok_btn.grid(row=1, column=1, sticky=Tkinter.E + Tkinter.W + Tkinter.N + Tkinter.S, padx=2, pady=2)
main.grid()
return main
def main(argv):
#printChildren(main)
#sample_args = list()
#for i in range(20):
#sample_args.append('File ' + str(i))
#main = createFrame(None, sample_args)
#main.mainloop()
main = Tkinter.Tk()
sample_args = list()
for i in range(20):
sample_args.append('File ' + str(i))
main.grid()
main_frame = createFrame(parent=main, graphs=sample_args)
main.mainloop()
if __name__ == '__main__':
    main(sys.argv)

# =============================================================================
# Source file: STIXProject/python-stix :: stix/ttp/behavior.py
# License: bsd-3-clause
# =============================================================================
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
# mixbox
from mixbox import fields
# stix
import stix
import stix.bindings.ttp as ttp_binding
from .malware_instance import MalwareInstance, MalwareInstanceFactory
from .exploit import Exploit
from .attack_pattern import AttackPattern
class Behavior(stix.Entity):
_binding = ttp_binding
_binding_class = _binding.BehaviorType
_namespace = "http://stix.mitre.org/TTP-1"
malware_instances = fields.TypedField("Malware", type_="stix.ttp.behavior.MalwareInstances", key_name="malware_instances")
attack_patterns = fields.TypedField("Attack_Patterns", type_="stix.ttp.behavior.AttackPatterns")
exploits = fields.TypedField("Exploits", type_="stix.ttp.behavior.Exploits")
def __init__(self, malware_instances=None, attack_patterns=None, exploits=None):
super(Behavior, self).__init__()
self.malware_instances = malware_instances or MalwareInstances()
self.attack_patterns = attack_patterns or AttackPatterns()
self.exploits = exploits or Exploits()
def add_malware_instance(self, malware):
self.malware_instances.append(malware)
def add_attack_pattern(self, attack_pattern):
self.attack_patterns.append(attack_pattern)
def add_exploit(self, exploit):
self.exploits.append(exploit)
class Exploits(stix.EntityList):
_namespace = "http://stix.mitre.org/TTP-1"
_contained_type = Exploit
_binding = ttp_binding
_binding_class = _binding.ExploitsType
exploit = fields.TypedField("Exploit", Exploit, multiple=True, key_name="exploits")
class MalwareInstances(stix.EntityList):
_namespace = "http://stix.mitre.org/TTP-1"
_binding = ttp_binding
_binding_class = _binding.MalwareType
malware_instance = fields.TypedField("Malware_Instance", MalwareInstance, multiple=True, factory=MalwareInstanceFactory, key_name="malware_instances")
class AttackPatterns(stix.EntityList):
_namespace = "http://stix.mitre.org/TTP-1"
_binding = ttp_binding
_binding_class = _binding.AttackPatternsType
attack_pattern = fields.TypedField("Attack_Pattern", AttackPattern, multiple=True, key_name="attack_patterns")
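# Usage sketch (added for illustration; not part of the original module). The
# AttackPattern constructor lives in .attack_pattern; the no-argument call and
# the `title` attribute used here are assumptions, not confirmed by this file.
#
#   behavior = Behavior()
#   pattern = AttackPattern()                # assumed no-arg constructor
#   pattern.title = "Spear Phishing"         # assumed attribute
#   behavior.add_attack_pattern(pattern)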

# =============================================================================
# Source file: balopat/pyquil :: pyquil/quilbase.py
# License: apache-2.0
# =============================================================================
#!/usr/bin/python
##############################################################################
# Copyright 2016-2017 Rigetti Computing
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
"""
Contains the core pyQuil objects that correspond to Quil instructions.
"""
import numpy as np
from copy import deepcopy
from pyquil.quil_atom import QuilAtom
from pyquil.slot import Slot
from pyquil.resource_manager import AbstractQubit, DirectQubit, Qubit, \
ResourceManager, check_live_qubit, merge_resource_managers
allow_raw_instructions = True
"""
Allow constructing programs containing raw instructions.
"""
def issubinstance(x, cls):
"""
Checks if class x is an instance or subclass of cls.
"""
return isinstance(x, cls) or issubclass(x.__class__, cls)
# These are the first values to a 2-tuple.
# This indicates all regular Quil instructions except resource management.
ACTION_INSTALL_INSTRUCTION = 0
# These are for resource management.
ACTION_INSTANTIATE_QUBIT = 1
ACTION_RELEASE_QUBIT = 2
def action(type, obj):
return (type, obj)
def action_install(obj):
return action(ACTION_INSTALL_INSTRUCTION, obj)
def format_matrix_element(element):
"""
Formats a parameterized matrix element.
:param element: {int, long, float, complex, str} The parameterized element to format.
"""
if isinstance(element, (int, long, float, complex)):
return format_parameter(element)
elif isinstance(element, str):
return element
else:
assert False, "Invalid matrix element: %r" % element
def format_parameter(element):
"""
Formats a particular parameter.
:param element: {int, float, long, complex, Slot} Formats a parameter for Quil output.
"""
if isinstance(element, (int, float)):
return repr(element)
elif isinstance(element, long):
return repr(element)[0:-1]
elif isinstance(element, complex):
r = element.real
i = element.imag
if i < 0:
return repr(r) + "-" + repr(abs(i)) + "i"
else:
return repr(r) + "+" + repr(i) + "i"
elif isinstance(element, Slot):
return format_parameter(element.value())
assert False, "Invalid parameter: %r" % element
class Addr(QuilAtom):
"""
Representation of a classical bit address.
:param int value: The classical address.
"""
def __init__(self, value):
if not isinstance(value, int) or value < 0:
raise TypeError("Addr value must be a non-negative int")
self.address = value
def __repr__(self):
return "<Addr {0}>".format(self.address)
def __str__(self):
return "[{0}]".format(self.address)
class Label(QuilAtom):
"""
Representation of a label.
:param string label_name: The label name.
"""
def __init__(self, label_name):
self.name = label_name
def __repr__(self):
return "<Label {0}>".format(repr(self.name))
def __str__(self):
return "@" + self.name
class QuilAction(object):
"""
Representation of some executable code, i.e., something that can be
synthesized into final Quil instructions.
"""
def synthesize(self, resource_manager=None):
raise NotImplementedError()
class AbstractInstruction(QuilAction):
"""
    Abstract class for representing single instructions.
"""
def synthesize(self, resource_manager=None):
return [self]
def out(self):
        raise NotImplementedError()
class DefGate(AbstractInstruction):
"""
A DEFGATE directive.
:param string name: The name of the newly defined gate.
:param array-like matrix: {list, nparray, np.matrix} The matrix defining this gate.
"""
def __init__(self, name, matrix):
assert isinstance(name, str)
assert isinstance(matrix, (list, np.ndarray, np.matrix))
if isinstance(matrix, list):
rows = len(matrix)
assert all([len(row) == rows for row in matrix]), "Matrix must be square."
elif isinstance(matrix, (np.ndarray, np.matrix)):
rows, cols = matrix.shape
assert rows == cols, "Matrix must be square."
else:
raise TypeError("Matrix argument must be a list or NumPy array/matrix")
if 0 != rows & (rows - 1):
raise AssertionError("Dimension of matrix must be a power of 2, got {0}"
.format(rows))
self.name = name
self.matrix = np.asarray(matrix)
is_unitary = np.allclose(np.eye(rows), self.matrix.dot(self.matrix.T.conj()))
if not is_unitary:
raise AssertionError("Matrix must be unitary.")
def out(self):
"""
Prints a readable Quil string representation of this gate.
:returns: String representation of a gate
:rtype: string
"""
result = "DEFGATE %s:\n" % (self.name)
for row in self.matrix:
result += " "
fcols = [format_matrix_element(col) for col in row]
result += ", ".join(fcols)
result += "\n"
return result
def get_constructor(self):
"""
:returns: A function that constructs this gate on variable qubit indices. E.g.
`mygate.get_constructor()(1) applies the gate to qubit 1.`
"""
return lambda *qubits: Gate(name=self.name, params=[], qubits=qubits)
def num_args(self):
"""
:return: The number of qubit arguments the gate takes.
:rtype: int
"""
rows = len(self.matrix)
return int(np.log2(rows))
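# Usage sketch (added for illustration; not part of the original module):
# defining a single-qubit gate from a unitary matrix and building a Gate with
# the generated constructor. Whether a bare DirectQubit is accepted by
# check_live_qubit depends on the resource_manager module, so treat that line
# as an assumption.
#
#   sqrt_x = DefGate("SQRT-X", np.array([[0.5 + 0.5j, 0.5 - 0.5j],
#                                        [0.5 - 0.5j, 0.5 + 0.5j]]))
#   print sqrt_x.out()                       # emits the DEFGATE block
#   apply_sqrt_x = sqrt_x.get_constructor()
#   gate = apply_sqrt_x(DirectQubit(0))      # assumed to satisfy check_live_qubit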
class InstructionGroup(QuilAction):
"""
Representation of a sequence of instructions that can be synthesized into a Quil program.
"""
def __init__(self, resource_manager=None):
self.actions = []
if resource_manager is None:
self.resource_manager = ResourceManager()
else:
self.resource_manager = resource_manager
def synthesize(self, resource_manager=None):
synthesized = []
for action_type, obj in self.actions:
if action_type == ACTION_INSTALL_INSTRUCTION:
synthesized.extend(obj.synthesize(self.resource_manager))
elif action_type == ACTION_INSTANTIATE_QUBIT:
self.resource_manager.instantiate(obj)
elif action_type == ACTION_RELEASE_QUBIT:
self.resource_manager.uninstantiate_index(obj.assignment)
else:
raise RuntimeError("encountered invalid action")
return synthesized
def __str__(self):
return self.out()
def out(self):
instrs = self.synthesize()
s = ""
for instr in instrs:
s += instr.out() + "\n"
return s
def alloc(self):
"""
Get a new qubit.
:return: A qubit.
:rtype: Qubit
"""
qubit = self.resource_manager.allocate_qubit()
self.actions.append(action(ACTION_INSTANTIATE_QUBIT, qubit))
return qubit
def free(self, qubit):
"""
Free a qubit.
:param AbstractQubit q: An AbstractQubit instance.
"""
check_live_qubit(qubit)
if qubit.resource_manager != self.resource_manager:
raise RuntimeError("qubit is managed by a different instruction group")
self.actions.append(action(ACTION_RELEASE_QUBIT, qubit))
self.resource_manager.free_qubit(qubit)
def inst(self, *instructions):
"""
Mutates the Program object by appending new instructions.
:param instructions: A list of Instruction objects, e.g. Gates
:return: self
"""
for instruction in instructions:
if isinstance(instruction, list):
self.inst(*instruction)
elif isinstance(instruction, tuple):
if len(instruction) == 0:
raise ValueError("tuple should have at least one element")
elif len(instruction) == 1:
self.actions.append(action_install(Instr(instruction[0], [], [])))
else:
op = instruction[0]
params = []
possible_params = instruction[1]
rest = instruction[2:]
if isinstance(possible_params, list):
params = possible_params
else:
rest = [possible_params] + list(rest)
self.actions.append(action_install(Instr(op, params, rest)))
elif isinstance(instruction, str):
self.actions.append(action_install(RawInstr(instruction)))
elif issubinstance(instruction, QuilAction):
self.actions.append(action_install(instruction))
elif issubinstance(instruction, InstructionGroup):
self.resource_manager = merge_resource_managers(self.resource_manager,
instruction.resource_manager)
self.actions.extend(list(instruction.actions))
else:
raise TypeError("Invalid instruction: {}".format(instruction))
# Return self for method chaining.
return self
def __add__(self, instruction):
p = deepcopy(self)
return p.inst(instruction)
def pop(self):
"""
Pops off the last instruction.
:return: The (action, instruction) pair for the instruction that was popped.
:rtype: tuple
"""
if 0 != len(self.actions):
return self.actions.pop()
def extract_qubits(self):
"""
Return all qubit addresses involved in the instruction group.
:return: Set of qubits.
:rtype: set
"""
qubits = set()
for jj, act_jj in self.actions:
if jj == ACTION_INSTALL_INSTRUCTION:
if isinstance(act_jj, Instr):
qubits = qubits | act_jj.qubits()
elif isinstance(act_jj, If):
qubits = qubits | act_jj.Then.extract_qubits() | act_jj.Else.extract_qubits()
elif isinstance(act_jj, While):
qubits = qubits | act_jj.Body.extract_qubits()
elif isinstance(act_jj, InstructionGroup):
qubits = qubits | act_jj.extract_qubits()
elif isinstance(act_jj, (JumpTarget, JumpConditional, SimpleInstruction,
UnaryClassicalInstruction, BinaryClassicalInstruction,
Jump)):
continue
else:
raise ValueError(type(act_jj))
elif jj in (ACTION_INSTANTIATE_QUBIT, ACTION_RELEASE_QUBIT):
continue
return qubits
class JumpTarget(AbstractInstruction):
"""
Representation of a target that can be jumped to.
"""
def __init__(self, label):
if not isinstance(label, Label):
raise TypeError("label must be a Label")
self.label = label
def __repr__(self):
return "<JumpTarget {0}>".format(str(self.label))
def out(self):
return "LABEL {0}".format(str(self.label))
class JumpConditional(AbstractInstruction):
"""
Abstract representation of an conditional jump instruction.
"""
def __init__(self, target, condition):
if not isinstance(target, Label):
raise TypeError("target should be a Label")
if not isinstance(condition, Addr):
raise TypeError("condition should be an Addr")
self.target = target
self.condition = condition
def __str__(self):
return self.out()
def out(self):
return "%s %s %s" % (self.op, self.target, self.condition)
class JumpWhen(JumpConditional):
"""
The JUMP-WHEN instruction.
"""
op = "JUMP-WHEN"
class JumpUnless(JumpConditional):
"""
The JUMP-UNLESS instruction.
"""
op = "JUMP-UNLESS"
class SimpleInstruction(AbstractInstruction):
"""
Abstract class for simple instructions with no arguments.
"""
def __str__(self):
return self.out()
def out(self):
return self.op
class Halt(SimpleInstruction):
"""
The HALT instruction.
"""
op = "HALT"
class Wait(SimpleInstruction):
"""
The WAIT instruction.
"""
op = "WAIT"
class Reset(SimpleInstruction):
"""
The RESET instruction.
"""
op = "RESET"
class Nop(SimpleInstruction):
"""
    The NOP instruction.
"""
op = "NOP"
class UnaryClassicalInstruction(AbstractInstruction):
"""
The abstract class for unary classical instructions.
"""
def __init__(self, target):
if not isinstance(target, Addr):
raise TypeError("target operand should be an Addr")
self.target = target
def __str__(self):
return self.out()
def out(self):
return "%s %s" % (self.op, self.target)
class ClassicalTrue(UnaryClassicalInstruction):
op = "TRUE"
class ClassicalFalse(UnaryClassicalInstruction):
op = "FALSE"
class ClassicalNot(UnaryClassicalInstruction):
op = "NOT"
class BinaryClassicalInstruction(AbstractInstruction):
"""
The abstract class for binary classical instructions.
"""
def __init__(self, left, right):
if not isinstance(left, Addr):
raise TypeError("left operand should be an Addr")
if not isinstance(right, Addr):
raise TypeError("right operand should be an Addr")
self.left = left
self.right = right
def __str__(self):
return self.out()
def out(self):
return "%s %s %s" % (self.op, self.left, self.right)
class ClassicalAnd(BinaryClassicalInstruction):
op = "AND"
class ClassicalOr(BinaryClassicalInstruction):
op = "OR"
class ClassicalMove(BinaryClassicalInstruction):
op = "MOVE"
class ClassicalExchange(BinaryClassicalInstruction):
op = "EXCHANGE"
class Jump(AbstractInstruction):
"""
Representation of an unconditional jump instruction (JUMP).
"""
def __init__(self, target):
if not isinstance(target, Label):
raise TypeError("target should be a Label")
self.target = target
def __str__(self):
return self.out()
def out(self):
return "JUMP %s" % self.target
label_counter = 0
def reset_label_counter():
global label_counter
label_counter = 0
def gen_label(prefix="L"):
"""
Generate a fresh label.
:param string prefix: An optional prefix for the label name.
:return: A new Label instance.
:rtype: Label
"""
global label_counter
label_counter += 1
return Label(prefix + str(label_counter))
class RawInstr(AbstractInstruction):
"""
A raw instruction represented as a string.
"""
def __init__(self, instr_str):
if not isinstance(instr_str, str):
raise TypeError("Raw instructions require a string.")
if not allow_raw_instructions:
raise RuntimeError("Raw instructions are not allowed. Consider changing"
"the variable `allow_raw_instructions` to `True`.")
self.instr = instr_str
def out(self):
return self.instr
def __repr__(self):
return '<RawInstr>'
def __str__(self):
return self.instr
class Instr(AbstractInstruction):
"""
Representation of an instruction represented by an operator, parameters, and arguments.
"""
def __init__(self, op, params, args):
if not isinstance(op, (str, unicode)):
raise TypeError("op must be a string")
self.operator_name = op
self.parameters = params
self.arguments = []
if 0 != len(args):
if isinstance(args[0], list):
self.parameters = None if 0 == len(args[0]) else args[0]
self.arguments = args[1:]
else:
self.arguments = args
def __str__(self):
return "<Instr {0}>".format(self.operator_name)
def __eq__(self, other):
return self.out() == other.out()
def __hash__(self):
return hash(self.out())
def synthesize(self, resource_manager=None):
if resource_manager is not None:
self.make_qubits_known(resource_manager)
return [self]
def out(self):
def format_params(params):
if not params:
return ""
else:
return "(" + ",".join(map(format_parameter, params)) + ")"
def format_args(args):
if 0 == len(args):
return ""
else:
return " " + " ".join([str(arg) for arg in args])
if self.parameters:
return self.operator_name + format_params(self.parameters) + format_args(self.arguments)
else:
return self.operator_name + format_args(self.arguments)
def make_qubits_known(self, rm):
"""
Make the qubits involved with this instruction known to a ResourceManager.
:param ResourceManager rm: A ResourceManager object.
"""
if not isinstance(rm, ResourceManager):
raise TypeError("rm should be a ResourceManager")
for arg in self.arguments:
if isinstance(arg, DirectQubit):
current = rm.in_use.get(arg.index(), False)
rm.in_use[arg.index()] = arg
# re-instantiate the qubit
if current and isinstance(current, Qubit):
rm.instantiate(current)
def qubits(self):
"""
The qubits this instruction affects.
:return: Set of qubit indexes.
:rtype: set
"""
qubits = set()
for arg in self.arguments:
if issubinstance(arg, AbstractQubit):
qubits.add(arg.index())
return qubits
class Gate(Instr):
"""
This is the pyQuil object for a quantum gate instruction.
"""
def __init__(self, name, params, qubits):
for qubit in qubits:
check_live_qubit(qubit)
super(Gate, self).__init__(name, params, qubits)
def __repr__(self):
return "<Gate: " + self.out() + ">"
def __str__(self):
return self.out()
class Measurement(Instr):
"""
This is the pyQuil object for a Quil measurement instruction.
"""
def __init__(self, qubit, classical_reg=None):
check_live_qubit(qubit)
if classical_reg is None:
args = (qubit,)
elif isinstance(classical_reg, Addr):
args = (qubit, classical_reg)
else:
raise TypeError("classical_reg should be None or an Addr instance")
super(Measurement, self).__init__("MEASURE",
params=None,
args=args)
self.classical_reg = classical_reg
class While(QuilAction):
"""
Representation of a ``while`` construct. To use, initialize with an
address to branch on, and use ``self.Body.inst()`` to add instructions to
the body of the loop.
"""
def __init__(self, condition):
if not isinstance(condition, Addr):
raise TypeError("condition must be an Addr")
super(While, self).__init__()
self.condition = condition
self.Body = InstructionGroup()
def synthesize(self, resource_manager=None):
# WHILE [c]:
# instr...
#
# =>
#
# LABEL @START
# JUMP-UNLESS @END [c]
# instr...
# JUMP @START
# LABEL @END
label_start = gen_label("START")
label_end = gen_label("END")
insts = list()
insts.append(JumpTarget(label_start))
insts.append(JumpUnless(target=label_end, condition=self.condition))
insts.extend(self.Body.synthesize())
insts.append(Jump(target=label_start))
insts.append(JumpTarget(label_end))
return insts
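# Usage sketch (added for illustration; not part of the original module): loop
# while classical address 0 holds a true value. The "X" operator is only an
# example instruction for the body.
#
#   group = InstructionGroup()
#   loop = While(Addr(0))
#   loop.Body.inst(("X", 0))       # tuple form builds Instr("X", [], [0])
#   group.inst(loop)
#   print group.out()              # LABEL/JUMP-UNLESS ... JUMP/LABEL sequence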
class If(QuilAction):
"""
Representation of an ``if`` construct. To use, initialize with an address
to be branched on, and add instructions to ``self.Then`` and ``self.Else``
for the corresponding branches.
"""
def __init__(self, condition):
if not isinstance(condition, Addr):
raise TypeError("condition must be an Addr")
super(If, self).__init__()
self.condition = condition
self.Then = InstructionGroup()
self.Else = InstructionGroup()
def synthesize(self, resource_manager=None):
# IF [c]:
# instrA...
# ELSE:
# instrB...
#
# =>
#
# JUMP-WHEN @THEN [c]
# instrB...
# JUMP @END
# LABEL @THEN
# instrA...
# LABEL @END
label_then = gen_label("THEN")
label_end = gen_label("END")
insts = list()
insts.append(JumpWhen(target=label_then, condition=self.condition))
insts.extend(self.Else.synthesize())
insts.append(Jump(target=label_end))
insts.append(JumpTarget(label_then))
insts.extend(self.Then.synthesize())
insts.append(JumpTarget(label_end))
return insts
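# Usage sketch (added for illustration; not part of the original module),
# following the While example above: branch on classical address 1.
#
#   branch = If(Addr(1))
#   branch.Then.inst(("X", 0))
#   branch.Else.inst(("I", 0))
#   group.inst(branch)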

# =============================================================================
# Source file: stackimpact/stackimpact-python :: stackimpact/reporters/profile_reporter.py
# License: bsd-3-clause
# =============================================================================
from __future__ import division
import os
import sys
import time
import threading
import re
import random
from ..runtime import min_version, runtime_info
from ..utils import timestamp
from ..metric import Metric
from ..metric import Breakdown
from ..frame import Frame
class ProfilerConfig(object):
def __init__(self):
self.log_prefix = None
self.max_profile_duration = None
self.max_span_duration = None
self.max_span_count = None
self.span_interval = None
self.report_interval = None
self.report_only = False
class ProfileReporter:
def __init__(self, agent, profiler, config):
self.agent = agent
self.profiler = profiler
self.config = config
self.started = False
self.span_timer = None
self.span_timeout = None
self.random_timer = None
self.report_timer = None
self.profile_start_ts = None
self.profile_duration = None
self.span_count = None
self.span_active = False
self.span_start_ts = None
self.span_trigger = None
def setup(self):
self.profiler.setup()
def start(self):
if not self.profiler.ready:
return
if self.started:
return
self.started = True
self.reset()
if self.agent.get_option('auto_profiling'):
if not self.config.report_only:
def random_delay():
timeout = random.randint(0, round(self.config.span_interval - self.config.max_span_duration))
self.random_timer = self.agent.delay(timeout, self.start_profiling, False, True)
self.span_timer = self.agent.schedule(0, self.config.span_interval, random_delay)
self.report_timer = self.agent.schedule(self.config.report_interval, self.config.report_interval, self.report)
def stop(self):
if not self.started:
return
self.started = False
if self.span_timer:
self.span_timer.cancel()
self.span_timer = None
if self.random_timer:
self.random_timer.cancel()
self.random_timer = None
if self.report_timer:
self.report_timer.cancel()
self.report_timer = None
self.stop_profiling()
def destroy(self):
self.profiler.destroy()
def reset(self):
self.profiler.reset()
self.profile_start_ts = timestamp()
self.profile_duration = 0
self.span_count = 0
self.span_trigger = Metric.TRIGGER_TIMER
def start_profiling(self, api_call, with_timeout):
if not self.started:
return False
if self.profile_duration > self.config.max_profile_duration:
self.agent.log(self.config.log_prefix + ': max profiling duration reached.')
return False
if api_call and self.span_count > self.config.max_span_count:
self.agent.log(self.config.log_prefix + ': max recording count reached.')
return False
if self.agent.profiler_active:
self.agent.log(self.config.log_prefix + ': profiler lock exists.')
return False
self.agent.profiler_active = True
self.agent.log(self.config.log_prefix + ': started.')
try:
self.profiler.start_profiler()
except Exception:
self.agent.profiler_active = False
self.exception()
return False
if with_timeout:
self.span_timeout = self.agent.delay(self.config.max_span_duration, self.stop_profiling)
self.span_count = self.span_count + 1
self.span_active = True
self.span_start_ts = time.time()
if api_call:
self.span_trigger = Metric.TRIGGER_API
return True
def stop_profiling(self):
if not self.span_active:
return
self.span_active = False
try:
self.profile_duration = self.profile_duration + time.time() - self.span_start_ts
self.profiler.stop_profiler()
except Exception:
self.exception()
self.agent.profiler_active = False
if self.span_timeout:
self.span_timeout.cancel()
self.agent.log(self.config.log_prefix + ': stopped.')
def report(self, with_interval=False):
if not self.started:
return
if with_interval:
if self.profile_start_ts > timestamp() - self.config.report_interval:
return
elif self.profile_start_ts < timestamp() - 2 * self.config.report_interval:
self.reset()
return
if not self.config.report_only and self.profile_duration == 0:
return
self.agent.log(self.config.log_prefix + ': reporting profile.')
profile_data = self.profiler.build_profile(self.profile_duration)
for data in profile_data:
metric = Metric(self.agent, Metric.TYPE_PROFILE, data['category'], data['name'], data['unit'])
metric.create_measurement(self.span_trigger, data['profile'].measurement, data['unit_interval'], data['profile'])
self.agent.message_queue.add('metric', metric.to_dict())
self.reset()
| bsd-3-clause | 5,937,886,945,018,756,000 | 27.26738 | 123 | 0.599886 | false |
mcflugen/dakota-experiments | dakota_utils/convert.py | 1 | 1595 | #! /usr/bin/env python
#
# Dakota utility programs for converting output.
#
# Mark Piper ([email protected])
import shutil
from subprocess import check_call, CalledProcessError
from .read import get_names
def has_interface_column(tab_file):
'''
Returns True if the tabular output file has the v6.1 'interface' column.
'''
try:
val = get_names(tab_file)[1] == 'interface'
except IOError:
raise
except TypeError:
return False
else:
return(val)
def strip_interface_column(tab_file):
'''
Strips the 'interface' column from a Dakota 6.1 tabular output file.
'''
try:
bak_file = tab_file + '.orig'
shutil.copyfile(tab_file, bak_file)
cmd = 'cat ' + bak_file +' | colrm 9 18 > ' + tab_file
check_call(cmd, shell=True)
except (IOError, CalledProcessError):
raise
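# Note on the shell pipeline above: 'colrm 9 18' deletes character columns 9
# through 18 (inclusive) from every line, which is assumed here to be the
# fixed-width slot occupied by the v6.1 'interface' column; the surviving
# columns then match the v6.0 layout.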
def main():
import argparse
from dakota_utils import __version__, convert_script
parser = argparse.ArgumentParser(
description="Converts a Dakota tabular output file to v6.0 format.")
parser.add_argument("output_file",
help="path to a Dakota v6.1 tabular output file")
parser.add_argument('--version', action='version',
version=convert_script + ' ' + __version__)
args = parser.parse_args()
if has_interface_column(args.output_file) is False:
print('Error: Not a Dakota v6.1 tabular output file.')
return
else:
strip_interface_column(args.output_file)
if __name__ == '__main__':
main()
| mit | 5,199,411,763,182,056,000 | 26.033898 | 76 | 0.618182 | false |
mdales/django-oauth | views.py | 1 | 7170 | from __future__ import absolute_import
import cgi
import logging
import urllib
import urlparse
from django.conf import settings
from django.http import (HttpResponse, HttpResponseRedirect,
HttpResponseNotAllowed, HttpResponseBadRequest)
from django.utils.translation import ugettext as _
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import get_callable
from .decorators import oauth_required
from .models import Token
from .oauth import OAuthError
from .utils import (initialize_server_request, send_oauth_error,
add_query_params_to_url)
from .stores import check_valid_callback
OAUTH_AUTHORIZE_VIEW = 'OAUTH_AUTHORIZE_VIEW'
OAUTH_CALLBACK_VIEW = 'OAUTH_CALLBACK_VIEW'
OAUTH_AUTHORIZE_CALLBACK = 'OAUTH_AUTHORIZE_CALLBACK'
INVALID_PARAMS_RESPONSE = send_oauth_error(OAuthError(
_('Invalid request parameters.')))
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
def request_token(request):
"""
The Consumer obtains an unauthorized Request Token by asking the Service
Provider to issue a Token. The Request Token's sole purpose is to receive
User approval and can only be used to obtain an Access Token.
"""
oauth_server, oauth_request = initialize_server_request(request)
if oauth_server is None:
return INVALID_PARAMS_RESPONSE
try:
# create a request token
token = oauth_server.fetch_request_token(oauth_request)
# return the token
d = token.to_dict()
if token.is_1_0a_request:
d['oauth_callback_confirmed'] = 'true'
response = HttpResponse(urllib.urlencode(d), mimetype="text/plain")
except OAuthError, err:
response = send_oauth_error(err)
return response
@login_required
def user_authorization(request):
"""
The Consumer cannot use the Request Token until it has been authorized by
the User.
"""
oauth_server, oauth_request = initialize_server_request(request)
if oauth_request is None:
return INVALID_PARAMS_RESPONSE
try:
        # get the request token / verify
token = oauth_server.fetch_request_token(oauth_request)
except OAuthError, err:
return send_oauth_error(err)
try:
callback = oauth_server.get_callback(oauth_request)
if token.is_1_0a_request:
return HttpResponseBadRequest("Cannot specify oauth_callback at authorization step for 1.0a protocol")
if not check_valid_callback(callback):
return HttpResponseBadRequest("Invalid callback URL")
except OAuthError:
callback = None
if token.is_1_0a_request:
callback = token.callback
if callback == "oob":
callback = None
# entry point for the user
if request.method == 'GET':
# try to get custom authorize view
authorize_view_str = getattr(settings, OAUTH_AUTHORIZE_VIEW,
'django_oauth.views.fake_authorize_view')
try:
authorize_view = get_callable(authorize_view_str)
except AttributeError:
raise Exception, "%s view doesn't exist." % authorize_view_str
params = oauth_request.get_normalized_parameters()
# set the oauth flag
request.session['oauth'] = token.key
return authorize_view(request, token, callback, params)
    # user grants access to the service
elif request.method == 'POST':
# verify the oauth flag set in previous GET
if request.session.get('oauth', '') == token.key:
request.session['oauth'] = ''
try:
if request.POST.get('authorize_access') == 'on':
# authorize the token
token = oauth_server.authorize_token(token, request.user)
# let the rest of the django world react if they want
if hasattr(settings, OAUTH_AUTHORIZE_CALLBACK):
get_callable(settings.OAUTH_AUTHORIZE_CALLBACK)(request, token)
# return the token key
args = {'oauth_token': token.key}
if token.verifier:
args['oauth_verifier'] = token.verifier
else:
args = {'error': _('Access not granted by user.')}
except OAuthError, err:
response = send_oauth_error(err)
if callback:
callback = add_query_params_to_url(callback, args)
response = HttpResponseRedirect(callback)
else:
# try to get custom callback view
callback_view_str = getattr(settings, OAUTH_CALLBACK_VIEW,
'django_oauth.views.fake_callback_view')
try:
callback_view = get_callable(callback_view_str)
except AttributeError:
raise Exception, "%s view doesn't exist." % callback_view_str
response = callback_view(request, **args)
else:
response = send_oauth_error(OAuthError(_('Action not allowed.')))
return response
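# Configuration sketch for the settings hooks used above (the dotted paths are
# placeholders, not part of this package):
#
#   OAUTH_AUTHORIZE_VIEW = 'myproject.views.oauth_authorize'
#   OAUTH_CALLBACK_VIEW = 'myproject.views.oauth_callback'
#   OAUTH_AUTHORIZE_CALLBACK = 'myproject.views.on_token_authorized'
#
# When these are unset, the fake_authorize_view/fake_callback_view defined at
# the bottom of this module are used, which is only suitable for tests.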
def access_token(request):
"""
The Consumer exchanges the Request Token for an Access Token capable of
accessing the Protected Resources.
"""
oauth_server, oauth_request = initialize_server_request(request)
if oauth_request is None:
return INVALID_PARAMS_RESPONSE
try:
# get the request token
token = oauth_server.fetch_request_token(oauth_request)
except OAuthError, err:
return send_oauth_error(err)
try:
# get the access token
token = oauth_server.fetch_access_token(oauth_request)
# return the token
d = token.to_dict()
response = HttpResponse(urllib.urlencode(d), mimetype="text/plain")
except OAuthError, err:
response = send_oauth_error(err)
return response
@login_required
def revoke_token(request):
if request.method == 'POST':
if 'todelete' in request.POST:
key = request.POST['todelete']
request.user.token_set.filter(key=key).delete()
log.info("OAuth token %s for user %s has been revoked" % (key, request.user))
return HttpResponse('The token has been revoked.')
else:
return HttpResponseNotAllowed(['POST'])
@oauth_required
def protected_resource_example(request):
"""
Test view for accessing a Protected Resource.
"""
return HttpResponse('Protected Resource access!')
@login_required
def fake_authorize_view(request, token, callback, params):
"""
Fake view for tests. It must return an ``HttpResponse``.
You need to define your own in ``settings.OAUTH_AUTHORIZE_VIEW``.
"""
return HttpResponse('Fake authorize view for %s.' % token.consumer.name)
def fake_callback_view(request):
"""
Fake view for tests. It must return an ``HttpResponse``.
You can define your own in ``settings.OAUTH_CALLBACK_VIEW``.
"""
return HttpResponse('Fake callback view.')
| bsd-3-clause | 5,926,247,509,342,452,000 | 36.936508 | 114 | 0.628591 | false |
DayGitH/Python-Challenges | DailyProgrammer/DP20170612A.py | 1 | 1420 | """
[2017-06-12] Challenge #319 [Easy] Condensing Sentences
https://www.reddit.com/r/dailyprogrammer/comments/6grwny/20170612_challenge_319_easy_condensing_sentences/
# Description
Compression makes use of the fact that repeated structures are redundant, and it's more efficient to represent the
pattern and the count or a reference to it. Siimilarly, we can *condense* a sentence by using the redundancy of
overlapping letters from the end of one word and the start of the next. In this manner we can reduce the size of the
sentence, even if we start to lose meaning.
For instance, the phrase "live verses" can be condensed to "liverses".
In this challenge you'll be asked to write a tool to condense sentences.
# Input Description
You'll be given a sentence, one per line, to condense. Condense where you can, but know that you can't condense
everywhere. Example:
I heard the pastor sing live verses easily.
# Output Description
Your program should emit a sentence with the appropriate parts condensed away. Our example:
I heard the pastor sing liverses easily.
# Challenge Input
Deep episodes of Deep Space Nine came on the television only after the news.
Digital alarm clocks scare area children.
# Challenge Output
Deepisodes of Deep Space Nine came on the televisionly after the news.
Digitalarm clockscarea children.
"""
def main():
pass
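# A minimal sketch of one way to do the condensing described in the docstring
# above (this helper and its greedy rule are illustrative additions, not part
# of the challenge post): merge each next word onto the text built so far using
# the longest case-insensitive suffix/prefix overlap.
# e.g. condense_sketch("I heard the pastor sing live verses easily.")
#      -> "I heard the pastor sing liverses easily."
def condense_sketch(sentence):
    words = sentence.split()
    if not words:
        return sentence
    result = words[0]
    for word in words[1:]:
        overlap = 0
        # longest suffix of the text so far that is also a prefix of the next word
        for size in range(min(len(result), len(word)), 0, -1):
            if result[-size:].lower() == word[:size].lower():
                overlap = size
                break
        if overlap:
            result += word[overlap:]
        else:
            result += " " + word
    return result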
if __name__ == "__main__":
main()
| mit | 3,155,732,654,619,010,000 | 40.764706 | 116 | 0.762676 | false |
thunderhoser/GewitterGefahr | gewittergefahr/deep_learning/data_augmentation.py | 1 | 10404 | """Augments image dataset by shifting, rotating, or adding Gaussian noise."""
import numpy
from scipy.ndimage.interpolation import rotate as scipy_rotate
from gewittergefahr.gg_utils import error_checking
from gewittergefahr.deep_learning import deep_learning_utils as dl_utils
PADDING_VALUE = 0.
MIN_ABSOLUTE_ROTATION_ANGLE_DEG = 1.
MAX_ABSOLUTE_ROTATION_ANGLE_DEG = 90.
MIN_NOISE_STANDARD_DEVIATION = 1e-9
MAX_NOISE_STANDARD_DEVIATION = 0.25
def get_translations(
num_translations, max_translation_pixels, num_grid_rows,
num_grid_columns):
"""Creates an array of x- and y-translations.
These translations ("offsets") are meant for use in `shift_radar_images`.
N = number of translations
:param num_translations: Number of translations. Image will be translated
in only the x- and y-directions, not the z-direction.
:param max_translation_pixels: Max translation in either direction. Must be
an integer.
:param num_grid_rows: Number of rows in the image.
:param num_grid_columns: Number of columns in the image.
:return: x_offsets_pixels: length-N numpy array of x-translations
(integers).
:return: y_offsets_pixels: length-N numpy array of y-translations
(integers).
"""
error_checking.assert_is_integer(num_translations)
if num_translations == 0:
return numpy.array([], dtype=int), numpy.array([], dtype=int)
error_checking.assert_is_greater(num_translations, 0)
error_checking.assert_is_integer(num_grid_rows)
error_checking.assert_is_geq(num_grid_rows, 2)
error_checking.assert_is_integer(num_grid_columns)
error_checking.assert_is_geq(num_grid_columns, 2)
error_checking.assert_is_integer(max_translation_pixels)
error_checking.assert_is_greater(max_translation_pixels, 0)
smallest_horiz_dimension = min([num_grid_rows, num_grid_columns])
max_max_translation_pixels = int(numpy.floor(
float(smallest_horiz_dimension) / 2
))
error_checking.assert_is_leq(
max_translation_pixels, max_max_translation_pixels)
x_offsets_pixels = numpy.random.randint(
low=-max_translation_pixels, high=max_translation_pixels + 1,
size=num_translations * 4)
y_offsets_pixels = numpy.random.randint(
low=-max_translation_pixels, high=max_translation_pixels + 1,
size=num_translations * 4)
good_indices = numpy.where(
numpy.absolute(x_offsets_pixels) + numpy.absolute(y_offsets_pixels) > 0
)[0]
good_indices = numpy.random.choice(
good_indices, size=num_translations, replace=False)
return x_offsets_pixels[good_indices], y_offsets_pixels[good_indices]
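# Illustrative call (grid size and limits below are arbitrary): draw 2 non-zero
# offset pairs for a 32x32 grid, each within +/- 3 pixels; each resulting pair
# (x_offsets[i], y_offsets[i]) is meant to be fed to shift_radar_images.
#
#   x_offsets, y_offsets = get_translations(
#       num_translations=2, max_translation_pixels=3,
#       num_grid_rows=32, num_grid_columns=32)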
def get_rotations(num_rotations, max_absolute_rotation_angle_deg):
"""Creates an array of rotation angles.
These angles are meant for use in `rotate_radar_images`.
N = number of rotations
:param num_rotations: Number of rotations. Image will be rotated only in
the xy-plane (about the z-axis).
:param max_absolute_rotation_angle_deg: Max absolute rotation angle
(degrees). In general, the image will be rotated both clockwise and
counterclockwise, up to this angle.
:return: ccw_rotation_angles_deg: length-N numpy array of counterclockwise
rotation angles (degrees).
"""
error_checking.assert_is_integer(num_rotations)
if num_rotations == 0:
return numpy.array([], dtype=float)
error_checking.assert_is_greater(num_rotations, 0)
error_checking.assert_is_geq(
max_absolute_rotation_angle_deg, MIN_ABSOLUTE_ROTATION_ANGLE_DEG)
error_checking.assert_is_leq(
max_absolute_rotation_angle_deg, MAX_ABSOLUTE_ROTATION_ANGLE_DEG)
absolute_rotation_angles_deg = numpy.random.uniform(
low=1., high=max_absolute_rotation_angle_deg, size=num_rotations)
possible_signs = numpy.array([-1, 1], dtype=int)
return absolute_rotation_angles_deg * numpy.random.choice(
possible_signs, size=num_rotations, replace=True)
def get_noisings(num_noisings, max_standard_deviation):
"""Creates an array of standard deviations for Gaussian noising.
These standard deviations are meant for use in `noise_radar_images`.
N = number of noisings
:param num_noisings: Number of times to noise the image.
:param max_standard_deviation: Max standard deviation of Gaussian noise.
:return: standard_deviations: length-N numpy array of standard deviations.
"""
error_checking.assert_is_integer(num_noisings)
if num_noisings == 0:
return numpy.array([], dtype=float)
error_checking.assert_is_greater(num_noisings, 0)
error_checking.assert_is_geq(
max_standard_deviation, MIN_NOISE_STANDARD_DEVIATION)
error_checking.assert_is_leq(
max_standard_deviation, MAX_NOISE_STANDARD_DEVIATION)
return numpy.random.uniform(
low=0., high=max_standard_deviation, size=num_noisings)
def shift_radar_images(radar_image_matrix, x_offset_pixels, y_offset_pixels):
"""Shifts each radar image in the x- and y-dimensions.
:param radar_image_matrix: See doc for
`deep_learning_utils.check_radar_images`.
:param x_offset_pixels: Each image will be shifted this many pixels in the
+x-direction.
:param y_offset_pixels: Each image will be shifted this many pixels in the
+y-direction.
:return: shifted_image_matrix: Same as `radar_image_matrix`, but after
shifting. The shapes of the two numpy arrays are the same.
"""
dl_utils.check_radar_images(
radar_image_matrix=radar_image_matrix, min_num_dimensions=3,
max_num_dimensions=5)
num_grid_rows = radar_image_matrix.shape[1]
half_num_grid_rows = int(numpy.floor(
float(num_grid_rows) / 2
))
error_checking.assert_is_integer(y_offset_pixels)
error_checking.assert_is_geq(y_offset_pixels, -half_num_grid_rows)
error_checking.assert_is_leq(y_offset_pixels, half_num_grid_rows)
num_grid_columns = radar_image_matrix.shape[2]
half_num_grid_columns = int(numpy.floor(
float(num_grid_columns) / 2
))
error_checking.assert_is_integer(x_offset_pixels)
error_checking.assert_is_geq(x_offset_pixels, -half_num_grid_columns)
error_checking.assert_is_leq(x_offset_pixels, half_num_grid_columns)
if x_offset_pixels == y_offset_pixels == 0:
return radar_image_matrix + 0.
num_padding_columns_at_left = max([x_offset_pixels, 0])
num_padding_columns_at_right = max([-x_offset_pixels, 0])
num_padding_rows_at_top = max([y_offset_pixels, 0])
num_padding_rows_at_bottom = max([-y_offset_pixels, 0])
pad_width_input_arg = (
(0, 0),
(num_padding_rows_at_top, num_padding_rows_at_bottom),
(num_padding_columns_at_left, num_padding_columns_at_right)
)
num_dimensions = len(radar_image_matrix.shape)
for _ in range(3, num_dimensions):
pad_width_input_arg += ((0, 0),)
shifted_image_matrix = numpy.pad(
radar_image_matrix, pad_width=pad_width_input_arg, mode='constant',
constant_values=PADDING_VALUE)
if x_offset_pixels >= 0:
shifted_image_matrix = shifted_image_matrix[
:, :, :num_grid_columns, ...]
else:
shifted_image_matrix = shifted_image_matrix[
:, :, -num_grid_columns:, ...]
if y_offset_pixels >= 0:
shifted_image_matrix = shifted_image_matrix[:, :num_grid_rows, ...]
else:
shifted_image_matrix = shifted_image_matrix[:, -num_grid_rows:, ...]
return shifted_image_matrix
def rotate_radar_images(radar_image_matrix, ccw_rotation_angle_deg):
"""Rotates each radar image in the xy-plane.
:param radar_image_matrix: See doc for
`deep_learning_utils.check_radar_images`.
:param ccw_rotation_angle_deg: Each image will be rotated counterclockwise
by this angle (degrees).
:return: rotated_image_matrix: Same as `radar_image_matrix`, but after
rotation. The shapes of the two numpy arrays are the same.
"""
dl_utils.check_radar_images(
radar_image_matrix=radar_image_matrix, min_num_dimensions=3,
max_num_dimensions=5)
error_checking.assert_is_geq(ccw_rotation_angle_deg, -180.)
error_checking.assert_is_leq(ccw_rotation_angle_deg, 180.)
return scipy_rotate(
input=radar_image_matrix, angle=-ccw_rotation_angle_deg, axes=(1, 2),
reshape=False, order=1, mode='constant', cval=PADDING_VALUE)
def noise_radar_images(radar_image_matrix, standard_deviation):
"""Adds Gaussian noise to each radar image.
This method assumes that images are normalized (as by
`deep_learning_utils.normalize_radar_images`), so the standard deviation is
unitless and the same standard deviation applies to all channels.
:param radar_image_matrix: See doc for
`deep_learning_utils.check_radar_images`.
:param standard_deviation: Standard deviation of Gaussian noise.
:return: noised_image_matrix: Same as `radar_image_matrix`, but after
noising. The shapes of the two numpy arrays are the same.
"""
dl_utils.check_radar_images(
radar_image_matrix=radar_image_matrix, min_num_dimensions=3,
max_num_dimensions=5)
error_checking.assert_is_greater(standard_deviation, 0.)
noise_matrix = numpy.random.normal(
loc=0., scale=standard_deviation, size=radar_image_matrix.shape)
return radar_image_matrix + noise_matrix
def flip_radar_images_x(radar_image_matrix):
"""Flips radar images in the x-direction.
:param radar_image_matrix: See doc for
`deep_learning_utils.check_radar_images`.
:return: flipped_image_matrix: Flipped version of input (same dimensions).
"""
dl_utils.check_radar_images(
radar_image_matrix=radar_image_matrix, min_num_dimensions=3,
max_num_dimensions=5)
return numpy.flip(radar_image_matrix, axis=2)
def flip_radar_images_y(radar_image_matrix):
"""Flips radar images in the y-direction.
:param radar_image_matrix: See doc for
`deep_learning_utils.check_radar_images`.
:return: flipped_image_matrix: Flipped version of input (same dimensions).
"""
dl_utils.check_radar_images(
radar_image_matrix=radar_image_matrix, min_num_dimensions=3,
max_num_dimensions=5)
return numpy.flip(radar_image_matrix, axis=1)
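# Usage sketch for the augmentation functions above (shapes and parameter
# values are arbitrary; a 4-D batch is assumed to be example x row x column x
# field, matching the row/column axes used by shift_radar_images):
#
#   radar_image_matrix = numpy.random.uniform(size=(10, 32, 32, 1))
#   angle_deg = get_rotations(
#       num_rotations=1, max_absolute_rotation_angle_deg=30.)[0]
#   rotated_matrix = rotate_radar_images(radar_image_matrix, angle_deg)
#   noise_sigma = get_noisings(num_noisings=1, max_standard_deviation=0.1)[0]
#   noised_matrix = noise_radar_images(radar_image_matrix, noise_sigma)
#   flipped_matrix = flip_radar_images_x(radar_image_matrix)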
| mit | -2,660,933,531,477,397,000 | 36.695652 | 80 | 0.688774 | false |
NeCTAR-RC/swift | test/unit/common/middleware/test_account_quotas.py | 1 | 18390 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.swob import Request
from swift.common.middleware import account_quotas
from swift.proxy.controllers.base import _get_cache_key, \
headers_to_account_info, get_object_env_key, \
headers_to_object_info
class FakeCache(object):
def __init__(self, val):
self.val = val
def get(self, *args):
return self.val
def set(self, *args, **kwargs):
pass
class FakeBadApp(object):
def __init__(self, headers=[]):
self.headers = headers
def __call__(self, env, start_response):
start_response('404 NotFound', self.headers)
return []
class FakeApp(object):
def __init__(self, headers=[]):
self.headers = headers
def __call__(self, env, start_response):
if env['REQUEST_METHOD'] == "HEAD" and \
env['PATH_INFO'] == '/v1/a/c2/o2':
env_key = get_object_env_key('a', 'c2', 'o2')
env[env_key] = headers_to_object_info(self.headers, 200)
start_response('200 OK', self.headers)
elif env['REQUEST_METHOD'] == "HEAD" and \
env['PATH_INFO'] == '/v1/a/c2/o3':
start_response('404 Not Found', [])
else:
# Cache the account_info (same as a real application)
cache_key, env_key = _get_cache_key('a', None)
env[env_key] = headers_to_account_info(self.headers, 200)
start_response('200 OK', self.headers)
return []
class TestAccountQuota(unittest.TestCase):
def test_unauthorized(self):
headers = [('x-account-bytes-used', '1000'), ]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache})
res = req.get_response(app)
#Response code of 200 because authentication itself is not done here
self.assertEquals(res.status_int, 200)
def test_no_quotas(self):
headers = [('x-account-bytes-used', '1000'), ]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_obj_request_ignores_attempt_to_set_quotas(self):
# If you try to set X-Account-Meta-* on an object, it's ignored, so
# the quota middleware shouldn't complain about it even if we're not a
# reseller admin.
headers = [('x-account-bytes-used', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
headers={'X-Account-Meta-Quota-Bytes': '99999'},
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_container_request_ignores_attempt_to_set_quotas(self):
# As with an object, if you try to set X-Account-Meta-* on a
# container, it's ignored.
headers = [('x-account-bytes-used', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c',
headers={'X-Account-Meta-Quota-Bytes': '99999'},
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_bogus_quota_is_ignored(self):
# This can happen if the metadata was set by a user prior to the
# activation of the account-quota middleware
headers = [('x-account-bytes-used', '1000'),
('x-account-meta-quota-bytes', 'pasty-plastogene')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_exceed_bytes_quota(self):
headers = [('x-account-bytes-used', '1000'),
('x-account-meta-quota-bytes', '0')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache})
res = req.get_response(app)
self.assertEquals(res.status_int, 413)
def test_over_quota_container_create_still_works(self):
headers = [('x-account-bytes-used', '1001'),
('x-account-meta-quota-bytes', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/new_container',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_CONTAINER_META_BERT': 'ernie',
'swift.cache': cache})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_over_quota_container_post_still_works(self):
headers = [('x-account-bytes-used', '1001'),
('x-account-meta-quota-bytes', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/new_container',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_CONTAINER_META_BERT': 'ernie',
'swift.cache': cache})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_over_quota_obj_post_still_works(self):
headers = [('x-account-bytes-used', '1001'),
('x-account-meta-quota-bytes', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_OBJECT_META_BERT': 'ernie',
'swift.cache': cache})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_exceed_bytes_quota_copy_from(self):
headers = [('x-account-bytes-used', '500'),
('x-account-meta-quota-bytes', '1000'),
('content-length', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache},
headers={'x-copy-from': '/c2/o2'})
res = req.get_response(app)
self.assertEquals(res.status_int, 413)
def test_exceed_bytes_quota_copy_verb(self):
headers = [('x-account-bytes-used', '500'),
('x-account-meta-quota-bytes', '1000'),
('content-length', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c2/o2',
environ={'REQUEST_METHOD': 'COPY',
'swift.cache': cache},
headers={'Destination': '/c/o'})
res = req.get_response(app)
self.assertEquals(res.status_int, 413)
def test_not_exceed_bytes_quota_copy_from(self):
headers = [('x-account-bytes-used', '0'),
('x-account-meta-quota-bytes', '1000'),
('content-length', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache},
headers={'x-copy-from': '/c2/o2'})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_not_exceed_bytes_quota_copy_verb(self):
headers = [('x-account-bytes-used', '0'),
('x-account-meta-quota-bytes', '1000'),
('content-length', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c2/o2',
environ={'REQUEST_METHOD': 'COPY',
'swift.cache': cache},
headers={'Destination': '/c/o'})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_quota_copy_from_no_src(self):
headers = [('x-account-bytes-used', '0'),
('x-account-meta-quota-bytes', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache},
headers={'x-copy-from': '/c2/o3'})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_quota_copy_from_bad_src(self):
headers = [('x-account-bytes-used', '0'),
('x-account-meta-quota-bytes', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache},
headers={'x-copy-from': 'bad_path'})
res = req.get_response(app)
self.assertEquals(res.status_int, 412)
def test_exceed_bytes_quota_reseller(self):
headers = [('x-account-bytes-used', '1000'),
('x-account-meta-quota-bytes', '0')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache,
'reseller_request': True})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_exceed_bytes_quota_reseller_copy_from(self):
headers = [('x-account-bytes-used', '500'),
('x-account-meta-quota-bytes', '1000'),
('content-length', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache,
'reseller_request': True},
headers={'x-copy-from': 'c2/o2'})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_exceed_bytes_quota_reseller_copy_verb(self):
headers = [('x-account-bytes-used', '500'),
('x-account-meta-quota-bytes', '1000'),
('content-length', '1000')]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c2/o2',
environ={'REQUEST_METHOD': 'COPY',
'swift.cache': cache,
'reseller_request': True},
headers={'Destination': 'c/o'})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_bad_application_quota(self):
headers = []
app = account_quotas.AccountQuotaMiddleware(FakeBadApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache})
res = req.get_response(app)
self.assertEquals(res.status_int, 404)
def test_no_info_quota(self):
headers = []
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_not_exceed_bytes_quota(self):
headers = [('x-account-bytes-used', '1000'),
('x-account-meta-quota-bytes', 2000)]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_invalid_quotas(self):
headers = [('x-account-bytes-used', '0'), ]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a',
environ={'REQUEST_METHOD': 'POST',
'swift.cache': cache,
'HTTP_X_ACCOUNT_META_QUOTA_BYTES': 'abc',
'reseller_request': True})
res = req.get_response(app)
self.assertEquals(res.status_int, 400)
def test_valid_quotas_admin(self):
headers = [('x-account-bytes-used', '0'), ]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a',
environ={'REQUEST_METHOD': 'POST',
'swift.cache': cache,
'HTTP_X_ACCOUNT_META_QUOTA_BYTES': '100'})
res = req.get_response(app)
self.assertEquals(res.status_int, 403)
def test_valid_quotas_reseller(self):
headers = [('x-account-bytes-used', '0'), ]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a',
environ={'REQUEST_METHOD': 'POST',
'swift.cache': cache,
'HTTP_X_ACCOUNT_META_QUOTA_BYTES': '100',
'reseller_request': True})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_delete_quotas(self):
headers = [('x-account-bytes-used', '0'), ]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a',
environ={'REQUEST_METHOD': 'POST',
'swift.cache': cache,
'HTTP_X_ACCOUNT_META_QUOTA_BYTES': ''})
res = req.get_response(app)
self.assertEquals(res.status_int, 403)
def test_delete_quotas_with_remove_header(self):
headers = [('x-account-bytes-used', '0'), ]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a', environ={
'REQUEST_METHOD': 'POST',
'swift.cache': cache,
'HTTP_X_REMOVE_ACCOUNT_META_QUOTA_BYTES': 'True'})
res = req.get_response(app)
self.assertEquals(res.status_int, 403)
def test_delete_quotas_reseller(self):
headers = [('x-account-bytes-used', '0'), ]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
req = Request.blank('/v1/a',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_ACCOUNT_META_QUOTA_BYTES': '',
'reseller_request': True})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_delete_quotas_with_remove_header_reseller(self):
headers = [('x-account-bytes-used', '0'), ]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1/a', environ={
'REQUEST_METHOD': 'POST',
'swift.cache': cache,
'HTTP_X_REMOVE_ACCOUNT_META_QUOTA_BYTES': 'True',
'reseller_request': True})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_invalid_request_exception(self):
headers = [('x-account-bytes-used', '1000'), ]
app = account_quotas.AccountQuotaMiddleware(FakeApp(headers))
cache = FakeCache(None)
req = Request.blank('/v1',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': cache})
res = req.get_response(app)
# Response code of 200 because authentication itself is not done here
self.assertEquals(res.status_int, 200)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 5,786,382,722,435,728,000 | 43.313253 | 79 | 0.530234 | false |
sssilver/angler | rod/rod/model/lesson.py | 1 | 1292 | import sqlalchemy.types
import sqlalchemy.schema
import sqlalchemy.orm
import rod.model
class Lesson(rod.model.db.Model, rod.model.PersistentMixin):
__tablename__ = 'lesson'
id = sqlalchemy.schema.Column(sqlalchemy.types.Integer, primary_key=True)
time = sqlalchemy.schema.Column(sqlalchemy.types.DateTime)
teacher_id = sqlalchemy.schema.Column(sqlalchemy.types.Integer,
sqlalchemy.schema.ForeignKey(
'staff.id',
name='fk_lesson_teacher_id'
))
teacher = sqlalchemy.orm.relationship(
'Staff',
back_populates='lessons'
)
group_id = sqlalchemy.schema.Column(sqlalchemy.types.Integer,
sqlalchemy.schema.ForeignKey(
'group.id',
name='fk_lesson_group_id'
))
group = sqlalchemy.orm.relationship(
'Group',
back_populates='lessons'
)
# Students attendance for this lesson
attendance = sqlalchemy.orm.relationship(
'Attendance',
back_populates='lesson'
)
| bsd-3-clause | 8,059,350,905,018,114,000 | 33 | 77 | 0.51548 | false |
aarora79/sb_study | common/utils.py | 1 | 12780 | # -*- coding: utf-8 -*-
import os
import string
import json
import pandas as pd
import numpy as np
from scipy.stats.stats import pearsonr
from dateutil.parser import parse
from common import globals as glob
SPECIAL_CHARS = string.punctuation.replace("_", "")
SPECIAL_CHARS = string.punctuation.replace("-", "")
SPECIAL_CHARS = set(SPECIAL_CHARS)
def get_quality_summary(qual):
summary = {}
#we are only intrested in the dqs (data quality score) under each category
for q in qual.keys():
summary[q] = {}
summary[q]['dqs'] = qual[q]['dqs']
return summary
def contains_special_chars(word):
#glob.log.info(word)
if any(char in SPECIAL_CHARS for char in word):
return True
else:
return False
def encode_str_in_csv(line, s):
line += '\"' + s + '\"' + ','
return line
def store_in_dict(d, k, key1, val1, key2, val2):
d[k] = {}
d[k][key1] = val1
d[k][key2] = val2
def check_date(qual, category, df, field):
glob.log.info('checking %s ...' %(field))
#just a simple check to validate date
num_invalid = 0
for i in range(len(df)):
store = df.ix[i]
try:
parse(store[field])
except Exception as e:
num_invalid += 1
glob.log.error('found an invalid %s -> %s for store id %s' % (field, str(store[field]), str(store['store_id'])))
glob.log.error(str(e))
qual[category][field] = {}
qual[category][field]['count'] = num_invalid
qual[category][field]['percent'] = round(100*(float(num_invalid)/len(df)), glob.PRECISION)
return num_invalid
def check_as_string_wo_special_chars(qual, category, df, field, prim_key_field):
glob.log.info('checking %s ...' %(field))
    #just a simple check that the field does not contain any special characters
num_invalid = 0
for i in range(len(df)):
store = df.ix[i]
if contains_special_chars(str(store[field])) == True:
num_invalid += 1
glob.log.error('found an invalid %s -> %s for store id %s' % (field, str(store[field]), str(store[prim_key_field])))
qual[category][field] = {}
qual[category][field]['count'] = num_invalid
qual[category][field]['percent'] = round(100*(float(num_invalid)/len(df)), glob.PRECISION)
return num_invalid
def check_as_numeric(qual, category, df, field):
glob.log.info('checking %s ...' %(field))
    #just a simple check that the field is numeric
num_invalid = 0
for i in range(len(df)):
row = df.ix[i]
val = str(row[field])
try:
float(val)
except Exception as e:
num_invalid += 1
glob.log.error('found an invalid %s -> %s' % (field, str(row[field])))
qual[category][field] = {}
qual[category][field]['count'] = num_invalid
qual[category][field]['percent'] = round(100*(float(num_invalid)/len(df)), glob.PRECISION)
return num_invalid
def check_missing(qual, df, mf = []):
glob.log.info('checking missing data...')
qual['missing_data'] = {}
    #check the fraction of missing values in each column and also overall
total_cell_count = df.shape[0] * df.shape[1] #rows x columns
total_empty_cells = 0
mandatory_cells_empty_count = 0 #count of empty cells in mandatory columns
mandatory_feature_list_provided = (True if len(mf) != 0 else False)
for col in df.columns:
        #find out the number of empty cells in this column; the idea is that
        #isnull() gives a TRUE/FALSE array and summing it up
        #gives the count of TRUE i.e. empty cells
empty_cells = df[col].isnull()
num_empty_cells = sum(empty_cells)
total_empty_cells += num_empty_cells
total_cells = len(empty_cells)
#if mandatory feature list provided then check if this feature is mandatory
#if no specific list is provided then consider all features as mandatory
if mandatory_feature_list_provided == True:
if col in mf:
mandatory_cells_empty_count += num_empty_cells
else:
mandatory_cells_empty_count += num_empty_cells
fraction_empty = 100*(float(num_empty_cells)/(total_cells))
#store this info in the dict if there were any empty cells
if num_empty_cells != 0:
store_in_dict(qual['missing_data'], col, 'percent',
round(fraction_empty, glob.PRECISION),
'count', num_empty_cells)
#overall empty cell fraction
fraction_empty = 100*(float(total_empty_cells)/(total_cell_count))
store_in_dict(qual['missing_data'], 'overall', 'percent',
round(fraction_empty, glob.PRECISION),
'count', total_empty_cells)
#calculate two data quality scores; a raw score calculated simply as
# 1 - (total missing values/total values) and an adjusted score which
#is calculated based on missing values that really matter i.e. which would
#cause the entire row to get discarded.
raw_score = round(100 - fraction_empty, glob.PRECISION)
if mandatory_feature_list_provided == True:
        adjusted_raw_score = round(100 - (100 * float(mandatory_cells_empty_count) / total_cell_count), glob.PRECISION)
else:
#in case no mandatory features were provided then adjusted score is same as raw score
adjusted_raw_score = raw_score
qual['missing_data']['dqs'] = {}
qual['missing_data']['dqs']['raw_score'] = raw_score
qual['missing_data']['dqs']['adjusted_score'] = adjusted_raw_score
return qual
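#worked example of check_missing's two scores (numbers are made up): with
#10 rows x 4 columns = 40 cells and 6 empty cells overall, the raw score is
#100 - 100*6/40 = 85.0; if only 2 of those empty cells fall in mandatory
#columns, the adjusted score penalises just those 2 cells and so comes out
#higher than the raw score.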
def write_dict_to_csv(data_dict, fname):
fname = os.path.join(glob.OUTPUT_DIR_NAME, fname)
glob.log.info('going to write dictionary to ' + fname)
#glob.log.info(json.dumps(data_dict, indent=glob.INDENT_LEVEL))
with open(fname, 'w') as csv_file:
#write the header row
line = '\"country_code\",' #first column is country code
#now the indicators
#get all the keys from the first dictionary, they are the same for all dictionaries
#this is a python3 thing because we would get a dict_keys, we convert it into a list
#and then the first element of the list we access and then get the keys from there
#this is specific to the WB data dictionary so this function shouldnt technically be
#in utils.py, but ok..
key_list = data_dict[list(data_dict.keys())[0]].keys()
for k in key_list:
line = encode_str_in_csv(line, k)
#remove the last ',' from the end
line = line[:-1]
line += '\n'
        #ready to write the header row
csv_file.write(line)
#access dictionary by dictionary or rather country by country
for k in data_dict.keys():
#start with an empty line to write
line = ''
d = data_dict[k] #this dictionary represents one country
line = encode_str_in_csv(line, k) #country code
key_count = 0
for k2 in d.keys(): #indicators within a country
val = d[k2]
if k2 != 'name': #name key already holds a string
val = str(val)
                    #if there is a single quote in the name then remove it; it caused problems
                    #when reading the file back
val = val.replace('’', '')
#put the key in quotes, as some keys/values could have spaces
line = encode_str_in_csv(line, val) #value of individual indicators
key_count += 1
glob.log.info('country %s, indicators %d' %(k, key_count))
#write to csv
#remove the last ',' from the end
line = line[:-1]
line += '\n'
#glob.log.info(line)
#ignore any non-ascii characters, this is needed because certain country names
#have non utf-8 characters..they were causing an exception..
#line = line.encode('ascii', 'ignore')
csv_file.write(str(line))
def do_eda(df, filename, ds_name, categorial_feature_list, excluded_list):
glob.log.info('about to do some EDA (exploratory data analysis) on %s data...' %(ds_name))
eda_dict = { 'feature': [], 'mean': [], 'mode':[], 'median':[], 'stddev':[]}
#except for country code all fields are numeric and we can calculate
#mean, median and sd
for col in df.columns:
if col in excluded_list:
continue
eda_dict['feature'].append(col)
if col in categorial_feature_list:
#calc mode by first storing in a categorical series
s = pd.Categorical(df[col], categories=df[col].unique())
cc = s.value_counts()
pd.Series.sort(cc, inplace=True, ascending=False)
#what if there are two modes...just handle one case..doesnt happen in our dataset anyway
#if cc.ix[cc.index[0]] == cc.ix[cc.index[1]]:
# mode = cc.index[0] + ',' + cc.index[1]
# glob.log.info('there are more than 1 modes for %s[%s]' %(ds_name, col))
#else:
# mode = cc.index[0]
mode_str = str(cc.index[0]) + ':' + str(cc.ix[cc.index[0]])
eda_dict['mode'].append(mode_str)
eda_dict['mean'].append(0)
eda_dict['median'].append(0)
eda_dict['stddev'].append(0)
else:
#calc other descriptive stats
eda_dict['mode'].append(0)
eda_dict['mean'].append(df[col].mean())
eda_dict['median'].append(df[col].median())
eda_dict['stddev'].append(np.sqrt(df[col].var()))
eda_df = pd.DataFrame(eda_dict)
eda_df.to_csv(filename, index=False, encoding='utf-8')
try:
glob.log.info(eda_df)
except Exception as e:
glob.log.error('Exception while logging eda_df: ' + str(e))
def detect_outliers(df, ds_name, excluded_col_list, key_col_name):
glob.log.info('Detecting outliers for %s dataset' %(ds_name))
fname = os.path.join(glob.OUTPUT_DIR_NAME, glob.EDA_DIR, ds_name + '_' + glob.OUTLIERS_CSV)
f = open(fname, 'w')
f.write('dataset,entry,field,value,3rdStdDev\n')
for col in df.columns:
if col in excluded_col_list:
continue
#refer to the column as a series, just for ease of understanding
S = df[col]
# we want to say anything outside of the 3rd standard deviation is an outlier...
outliers = S[((S-S.mean()).abs()>3*S.std())]
if len(outliers) > 0:
#print out the outliers
sigma = S.std()
for i in outliers.index:
entry = df.iloc[i]
glob.log.error('[%s] for entry %s, field %s has value %f which is outside the 3rd stddev(%f)'
%(ds_name, entry[key_col_name], col, entry[col], 3*sigma))
f.write('"%s","%s","%s","%f","%f"\n' %(ds_name, entry[key_col_name], col, entry[col], 3*sigma))
f.close()
def calc_r(ds_name, fname, df, feature_list):
glob.log.info('Calculating r for %s dataset' %(ds_name))
f = open(fname, 'w')
f.write('Dataset,feature1,feature2,r\n')
#remove all NA values as pearsons needs to have both series of the same size
df2 = df[feature_list].dropna()
#calculate perason coefficient for all possible combinations
for i in range(len(feature_list)-1):
for j in range(i+1, len(feature_list)):
r = pearsonr(df2[feature_list[i]], df2[feature_list[j]])[0]
glob.log.info('Pearson coeff(r) for %s and %s is %f' %(feature_list[i], feature_list[j], r))
f.write('%s,%s,%s,%f\n' %(ds_name,feature_list[i], feature_list[j], r))
f.close()
def calc_dqs(df):
df_temp = pd.isnull(df)
num_cells = df_temp.shape[0]*df_temp.shape[1]
empty_cells = 0
for c in df_temp.columns:
empty_cells += sum(df_temp[c])
dqs = ((num_cells-empty_cells)*100)/num_cells
glob.log.info('data quality score for dataset is %f' %(dqs))
return dqs
def get_numeric_feature_list(df, excluded_feature_list):
numeric_features = []
for col in df.columns:
try:
x=df[col].iloc[0]
float(x)#typecast the data to float to test if it is numeric
except:
glob.log.info('%s is not a numeric feature, ignoring' %(col))
else:
if col not in excluded_feature_list:
numeric_features.append(col)
return numeric_features | mit | 3,349,361,493,153,902,600 | 41.73913 | 129 | 0.582799 | false |
serge-sans-paille/pythran | pythran/tests/cases/sobelfilter.py | 1 | 2118 | #skip.runas import Image; im = Image.open("Scribus.gif"); image_list = list(im.getdata()); cols, rows = im.size; res = range(len(image_list)); sobelFilter(image_list, res, cols, rows)
#runas cols = 100; rows = 100 ;image_list=[x%10+y%20 for x in range(cols) for y in range(rows)]; sobelFilter(image_list, cols, rows)
#bench cols = 1000; rows = 500 ;image_list=[x%10+y%20 for x in range(cols) for y in range(rows)]; sobelFilter(image_list, cols, rows)
#pythran export sobelFilter(int list, int, int)
def sobelFilter(original_image, cols, rows):
edge_image = list(range(len(original_image)))
for i in range(rows):
edge_image[i * cols] = 255
edge_image[((i + 1) * cols) - 1] = 255
for i in range(1, cols - 1):
edge_image[i] = 255
edge_image[i + ((rows - 1) * cols)] = 255
for iy in range(1, rows - 1):
for ix in range(1, cols - 1):
sum_x = 0
sum_y = 0
sum = 0
#x gradient approximation
sum_x += original_image[ix - 1 + (iy - 1) * cols] * -1
sum_x += original_image[ix + (iy - 1) * cols] * -2
sum_x += original_image[ix + 1 + (iy - 1) * cols] * -1
sum_x += original_image[ix - 1 + (iy + 1) * cols] * 1
sum_x += original_image[ix + (iy + 1) * cols] * 2
sum_x += original_image[ix + 1 + (iy + 1) * cols] * 1
sum_x = min(255, max(0, sum_x))
            #y gradient approximation
sum_y += original_image[ix - 1 + (iy - 1) * cols] * 1
sum_y += original_image[ix + 1 + (iy - 1) * cols] * -1
sum_y += original_image[ix - 1 + (iy) * cols] * 2
sum_y += original_image[ix + 1 + (iy) * cols] * -2
sum_y += original_image[ix - 1 + (iy + 1) * cols] * 1
sum_y += original_image[ix + 1 + (iy + 1) * cols] * -1
sum_y = min(255, max(0, sum_y))
#GRADIENT MAGNITUDE APPROXIMATION
sum = abs(sum_x) + abs(sum_y)
#make edges black and background white
edge_image[ix + iy * cols] = 255 - (255 & sum)
return edge_image
| bsd-3-clause | 6,285,914,057,603,005,000 | 49.428571 | 183 | 0.520774 | false |
chuckinator0/Projects | scripts/singleNumber.py | 1 | 1163 | '''
Given a non-empty array of integers, every element appears twice except for one. Find that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
Example 1:
Input: [2,2,1]
Output: 1
Example 2:
Input: [4,1,2,1,2]
Output: 4
'''
def singleNumber(nums):
# initialize a set of unique items
unique_set = set()
for item in nums:
# if an item isn't already in the unique set, add it to the unique set
if item not in unique_set:
unique_set.add(item)
# if the item is already in unique set, it's no longer unique, so remove it
else:
unique_set.remove(item)
# At this point, the only element to survive the loop will be the one element that is
# not repeated
return unique_set.pop()
# keep in mind that this function doesn't work if the input has values repeated more than twice,
# since they can be erroneously left in the unique set.
# Another solution that doesn't use extra memory would be using XOR (bit manipulation)
def singleNumberXOR(nums):
a = 0
for i in nums:
a ^= i
return a
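# Why the XOR version works: XOR is commutative and associative, x ^ x == 0 and
# x ^ 0 == x, so every value that appears twice cancels out and only the single
# value is left. For example, 4 ^ 1 ^ 2 ^ 1 ^ 2 == 4.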
nums = [4,1,2,1,2]
print(singleNumber(nums))
print(singleNumberXOR(nums))
| gpl-3.0 | 7,711,206,163,601,824,000 | 22.734694 | 106 | 0.72227 | false |
stuartsale/BibShorts | bibshorts/__init__.py | 1 | 3454 | from __future__ import print_function
import requests # for working with remote resources
import re # regular expresions
import os
class bib_entry:
def __init__(self, bibtex_in):
"""
Initialise the object with its arxiv reference number
"""
self.bibtex = bibtex_in
def __cmp__(self, other):
return cmp(self.key, other.key)
def get_bibtex_doi(self):
"""
Scrape full bibtex off web if a doi is contained in entry
"""
self.search_success = False
doi_pattern = re.compile('doi = .*$', re.MULTILINE)
try:
doi = doi_pattern.search(self.bibtex).group().lstrip("doi = {").\
rstrip("},")
except AttributeError:
print("no DOI")
return
print(doi)
url = "http://dx.doi.org/" + doi
url_headers = {"Accept": "application/x-bibtex"}
r = requests.get(url, headers=url_headers)
print(r.text)
self.bibtex = r.text
        self.search_success = True
def set_key(self):
"""
Alter the Bibtex entry's key to match prefered scheme
"""
replace_list = ["{", "\\", "}", "'", "`", '"', "\n", "\t", "^", " "]
author_pattern = re.compile('author = .*\.}\,$|author = .*\t}\,$',
re.MULTILINE | re.DOTALL)
author_list = author_pattern.search(self.bibtex).group().\
lstrip("author = ").rstrip("},")[1:]
author_list = author_list.split(' and ')
if len(author_list)==0:
            print("ERROR: no authors found in bibtex entry")
return
name_pattern = re.compile('{[^,]*}')
name1 = name_pattern.search(author_list[0]).group().lstrip("{").rstrip("}")
name1 = name1.replace("{","").replace("\\","").replace("}","").\
replace("'","").replace("`","").replace('"',"").\
replace("\n","").replace("\t","").replace("^","").\
replace(" ","").strip(" ")
if len(author_list)==1:
name2 = "only"
elif len(author_list)>1:
try:
name2 = name_pattern.search(author_list[1]).group().\
lstrip("{").rstrip("}")
name2 = name2.replace("{","").replace("\\","").\
replace("}","").replace("'","").replace("`","").\
replace('"',"").replace("\n","").\
replace("\t","").replace("^","").\
replace(" ","").strip(" ")
except AttributeError:
name2 = "only"
year_pattern = re.compile('year = .*$', re.MULTILINE)
year = year_pattern.search(self.bibtex).group().lstrip('year =').\
rstrip(",")
self.key = name1+"_"+name2+"."+year
print(self.key)
def bibtex_write(self, output_file):
"""
Dump bibtex to file, checking key not already used
"""
for other_key in written_keys:
if other_key==self.key:
self.key = self.key+"a"
written_keys.append(self.key)
split_bibtex = self.bibtex.split("\n")
output_file.write(self.bib_type+"{"+self.key+",\n")
for n in range(1, len(split_bibtex)):
output_file.write(split_bibtex[n]+"\n")
| bsd-3-clause | -3,439,400,351,898,363,400 | 29.034783 | 83 | 0.458888 | false |
deweysasser/SimpleS3Backup | setup.py | 1 | 1066 | import os
from setuptools import setup
setup(
name = "SimpleS3Backup",
version = "0.2",
author = "Dewey Sasser",
author_email = "[email protected]",
    description = ("A simple backup script that saves arbitrary command output to s3 in a rotating series of files"),
license = "Artistic License",
keywords ="Amazon AWS S3 Backup",
url = "https://github.com/deweysasser/SimpleS3Backup",
scripts = ['s3backup'],
long_description = '''Simple backup script to run commands and send the output to S3
./s3backup -bucket example-bucket -path backups -name mybackup.tar -count 3 tar -C /some/path cp .
Creates /backups/mybackup.tar.1 in AWS bucket "example-bucket", using
the default boto profile. As you run it each day it will rotate up to
COUNT versions, then overwrite.
It uses local disk to store the file, then uploads to S3.''',
install_requires=[ 'boto' ],
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Artistic License"
],
)
| artistic-2.0 | -5,598,649,035,324,486,000 | 35.758621 | 118 | 0.674484 | false |
davidsminor/gaffer | python/GafferUITest/WidgetTest.py | 1 | 12624 | ##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import weakref
import sys
import IECore
import Gaffer
import GafferTest
import GafferUI
import GafferUITest
QtCore = GafferUI._qtImport( "QtCore" )
QtGui = GafferUI._qtImport( "QtGui" )
class TestWidget( GafferUI.Widget ) :
def __init__( self, **kw ) :
GafferUI.Widget.__init__( self, QtGui.QLabel( "hello" ), **kw )
class TestWidget2( GafferUI.Widget ) :
def __init__( self ) :
self.topLevelGafferWidget = TestWidget()
GafferUI.Widget.__init__( self, self.topLevelGafferWidget )
class WidgetTest( GafferUITest.TestCase ) :
def testOwner( self ) :
w = TestWidget()
self.assert_( GafferUI.Widget._owner( w._qtWidget() ) is w )
def testParent( self ) :
w = TestWidget()
self.assert_( w.parent() is None )
def testCanDie( self ) :
w = TestWidget()
wr1 = weakref.ref( w )
wr2 = weakref.ref( w._qtWidget() )
del w
self.assert_( wr1() is None )
self.assert_( wr2() is None )
def testAncestor( self ) :
w = GafferUI.Window( "test" )
l = GafferUI.ListContainer( GafferUI.ListContainer.Orientation.Vertical )
p = GafferUI.SplitContainer()
l.append( p )
w.setChild( l )
self.assert_( p.ancestor( GafferUI.ListContainer ) is l )
self.assert_( p.ancestor( GafferUI.Window ) is w )
self.assert_( p.ancestor( GafferUI.Menu ) is None )
def testIsAncestorOf( self ) :
with GafferUI.Window( "test" ) as w :
with GafferUI.SplitContainer() as p :
with GafferUI.ListContainer() as l1 :
b1 = GafferUI.Button()
with GafferUI.ListContainer() as l2 :
b2 = GafferUI.Button()
self.assertTrue( l2.isAncestorOf( b2 ) )
self.assertFalse( l1.isAncestorOf( b2 ) )
self.assertTrue( p.isAncestorOf( b2 ) )
self.assertTrue( w.isAncestorOf( b2 ) )
self.assertFalse( b2.isAncestorOf( b1 ) )
self.assertFalse( b2.isAncestorOf( l1 ) )
self.assertFalse( b2.isAncestorOf( l2 ) )
self.assertFalse( b2.isAncestorOf( p ) )
self.assertFalse( b2.isAncestorOf( w ) )
self.assertTrue( l1.isAncestorOf( b1 ) )
self.assertFalse( l2.isAncestorOf( b1 ) )
self.assertTrue( p.isAncestorOf( b1 ) )
self.assertTrue( w.isAncestorOf( b1 ) )
def testGafferWidgetAsTopLevel( self ) :
w = TestWidget2()
self.assert_( GafferUI.Widget._owner( w._qtWidget() ) is w )
self.assert_( w.topLevelGafferWidget.parent() is w )
self.assert_( GafferUI.Widget._owner( w.topLevelGafferWidget._qtWidget() ) is not w )
def testToolTip( self ) :
w = TestWidget()
self.assertEqual( w.getToolTip(), "" )
w = TestWidget( toolTip="hi" )
self.assertEqual( w.getToolTip(), "hi" )
w.setToolTip( "a" )
self.assertEqual( w.getToolTip(), "a" )
def testEnabledState( self ) :
w = TestWidget()
self.assertEqual( w.getEnabled(), True )
self.assertEqual( w.enabled(), True )
w.setEnabled( False )
self.assertEqual( w.getEnabled(), False )
self.assertEqual( w.enabled(), False )
w.setEnabled( True )
self.assertEqual( w.getEnabled(), True )
self.assertEqual( w.enabled(), True )
def testDisabledWidgetsDontGetSignals( self ) :
w = TestWidget()
def f( w, event ) :
WidgetTest.signalsEmitted += 1
c = w.buttonPressSignal().connect( f )
WidgetTest.signalsEmitted = 0
event = QtGui.QMouseEvent( QtCore.QEvent.MouseButtonPress, QtCore.QPoint( 0, 0 ), QtCore.Qt.LeftButton, QtCore.Qt.LeftButton, QtCore.Qt.NoModifier )
QtGui.QApplication.instance().sendEvent( w._qtWidget(), event )
self.assertEqual( WidgetTest.signalsEmitted, 1 )
w.setEnabled( False )
QtGui.QApplication.instance().sendEvent( w._qtWidget(), event )
self.assertEqual( WidgetTest.signalsEmitted, 1 )
w.setEnabled( True )
QtGui.QApplication.instance().sendEvent( w._qtWidget(), event )
self.assertEqual( WidgetTest.signalsEmitted, 2 )
def testCanDieAfterUsingSignals( self ) :
w = TestWidget()
wr1 = weakref.ref( w )
wr2 = weakref.ref( w._qtWidget() )
w.buttonPressSignal()
w.buttonReleaseSignal()
w.mouseMoveSignal()
w.wheelSignal()
del w
self.assert_( wr1() is None )
self.assert_( wr2() is None )
def testVisibility( self ) :
with GafferUI.Window() as w :
with GafferUI.ListContainer() as l :
t = TestWidget()
self.assertEqual( w.getVisible(), False )
self.assertEqual( l.getVisible(), True )
self.assertEqual( t.getVisible(), True )
self.assertEqual( w.visible(), False )
self.assertEqual( l.visible(), False )
self.assertEqual( t.visible(), False )
w.setVisible( True )
self.assertEqual( w.getVisible(), True )
self.assertEqual( l.getVisible(), True )
self.assertEqual( t.getVisible(), True )
self.assertEqual( w.visible(), True )
self.assertEqual( l.visible(), True )
self.assertEqual( t.visible(), True )
w.setVisible( False )
self.assertEqual( w.getVisible(), False )
self.assertEqual( l.getVisible(), True )
self.assertEqual( t.getVisible(), True )
self.assertEqual( w.visible(), False )
self.assertEqual( l.visible(), False )
self.assertEqual( t.visible(), False )
self.assertEqual( t.visible( relativeTo = l ), True )
self.assertEqual( t.visible( relativeTo = w ), True )
w.setVisible( True )
t.setVisible( False )
self.assertEqual( t.getVisible(), False )
self.assertEqual( t.visible(), False )
self.assertEqual( t.visible( relativeTo = l ), False )
def testGetVisibleForNewWidgets( self ) :
w = TestWidget()
self.assertEqual( w.getVisible(), True )
def testVisibilityOfParentlessWidgets( self ) :
w = GafferUI.Window()
t = TestWidget()
# windows must be explicitly shown
self.assertEqual( w.getVisible(), False )
self.assertEqual( w.visible(), False )
# widgets don't need to be explicitly shown but
# must not be visible on screen until parented
# to a window
self.assertEqual( t.getVisible(), True )
self.assertEqual( t.visible(), False )
w.setVisible( True )
self.assertEqual( w.getVisible(), True )
self.assertEqual( w.visible(), True )
w.setChild( t )
self.assertEqual( t.getVisible(), True )
self.assertEqual( t.visible(), True )
# removing a widget from its parent must not
# leave it visible on screen.
w.removeChild( t )
self.assertEqual( t.parent(), None )
self.assertEqual( t.getVisible(), True )
self.assertEqual( t.visible(), False )
def testVisibilityWhenTransferringWidgets( self ) :
w1 = GafferUI.Window()
w1.setVisible( True )
w2 = GafferUI.Window()
w2.setVisible( True )
v = TestWidget()
self.assertEqual( v.getVisible(), True )
self.assertEqual( v.visible(), False )
h = TestWidget()
self.assertEqual( h.getVisible(), True )
h.setVisible( False )
self.assertEqual( h.getVisible(), False )
self.assertEqual( h.visible(), False )
w1.setChild( v )
self.assertEqual( v.getVisible(), True )
self.assertEqual( v.visible(), True )
self.assertEqual( h.getVisible(), False )
self.assertEqual( h.visible(), False )
w2.setChild( v )
self.assertEqual( v.getVisible(), True )
self.assertEqual( v.visible(), True )
self.assertEqual( h.getVisible(), False )
self.assertEqual( h.visible(), False )
w1.setChild( h )
self.assertEqual( v.getVisible(), True )
self.assertEqual( v.visible(), True )
self.assertEqual( h.getVisible(), False )
self.assertEqual( h.visible(), False )
w2.setChild( h )
self.assertEqual( v.getVisible(), True )
self.assertEqual( v.visible(), False )
self.assertEqual( h.getVisible(), False )
self.assertEqual( h.visible(), False )
def testSignals( self ) :
w = TestWidget()
for s in [
( "keyPressSignal", GafferUI.WidgetEventSignal ),
( "keyReleaseSignal", GafferUI.WidgetEventSignal ),
( "buttonPressSignal", GafferUI.WidgetEventSignal ),
( "buttonReleaseSignal", GafferUI.WidgetEventSignal ),
( "buttonDoubleClickSignal", GafferUI.WidgetEventSignal ),
( "mouseMoveSignal", GafferUI.WidgetEventSignal ),
( "enterSignal", GafferUI.WidgetSignal ),
( "leaveSignal", GafferUI.WidgetSignal ),
( "wheelSignal", GafferUI.WidgetEventSignal ),
( "visibilityChangedSignal", GafferUI.WidgetSignal ),
( "contextMenuSignal", GafferUI.WidgetSignal ),
( "parentChangedSignal", GafferUI.WidgetSignal ),
] :
self.failUnless( isinstance( getattr( w, s[0] )(), s[1] ) )
self.failUnless( getattr( w, s[0] )() is getattr( w, s[0] )() )
def testBound( self ) :
w = GafferUI.Window( borderWidth = 8 )
b = GafferUI.Button()
w.setChild( b )
w.setVisible( True )
w.setPosition( IECore.V2i( 100 ) )
self.waitForIdle( 1000 )
wb = w.bound()
bb = b.bound()
bbw = b.bound( relativeTo = w )
self.failUnless( isinstance( wb, IECore.Box2i ) )
self.failUnless( isinstance( bb, IECore.Box2i ) )
self.failUnless( isinstance( bbw, IECore.Box2i ) )
self.assertEqual( bb.size(), bbw.size() )
self.assertEqual( bbw.min, bb.min - wb.min )
self.assertEqual( b.size(), bb.size() )
def testParentChangedSignal( self ) :
w = TestWidget()
window = GafferUI.Window()
cs = GafferTest.CapturingSlot( w.parentChangedSignal() )
self.assertEqual( len( cs ), 0 )
window.setChild( w )
self.assertEqual( len( cs ), 1 )
self.assertEqual( cs[0], ( w, ) )
window.setChild( None )
self.assertEqual( len( cs ), 2 )
self.assertEqual( cs[1], ( w, ) )
def testHighlighting( self ) :
w = TestWidget()
self.assertEqual( w.getHighlighted(), False )
w.setHighlighted( True )
self.assertEqual( w.getHighlighted(), True )
w.setHighlighted( False )
self.assertEqual( w.getHighlighted(), False )
def testWidgetAt( self ) :
with GafferUI.Window() as w1 :
t1 = GafferUI.TextWidget( "hello" )
with GafferUI.Window() as w2 :
t2 = GafferUI.TextWidget( "hello" )
w1.setVisible( True )
w2.setVisible( True )
w1.setPosition( IECore.V2i( 100 ) )
w2.setPosition( IECore.V2i( 300 ) )
self.waitForIdle( 1000 )
self.assertTrue( GafferUI.Widget.widgetAt( w1.bound().center() ) is t1 )
self.assertTrue( GafferUI.Widget.widgetAt( w2.bound().center() ) is t2 )
self.assertTrue( GafferUI.Widget.widgetAt( w1.bound().center(), widgetType=GafferUI.Window ) is w1 )
self.assertTrue( GafferUI.Widget.widgetAt( w2.bound().center(), widgetType=GafferUI.Window ) is w2 )
def testMousePosition( self ) :
w = GafferUI.Window( borderWidth = 8 )
b = GafferUI.Button()
w.setChild( b )
w.setVisible( True )
w.setPosition( IECore.V2i( 100 ) )
self.waitForIdle( 1000 )
mouseGlobal = GafferUI.Widget.mousePosition()
mouseLocal = GafferUI.Widget.mousePosition( relativeTo = b )
self.assertEqual( mouseGlobal, mouseLocal + b.bound().min )
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | -3,350,717,458,281,964,500 | 28.290023 | 150 | 0.668805 | false |
Mhynlo/SickRage | sickbeard/providers/cpasbien.py | 1 | 4616 | # coding=utf-8
# Author: Guillaume Serre <[email protected]>
#
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import re
from sickbeard import logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size, try_int
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class CpasbienProvider(TorrentProvider):
def __init__(self):
TorrentProvider.__init__(self, "Cpasbien")
self.public = True
self.minseed = None
self.minleech = None
self.url = "http://www.cpasbien.cm"
self.proper_strings = ['PROPER', 'REPACK']
self.cache = tvcache.TVCache(self)
def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals
results = []
for mode in search_strings:
items = []
logger.log(u"Search Mode: {0}".format(mode), logger.DEBUG)
for search_string in search_strings[mode]:
if mode == 'Season':
search_string = re.sub(ur'(.*)S0?', ur'\1Saison ', search_string)
if mode != 'RSS':
logger.log(u"Search string: {0}".format
(search_string.decode("utf-8")), logger.DEBUG)
search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d'
else:
search_url = self.url + '/view_cat.php?categorie=series&trie=date-d'
data = self.get_url(search_url, returns='text')
if not data:
continue
with BS4Parser(data, 'html5lib') as html:
torrent_rows = html(class_=re.compile('ligne[01]'))
for result in torrent_rows:
try:
title = result.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien")
title = re.sub(r' Saison', ' Season', title, flags=re.I)
tmp = result.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip()
download_url = (self.url + '/telechargement/{0}'.format(tmp))
if not all([title, download_url]):
continue
seeders = try_int(result.find(class_="up").get_text(strip=True))
leechers = try_int(result.find(class_="down").get_text(strip=True))
if seeders < self.minseed or leechers < self.minleech:
if mode != 'RSS':
logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format
(title, seeders, leechers), logger.DEBUG)
continue
torrent_size = result.find(class_="poid").get_text(strip=True)
units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po']
size = convert_size(torrent_size, units=units) or -1
item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': ''}
if mode != 'RSS':
logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format(title, seeders, leechers), logger.DEBUG)
items.append(item)
except StandardError:
continue
# For each search mode sort all the items by seeders if available
items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
results += items
return results
provider = CpasbienProvider()
| gpl-3.0 | -5,184,486,897,737,168,000 | 42.140187 | 150 | 0.543761 | false |
ProfHoekstra/bluesky | plugins/example.py | 1 | 3455 | """ BlueSky plugin template. The text you put here will be visible
in BlueSky as the description of your plugin. """
from random import randint
import numpy as np
# Import the global bluesky objects. Uncomment the ones you need
from bluesky import core, stack, traf #, settings, navdb, sim, scr, tools
### Initialization function of your plugin. Do not change the name of this
### function, as it is the way BlueSky recognises this file as a plugin.
def init_plugin():
''' Plugin initialisation function. '''
# Instantiate our example entity
example = Example()
# Configuration parameters
config = {
# The name of your plugin
'plugin_name': 'EXAMPLE',
# The type of this plugin. For now, only simulation plugins are possible.
'plugin_type': 'sim',
}
# init_plugin() should always return a configuration dict.
return config
### Entities in BlueSky are objects that are created only once (called singleton)
### which implement some traffic or other simulation functionality.
### To define an entity that ADDS functionality to BlueSky, create a class that
### inherits from bluesky.core.Entity.
### To replace existing functionality in BlueSky, inherit from the class that
### provides the original implementation (see for example the asas/eby plugin).
class Example(core.Entity):
''' Example new entity object for BlueSky. '''
def __init__(self):
super().__init__()
# All classes deriving from Entity can register lists and numpy arrays
# that hold per-aircraft data. This way, their size is automatically
# updated when aircraft are created or deleted in the simulation.
with self.settrafarrays():
self.npassengers = np.array([])
def create(self, n=1):
''' This function gets called automatically when new aircraft are created. '''
# Don't forget to call the base class create when you reimplement this function!
super().create(n)
# After base creation we can change the values in our own states for the new aircraft
self.npassengers[-n:] = [randint(0, 150) for _ in range(n)]
# Functions that need to be called periodically can be indicated to BlueSky
# with the timed_function decorator
@core.timed_function(name='example', dt=5)
def update(self):
''' Periodic update function for our example entity. '''
stack.stack('ECHO Example update: creating a random aircraft')
stack.stack('MCRE 1')
# You can create new stack commands with the stack.command decorator.
# By default, the stack command name is set to the function name.
# The default argument type is a case-sensitive word. You can indicate different
# types using argument annotations. This is done in the below function:
# - The acid argument is a BlueSky-specific argument with type 'acid'.
# This converts callsign to the corresponding index in the traffic arrays.
# - The count argument is a regular int.
@stack.command
def passengers(self, acid: 'acid', count: int = -1):
''' Set the number of passengers on aircraft 'acid' to 'count'. '''
if count < 0:
return True, f'Aircraft {traf.id[acid]} currently has {self.npassengers[acid]} passengers on board.'
self.npassengers[acid] = count
return True, f'The number of passengers on board {traf.id[acid]} is set to {count}.'
| gpl-3.0 | 8,750,351,373,721,328,000 | 46.328767 | 112 | 0.684515 | false |
bdcht/amoco | amoco/arch/x86/utils.py | 1 | 4777 | # -*- coding: utf-8 -*-
# This code is part of Amoco
# Copyright (C) 2014 Axel Tillequin ([email protected])
# published under GPLv2 license
# spec_xxx files are providers for instruction objects.
from amoco.arch.x86 import env
from amoco.arch.core import *
# for the ia32 arch we want a specialized 'modrm' format,
# so we redefine the ispec decorator here to allow /0-/7 and /r
# tokens in the spec format, following the Intel documentation on
# how the ModR/M byte should be used:
class ispec_ia32(ispec):
def __init__(self, format, **kargs):
n = format.find("/")
if 0 < n < len(format) - 1:
c = format[n + 1]
if c == "r":
f = format.replace("/r", "RM(3) REG(3) Mod(2) ~data(*)")
else:
f = format.replace(
"/%c" % c, "RM(3) %s Mod(2) ~data(*)" % Bits(int(c, 8), 3)
)
else:
f = format
ispec.__init__(self, f, **kargs)
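# Hedged illustration (added note, not from the original source): ispec_ia32
# only rewrites its format string before delegating to ispec. With a made-up
# spec text, the "/r" expansion amounts to:
#
#   >>> "{af} /r".replace("/r", "RM(3) REG(3) Mod(2) ~data(*)")
#   '{af} RM(3) REG(3) Mod(2) ~data(*)'
#
# whereas "/2", for example, would splice in the constant 3-bit field
# Bits(2, 3) in place of the generic REG(3) field.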
# read ModR/M + SIB values and update obj:
def getModRM(obj, Mod, RM, data):
opdsz = obj.misc["opdsz"] or env.internals["mode"]
adrsz = obj.misc["adrsz"] or env.internals["mode"]
seg = obj.misc["segreg"]
# r/16/32 case:
if Mod == 0b11:
op1 = env.getreg(RM, opdsz)
return op1, data
# 32-bit SIB cases:
if adrsz == 32 and RM == 0b100:
# read SIB byte in data:
if data.size < 8:
raise InstructionError(obj)
sib, data = data[0:8], data[8 : data.size]
# add sib byte:
obj.bytes += pack(sib)
# decode base & scaled index
b = env.getreg(sib[0:3].int(), adrsz)
i = env.getreg(sib[3:6].int(), adrsz)
ss = 1 << (sib[6:8].int())
s = i * ss if not i.ref in ("esp", "sp") else 0
else:
s = 0
if adrsz == 32:
b = env.getreg(RM, adrsz)
else:
b = (
env.bx + env.si,
env.bx + env.di,
env.bp + env.si,
env.bp + env.di,
env.si,
env.di,
env.bp,
env.bx,
)[RM]
# check [disp16/32] case:
if (b is env.ebp or b is env.bp) and Mod == 0:
Mod = 0b10
bs = s + env.cst(0, adrsz)
elif s == 0:
bs = b
elif env.internals.get("keep_order"):
        # Instead of doing bs = b+s, which would reorder the arguments, we do
        # the addition manually and change 'prop' so that the many future
        # calls to 'simplify' do not reorder the arguments.
from amoco.cas import expressions
bs = expressions.op("+", b, s)
bs.prop |= 16
else:
bs = b + s
# now read displacement bytes:
if Mod == 0b00:
d = 0
elif Mod == 0b01:
if data.size < 8:
raise InstructionError(obj)
d = data[0:8]
data = data[8 : data.size]
obj.bytes += pack(d)
d = d.signextend(adrsz).int(-1)
elif Mod == 0b10:
if data.size < adrsz:
raise InstructionError(obj)
d = data[0:adrsz]
obj.bytes += pack(d)
data = data[adrsz : data.size]
d = d.int(-1)
if bs._is_cst and bs.v == 0x0:
bs.size = adrsz
bs.v = d & bs.mask
d = 0
return env.mem(bs, opdsz, seg, d), data
# Condition codes:
CONDITION_CODES = {
0x0: ("O", (env.of == 1)),
0x1: ("NO", (env.of == 0)),
0x2: ("B/NAE/C", (env.cf == 1)),
0x3: ("NB/AE/NC", (env.cf == 0)),
0x4: ("Z/E", (env.zf == 1)),
0x5: ("NZ/NE", (env.zf == 0)),
0x6: ("BE/NA", (env.cf == 1) | (env.zf == 1)),
0x7: ("NBE/A", (env.cf == 0) & (env.zf == 0)),
0x8: ("S", (env.sf == 1)),
0x9: ("NS", (env.sf == 0)),
0xA: ("P/PE", (env.pf == 1)),
0xB: ("NP/PO", (env.pf == 0)),
0xC: ("L/NGE", (env.sf != env.of)),
0xD: ("NL/GE", (env.sf == env.of)),
0xE: ("LE/NG", (env.zf == 1) | (env.sf != env.of)),
0xF: ("NLE/G", (env.zf == 0) & (env.sf == env.of)),
}
def do_nothing(obj):
pass
def set_opdsz_128(obj):
obj.misc["opdsz"] = 128
def set_opdsz_64(obj):
obj.misc["opdsz"] = 64
def set_opdsz_32(obj):
obj.misc["opdsz"] = 32
def check_f2(obj, f=do_nothing):
if obj.misc["pfx"] and obj.misc["pfx"][0] == "repne":
obj.misc["pfx"][0] = None
f(obj)
return True
return False
def check_f3(obj, f=do_nothing):
if obj.misc["pfx"] and obj.misc["pfx"][0] == "rep":
obj.misc["pfx"][0] = None
f(obj)
return True
return False
def check_66(obj, f=do_nothing):
if obj.misc["opdsz"] == 16:
f(obj)
return True
return False
def check_nopfx(obj, f=do_nothing):
if obj.misc["pfx"] is None:
f(obj)
return True
return False
| gpl-2.0 | 6,701,555,174,954,964,000 | 26.454023 | 78 | 0.494871 | false |
davy39/eric | E5Network/E5NetworkMonitor.py | 1 | 13678 | # -*- coding: utf-8 -*-
# Copyright (c) 2009 - 2014 Detlev Offenbach <[email protected]>
#
"""
Module implementing a network monitor dialog.
"""
from __future__ import unicode_literals
from PyQt5.QtCore import Qt, QAbstractTableModel, QModelIndex, QUrl, \
QSortFilterProxyModel
from PyQt5.QtGui import QStandardItemModel
from PyQt5.QtWidgets import QDialog
from PyQt5.QtNetwork import QNetworkRequest, QNetworkAccessManager
from .Ui_E5NetworkMonitor import Ui_E5NetworkMonitor
class E5NetworkRequest(object):
"""
Class for storing all data related to a specific request.
"""
def __init__(self):
"""
Constructor
"""
self.op = -1
self.request = None
self.reply = None
self.response = ""
self.length = 0
self.contentType = ""
self.info = ""
self.replyHeaders = [] # list of tuple of two items
class E5NetworkMonitor(QDialog, Ui_E5NetworkMonitor):
"""
Class implementing a network monitor dialog.
"""
_monitor = None
@classmethod
def instance(cls, networkAccessManager):
"""
Class method to get a reference to our singleton.
@param networkAccessManager reference to the network access manager
(QNetworkAccessManager)
@return reference to the network monitor singleton (E5NetworkMonitor)
"""
if cls._monitor is None:
cls._monitor = E5NetworkMonitor(networkAccessManager)
return cls._monitor
@classmethod
def closeMonitor(cls):
"""
Class method to close the monitor dialog.
"""
if cls._monitor is not None:
cls._monitor.close()
def __init__(self, networkAccessManager, parent=None):
"""
Constructor
@param networkAccessManager reference to the network access manager
(QNetworkAccessManager)
@param parent reference to the parent widget (QWidget)
"""
super(E5NetworkMonitor, self).__init__(parent)
self.setupUi(self)
self.__requestHeaders = QStandardItemModel(self)
self.__requestHeaders.setHorizontalHeaderLabels(
[self.tr("Name"), self.tr("Value")])
self.requestHeadersList.setModel(self.__requestHeaders)
self.requestHeadersList.horizontalHeader().setStretchLastSection(True)
self.requestHeadersList.doubleClicked.connect(self.__showHeaderDetails)
self.__replyHeaders = QStandardItemModel(self)
self.__replyHeaders.setHorizontalHeaderLabels(
[self.tr("Name"), self.tr("Value")])
self.responseHeadersList.setModel(self.__replyHeaders)
self.responseHeadersList.horizontalHeader().setStretchLastSection(True)
self.responseHeadersList.doubleClicked.connect(
self.__showHeaderDetails)
self.requestsList.horizontalHeader().setStretchLastSection(True)
self.requestsList.verticalHeader().setMinimumSectionSize(-1)
self.__proxyModel = QSortFilterProxyModel(self)
self.__proxyModel.setFilterKeyColumn(-1)
self.searchEdit.textChanged.connect(
self.__proxyModel.setFilterFixedString)
self.removeButton.clicked.connect(self.requestsList.removeSelected)
self.removeAllButton.clicked.connect(self.requestsList.removeAll)
self.__model = E5RequestModel(networkAccessManager, self)
self.__proxyModel.setSourceModel(self.__model)
self.requestsList.setModel(self.__proxyModel)
self.__proxyModel.rowsInserted.connect(
self.requestsList.scrollToBottom)
self.requestsList.selectionModel()\
.currentChanged[QModelIndex, QModelIndex]\
.connect(self.__currentChanged)
fm = self.fontMetrics()
em = fm.width("m")
self.requestsList.horizontalHeader().resizeSection(0, em * 5)
self.requestsList.horizontalHeader().resizeSection(1, em * 20)
self.requestsList.horizontalHeader().resizeSection(3, em * 5)
self.requestsList.horizontalHeader().resizeSection(4, em * 15)
self.__headersDlg = None
def closeEvent(self, evt):
"""
Protected method called upon closing the dialog.
@param evt reference to the close event object (QCloseEvent)
"""
self.__class__._monitor = None
super(E5NetworkMonitor, self).closeEvent(evt)
def reject(self):
"""
Public slot to close the dialog with a Reject status.
"""
self.__class__._monitor = None
super(E5NetworkMonitor, self).reject()
def __currentChanged(self, current, previous):
"""
Private slot to handle a change of the current index.
@param current new current index (QModelIndex)
@param previous old current index (QModelIndex)
"""
self.__requestHeaders.setRowCount(0)
self.__replyHeaders.setRowCount(0)
if not current.isValid():
return
row = self.__proxyModel.mapToSource(current).row()
req = self.__model.requests[row].request
for header in req.rawHeaderList():
self.__requestHeaders.insertRows(0, 1, QModelIndex())
self.__requestHeaders.setData(
self.__requestHeaders.index(0, 0),
str(header, "utf-8"))
self.__requestHeaders.setData(
self.__requestHeaders.index(0, 1),
str(req.rawHeader(header), "utf-8"))
self.__requestHeaders.item(0, 0).setFlags(
Qt.ItemIsSelectable | Qt.ItemIsEnabled)
self.__requestHeaders.item(0, 1).setFlags(
Qt.ItemIsSelectable | Qt.ItemIsEnabled)
for header in self.__model.requests[row].replyHeaders:
self.__replyHeaders.insertRows(0, 1, QModelIndex())
self.__replyHeaders.setData(
self.__replyHeaders.index(0, 0),
header[0])
self.__replyHeaders.setData(
self.__replyHeaders.index(0, 1),
header[1])
self.__replyHeaders.item(0, 0).setFlags(
Qt.ItemIsSelectable | Qt.ItemIsEnabled)
self.__replyHeaders.item(0, 1).setFlags(
Qt.ItemIsSelectable | Qt.ItemIsEnabled)
def __showHeaderDetails(self, index):
"""
Private slot to show a dialog with the header details.
@param index index of the entry to show (QModelIndex)
"""
if not index.isValid():
return
headerList = self.sender()
if headerList is None:
return
row = index.row()
name = headerList.model().data(headerList.model().index(row, 0))
value = headerList.model().data(headerList.model().index(row, 1))
if self.__headersDlg is None:
from .E5NetworkHeaderDetailsDialog import \
E5NetworkHeaderDetailsDialog
self.__headersDlg = E5NetworkHeaderDetailsDialog(self)
self.__headersDlg.setData(name, value)
self.__headersDlg.show()
class E5RequestModel(QAbstractTableModel):
"""
Class implementing a model storing request objects.
"""
def __init__(self, networkAccessManager, parent=None):
"""
Constructor
@param networkAccessManager reference to the network access manager
(QNetworkAccessManager)
@param parent reference to the parent object (QObject)
"""
super(E5RequestModel, self).__init__(parent)
self.__headerData = [
self.tr("Method"),
self.tr("Address"),
self.tr("Response"),
self.tr("Length"),
self.tr("Content Type"),
self.tr("Info"),
]
self.__operations = {
QNetworkAccessManager.HeadOperation: "HEAD",
QNetworkAccessManager.GetOperation: "GET",
QNetworkAccessManager.PutOperation: "PUT",
QNetworkAccessManager.PostOperation: "POST",
}
self.requests = []
networkAccessManager.requestCreated.connect(self.__requestCreated)
def __requestCreated(self, operation, request, reply):
"""
Private slot handling the creation of a network request.
@param operation network operation (QNetworkAccessManager.Operation)
@param request reference to the request object (QNetworkRequest)
        @param reply reference to the reply object (QNetworkReply)
"""
req = E5NetworkRequest()
req.op = operation
req.request = QNetworkRequest(request)
req.reply = reply
self.__addRequest(req)
def __addRequest(self, req):
"""
Private method to add a request object to the model.
@param req reference to the request object (E5NetworkRequest)
"""
self.beginInsertRows(
QModelIndex(), len(self.requests), len(self.requests))
self.requests.append(req)
req.reply.finished.connect(self.__addReply)
self.endInsertRows()
def __addReply(self):
"""
Private slot to add the reply data to the model.
"""
reply = self.sender()
if reply is None:
return
offset = len(self.requests) - 1
while offset >= 0:
if self.requests[offset].reply is reply:
break
offset -= 1
if offset < 0:
return
# save the reply header data
for header in reply.rawHeaderList():
self.requests[offset].replyHeaders.append((
str(header, "utf-8"), str(reply.rawHeader(header), "utf-8")))
# save reply info to be displayed
status = reply.attribute(QNetworkRequest.HttpStatusCodeAttribute) or 0
reason = \
reply.attribute(QNetworkRequest.HttpReasonPhraseAttribute) or ""
self.requests[offset].response = "{0:d} {1}".format(status, reason)
self.requests[offset].length = \
reply.header(QNetworkRequest.ContentLengthHeader)
self.requests[offset].contentType = \
reply.header(QNetworkRequest.ContentTypeHeader)
if status == 302:
target = reply.attribute(
QNetworkRequest.RedirectionTargetAttribute) or QUrl()
self.requests[offset].info = \
self.tr("Redirect: {0}").format(target.toString())
def headerData(self, section, orientation, role=Qt.DisplayRole):
"""
Public method to get header data from the model.
@param section section number (integer)
@param orientation orientation (Qt.Orientation)
@param role role of the data to retrieve (integer)
@return requested data
"""
if orientation == Qt.Horizontal and role == Qt.DisplayRole:
return self.__headerData[section]
return QAbstractTableModel.headerData(self, section, orientation, role)
def data(self, index, role=Qt.DisplayRole):
"""
Public method to get data from the model.
@param index index to get data for (QModelIndex)
@param role role of the data to retrieve (integer)
@return requested data
"""
if index.row() < 0 or index.row() >= len(self.requests):
return None
if role == Qt.DisplayRole or role == Qt.EditRole:
col = index.column()
if col == 0:
try:
return self.__operations[self.requests[index.row()].op]
except KeyError:
return self.tr("Unknown")
elif col == 1:
return self.requests[index.row()].request.url().toEncoded()
elif col == 2:
return self.requests[index.row()].response
elif col == 3:
return self.requests[index.row()].length
elif col == 4:
return self.requests[index.row()].contentType
elif col == 5:
return self.requests[index.row()].info
return None
def columnCount(self, parent):
"""
Public method to get the number of columns of the model.
@param parent parent index (QModelIndex)
@return number of columns (integer)
"""
if parent.column() > 0:
return 0
else:
return len(self.__headerData)
def rowCount(self, parent):
"""
Public method to get the number of rows of the model.
@param parent parent index (QModelIndex)
@return number of columns (integer)
"""
if parent.isValid():
return 0
else:
return len(self.requests)
def removeRows(self, row, count, parent):
"""
Public method to remove entries from the model.
@param row start row (integer)
@param count number of rows to remove (integer)
@param parent parent index (QModelIndex)
@return flag indicating success (boolean)
"""
if parent.isValid():
return False
lastRow = row + count - 1
self.beginRemoveRows(parent, row, lastRow)
del self.requests[row:lastRow + 1]
self.endRemoveRows()
return True
| gpl-3.0 | 7,615,740,484,586,443,000 | 34.343669 | 79 | 0.588609 | false |
pettai/capirca | tests/packetfilter_test.py | 1 | 22997 | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest for packetfilter rendering module."""
import datetime
import unittest
from lib import aclgenerator
from lib import nacaddr
from lib import naming
from lib import packetfilter
from lib import policy
import mox
GOOD_HEADER = """
header {
comment:: "this is a test acl"
target:: packetfilter test-filter mixed
}
"""
GOOD_HEADER_STATELESS = """
header {
comment:: "this is a stateless test acl"
target:: packetfilter test-filter mixed nostate
}
"""
GOOD_HEADER_INET4 = """
header {
comment:: "this is a test acl"
target:: packetfilter test-filter
}
"""
GOOD_HEADER_INET6 = """
header {
comment:: "this is a test acl"
target:: packetfilter test-filter inet6
}
"""
GOOD_HEADER_DIRECTIONAL = """
header {
comment:: "this is a test acl"
target:: packetfilter test-filter out mixed
}
"""
GOOD_HEADER_DIRECTIONAL_STATELESS = """
header {
comment:: "this is a test acl"
target:: packetfilter test-filter out mixed nostate
}
"""
GOOD_TERM_ICMP = """
term good-term-icmp {
protocol:: icmp
action:: accept
}
"""
GOOD_TERM_ICMP_TYPES = """
term good-term-icmp-types {
protocol:: icmp
icmp-type:: echo-reply unreachable time-exceeded
action:: deny
}
"""
GOOD_TERM_ICMPV6 = """
term good-term-icmpv6 {
protocol:: icmpv6
action:: accept
}
"""
BAD_TERM_ICMP = """
term test-icmp {
icmp-type:: echo-request echo-reply
action:: accept
}
"""
BAD_TERM_ACTION = """
term bad-term-action {
protocol:: icmp
action:: reject-with-tcp-rst
}
"""
GOOD_TERM_TCP = """
term good-term-tcp {
comment:: "Test term 1"
destination-address:: PROD_NETWORK
destination-port:: SMTP
protocol:: tcp
action:: accept
}
"""
DENY_TERM_TCP = """
term deny-term-tcp {
protocol:: tcp
action:: deny
}
"""
GOOD_TERM_LOG = """
term good-term-log {
protocol:: tcp
logging:: true
action:: accept
}
"""
EXPIRED_TERM = """
term expired_test {
expiration:: 2000-1-1
action:: deny
}
"""
EXPIRED_TERM2 = """
term expired_test2 {
expiration:: 2015-01-01
action:: deny
}
"""
EXPIRING_TERM = """
term is_expiring {
expiration:: %s
action:: accept
}
"""
MULTIPLE_PROTOCOLS_TERM = """
term multi-proto {
protocol:: tcp udp icmp
action:: accept
}
"""
NEXT_TERM = """
term next {
action:: next
}
"""
NEXT_LOG_TERM = """
term next-log {
logging:: true
action:: next
}
"""
PORTRANGE_TERM = """
term portrange {
protocol:: tcp
action:: accept
destination-port:: HIGH_PORTS
}
"""
FLAGS_TERM = """
term flags {
protocol:: tcp
action:: accept
option:: syn fin
}
"""
INVALID_FLAGS_TERM = """
term invalid-flags {
protocol:: udp
action:: accept
option:: syn fin
}
"""
MULTILINE_COMMENT = """
term multiline-comment {
comment:: "This is a
multiline comment"
protocol:: tcp
action:: accept
}
"""
TCP_STATE_TERM = """
term tcp-established-only {
protocol:: tcp
option:: established
action:: accept
}
"""
TCP_GOOD_ESTABLISHED_TERM = """
term tcp-established-good {
protocol:: tcp
option:: established
action:: accept
}
"""
TCP_BAD_ESTABLISHED_TERM = """
term tcp-established-bad {
protocol:: tcp
option:: established syn
action:: accept
}
"""
UDP_ESTABLISHED_TERM = """
term udp-established {
protocol:: udp
option:: established
action:: accept
}
"""
MULTIPLE_NAME_TERM = """
term multiple-name {
protocol:: tcp
destination-address:: PROD_NETWORK
destination-port:: SMTP
source-address:: CORP_INTERNAL
action:: accept
}
"""
LONG_NAME_TERM = """
term multiple-name {
protocol:: tcp
destination-address:: PROD_NETWORK_EXTREAMLY_LONG_VERY_NO_GOOD_NAME
destination-port:: SMTP
action:: accept
}
"""
DUPLICATE_LONG_NAME_TERM = """
term multiple-name {
protocol:: tcp
destination-address:: PROD_NETWORK_EXTREAMLY_LONG_VERY_NO_GOOD_NAME
destination-port:: SMTP
source-address:: PROD_NETWORK_EXTREAMLY_LONG_VERY_GOOD_NAME
action:: accept
}
"""
BAD_PROTO_TERM = """
term bad-proto {
protocol:: hop-by-hop
action:: accept
}
"""
# Print an info message when a term is set to expire in that many weeks.
# This is normally passed from the command line.
EXP_INFO = 2
class PacketFilterTest(unittest.TestCase):
def setUp(self):
self.mox = mox.Mox()
self.naming = self.mox.CreateMock(naming.Naming)
def tearDown(self):
self.mox.VerifyAll()
self.mox.UnsetStubs()
self.mox.ResetAll()
def testTcp(self):
ip = nacaddr.IP('10.0.0.0/8')
ip.parent_token = 'PROD_NETWORK'
self.naming.GetNetAddr('PROD_NETWORK').AndReturn([ip])
self.naming.GetServiceByProto('SMTP', 'tcp').AndReturn(['25'])
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + GOOD_TERM_TCP, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term good-term-tcp' in result,
'did not find comment for good-term-tcp')
self.failUnless(
'pass quick proto { tcp } from { any } to { <PROD_NETWORK> } port '
'{ 25 }' in result,
'did not find actual term for good-term-tcp')
def testLog(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + GOOD_TERM_LOG, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term good-term-log' in result,
'did not find comment for good-term-log')
self.failUnless(
'pass quick log proto { tcp } from { any } to { any } flags S/SA '
'keep state\n'
in result,
'did not find actual term for good-term-log')
def testIcmp(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + GOOD_TERM_ICMP, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term good-term-icmp' in result,
'did not find comment for good-term-icmp')
self.failUnless(
'pass quick proto { icmp } from { any } to { any } keep state\n'
in result,
'did not find actual term for good-term-icmp')
def testIcmpTypes(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + GOOD_TERM_ICMP_TYPES, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term good-term-icmp-types' in result,
'did not find comment for good-term-icmp-types')
self.failUnless(
'block drop quick proto { icmp } from { any } to { any } '
'icmp-type { 0, 3, 11 }' in result,
'did not find actual term for good-term-icmp-types')
def testIcmpv6(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + GOOD_TERM_ICMPV6, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term good-term-icmpv6' in result,
'did not find comment for good-term-icmpv6')
self.failUnless(
'pass quick proto { ipv6-icmp } from { any } to { any } keep state\n'
in result,
'did not find actual term for good-term-icmpv6')
def testBadIcmp(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + BAD_TERM_ICMP, self.naming), EXP_INFO)
self.assertRaises(aclgenerator.UnsupportedFilterError, str, acl)
def testExpiredTerm(self):
self.mox.StubOutWithMock(packetfilter.logging, 'warn')
# create mock to ensure we warn about expired terms being skipped
packetfilter.logging.warn('WARNING: Term %s in policy %s is expired and '
'will not be rendered.', 'expired_test',
'test-filter')
self.mox.ReplayAll()
packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + EXPIRED_TERM, self.naming), EXP_INFO)
def testExpiredTerm2(self):
self.mox.StubOutWithMock(packetfilter.logging, 'warn')
# create mock to ensure we warn about expired terms being skipped
packetfilter.logging.warn('WARNING: Term %s in policy %s is expired and '
'will not be rendered.', 'expired_test2',
'test-filter')
self.mox.ReplayAll()
packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + EXPIRED_TERM2, self.naming), EXP_INFO)
def testExpiringTerm(self):
self.mox.StubOutWithMock(packetfilter.logging, 'info')
# create mock to ensure we inform about expiring terms
packetfilter.logging.info('INFO: Term %s in policy %s expires in '
'less than two weeks.', 'is_expiring',
'test-filter')
self.mox.ReplayAll()
exp_date = datetime.date.today() + datetime.timedelta(weeks=EXP_INFO)
packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + EXPIRING_TERM % exp_date.strftime('%Y-%m-%d'),
self.naming), EXP_INFO)
def testBadAction(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + BAD_TERM_ACTION, self.naming), EXP_INFO)
self.assertRaises(aclgenerator.UnsupportedFilterError, str, acl)
def testMultiprotocol(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + MULTIPLE_PROTOCOLS_TERM, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term multi-proto' in result,
'did not find comment for multi-proto')
self.failUnless(
'pass quick proto { tcp udp icmp } from { any } to { any } keep state\n'
in result,
'did not find actual term for multi-proto')
def testNextTerm(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + NEXT_TERM, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term next' in result,
'did not find comment for next')
self.failUnless(
'pass from { any } to { any } flags S/SA keep state\n' in result,
'did not find actual term for next-term')
def testNextLogTerm(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + NEXT_LOG_TERM, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term next-log' in result,
'did not find comment for next-log')
self.failUnless(
'pass log from { any } to { any } flags S/SA keep state\n' in result,
'did not find actual term for next-log-term')
def testPortRange(self):
self.naming.GetServiceByProto('HIGH_PORTS', 'tcp').AndReturn(
['12345-12354'])
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + PORTRANGE_TERM, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term portrange' in result,
'did not find comment for portrange')
self.failUnless(
'pass quick proto { tcp } from { any } to { any } '
'port { 12345:12354 }' in result,
'did not find actual term for portrange')
def testFlags(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + FLAGS_TERM, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term flags' in result,
'did not find comment for flags')
self.failUnless(
'pass quick proto { tcp } from { any } to { any } '
'flags SF/SF' in result,
'did not find actual term for flags')
def testInvalidFlags(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + INVALID_FLAGS_TERM, self.naming), EXP_INFO)
self.assertRaises(aclgenerator.UnsupportedFilterError, str, acl)
def testMultilineComment(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + MULTILINE_COMMENT, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term multiline-comment' in result,
'did not find comment for multiline-comment')
self.failUnless('# This is a\n# multiline comment' in result,
'did not find multiline comment for multiline-comment')
def testStateless(self):
ip = nacaddr.IP('10.0.0.0/8')
ip.parent_token = 'PROD_NETWORK'
self.naming.GetNetAddr('PROD_NETWORK').AndReturn([ip])
self.naming.GetServiceByProto('SMTP', 'tcp').AndReturn(['25'])
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER_STATELESS + GOOD_TERM_TCP, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term good-term-tcp' in result,
'did not find comment for good-term-tcp')
self.failUnless(
'pass quick proto { tcp } from { any } to { <PROD_NETWORK> } port '
'{ 25 } no state' in result,
'did not find actual term for good-term-tcp')
def testInet4(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER_INET4 + GOOD_TERM_LOG, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term good-term-log' in result,
'did not find comment for good-term-log')
self.failUnless(
'pass quick log inet proto { tcp } from { any } to { any } flags S/SA '
'keep state\n'
in result,
'did not find actual term for good-term-log')
def testInet6(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER_INET6 + GOOD_TERM_LOG, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term good-term-log' in result,
'did not find comment for good-term-log')
self.failUnless(
'pass quick log inet6 proto { tcp } from { any } to { any } flags S/SA '
'keep state\n'
in result,
'did not find actual term for good-term-log')
def testDirectional(self):
ip = nacaddr.IP('10.0.0.0/8')
ip.parent_token = 'PROD_NETWORK'
self.naming.GetNetAddr('PROD_NETWORK').AndReturn([ip])
self.naming.GetServiceByProto('SMTP', 'tcp').AndReturn(['25'])
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER_DIRECTIONAL + GOOD_TERM_TCP, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term good-term-tcp' in result,
'did not find comment for good-term-tcp')
self.failUnless(
'pass out quick proto { tcp } from { any } to { <PROD_NETWORK> } port '
'{ 25 }' in result,
'did not find actual term for good-term-tcp')
def testMultipleHeader(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER_STATELESS + GOOD_TERM_LOG + GOOD_HEADER_INET6
+ GOOD_TERM_ICMP,
self.naming), EXP_INFO)
result = str(acl)
self.failUnless(
'pass quick log proto { tcp } from { any } to { any } no state'
in result,
'did not find actual term for good-term-log')
self.failUnless(
'pass quick inet6 proto { icmp } from { any } to { any } no state'
in result,
'did not find actual term for good-term-icmp')
def testDirectionalStateless(self):
ip = nacaddr.IP('10.0.0.0/8')
ip.parent_token = 'PROD_NETWORK'
self.naming.GetNetAddr('PROD_NETWORK').AndReturn([ip])
self.naming.GetServiceByProto('SMTP', 'tcp').AndReturn(['25'])
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER_DIRECTIONAL_STATELESS + GOOD_TERM_TCP, self.naming),
EXP_INFO)
result = str(acl)
self.failUnless('# term good-term-tcp' in result,
'did not find comment for good-term-tcp')
self.failUnless(
'pass out quick proto { tcp } from { any } to { <PROD_NETWORK> } port '
'{ 25 } no state' in result,
'did not find actual term for good-term-tcp')
def testStatelessEstablished(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER_STATELESS + TCP_STATE_TERM, self.naming),
EXP_INFO)
result = str(acl)
self.failUnless('# term tcp-established-only' in result,
'did not find comment for tcp-established-only')
self.failUnless(
'pass quick proto { tcp } from { any } to { any } flags A/A no state'
in result,
'did not find actual term for tcp-established-only')
def testBadFlags(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + TCP_BAD_ESTABLISHED_TERM, self.naming), EXP_INFO)
self.assertRaises(aclgenerator.UnsupportedFilterError, str, acl)
# While "UDP stateless established" seems to be a strange combination it
# actually makes sense: e.g., the state or nostate header is a global
# header directive and indicates whether we do matching on established by
# flags or proper connection tracking, and pf's permissiveness allows things
# like:
# proto { udp, tcp } flags A/A no state'
# whereby the flags only apply to TCP protocol matches. However, the
# following is invalid:
# proto { udp } flags A/A no state'
# check to make sure we don't output the latter for things like:
# target:: packetfilter nostate
# term foo { protocol:: udp option:: established }
def testUdpStatelessEstablished(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER_STATELESS + UDP_ESTABLISHED_TERM, self.naming),
EXP_INFO)
result = str(acl)
self.failUnless('# term udp-established' in result,
'did not find comment for udp-established')
self.failUnless(
'pass quick proto { udp } from { any } to { any } no state'
in result,
'did not find actual term for udp-established')
def testStatefulBlock(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + DENY_TERM_TCP, self.naming), EXP_INFO)
result = str(acl)
self.failUnless('# term deny-term-tcp' in result,
                    'did not find comment for deny-term-tcp')
self.failUnless(
'block drop quick proto { tcp } from { any } to { any } flags S/SA'
in result,
'did not find actual term for deny-term-tcp')
def testTcpEstablished(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + TCP_GOOD_ESTABLISHED_TERM, self.naming),
EXP_INFO)
result = str(acl)
self.failUnless('# term tcp-established-good' in result,
'did not find comment for tcp-established-good')
self.failUnless(
'pass quick proto { tcp } from { any } to { any } flags A/A keep state'
in result,
        'did not find actual term for tcp-established-good')
def testTableCreation(self):
prod_network = nacaddr.IP('10.0.0.0/8')
prod_network.parent_token = 'PROD_NETWORK'
corp_internal_one = nacaddr.IP('100.96.0.1/11')
corp_internal_one.parent_token = 'CORP_INTERNAL'
corp_internal_two = nacaddr.IP('172.16.0.0/16')
corp_internal_two.parent_token = 'CORP_INTERNAL'
self.naming.GetNetAddr('PROD_NETWORK').AndReturn([prod_network])
self.naming.GetNetAddr('CORP_INTERNAL').AndReturn([corp_internal_one,
corp_internal_two])
self.naming.GetServiceByProto('SMTP', 'tcp').AndReturn(['25'])
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + MULTIPLE_NAME_TERM, self.naming),
EXP_INFO)
result = str(acl)
self.failUnless(
'table <PROD_NETWORK> {10.0.0.0/8}' in result,
        'did not find PROD_NETWORK table in header')
self.failUnless(
'table <CORP_INTERNAL> {100.96.0.1/11,\\\n'
'172.16.0.0/16}' in result,
'did not find CORP_INTERNAL table in header')
self.failUnless(
'pass quick proto { tcp } from { <CORP_INTERNAL> } to '
'{ <PROD_NETWORK> } port { 25 } flags S/SA keep state'
in result,
'did not find actual term for multiple-name')
def testTableNameShortened(self):
prod_network = nacaddr.IP('10.0.0.0/8')
prod_network.parent_token = 'PROD_NETWORK_EXTREAMLY_LONG_VERY_NO_GOOD_NAME'
self.naming.GetNetAddr(
'PROD_NETWORK_EXTREAMLY_LONG_VERY_NO_GOOD_NAME').AndReturn([prod_network])
self.naming.GetServiceByProto('SMTP', 'tcp').AndReturn(['25'])
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER_DIRECTIONAL + LONG_NAME_TERM, self.naming), EXP_INFO)
result = str(acl)
self.failUnless(
'table <PROD_NETWORK_EXTREAMLY_LONG_VER> {10.0.0.0/8}' in result,
'did not find shortened name in header.')
self.failUnless(
'pass out quick proto { tcp } from { any } to '
'{ <PROD_NETWORK_EXTREAMLY_LONG_VER> } '
'port { 25 } flags S/SA keep state'
in result,
'did not find actual term for multiple-name')
def testTableDuplicateShortNameError(self):
prod_network = nacaddr.IP('10.0.0.0/8')
prod_network.parent_token = 'PROD_NETWORK_EXTREAMLY_LONG_VERY_NO_GOOD_NAME'
prod_network_two = nacaddr.IP('172.0.0.1/8')
prod_network_two.parent_token = 'PROD_NETWORK_EXTREAMLY_LONG_VERY_GOOD_NAME'
self.naming.GetNetAddr(
'PROD_NETWORK_EXTREAMLY_LONG_VERY_NO_GOOD_NAME').AndReturn([prod_network])
self.naming.GetNetAddr(
'PROD_NETWORK_EXTREAMLY_LONG_VERY_GOOD_NAME').AndReturn([prod_network_two])
self.naming.GetServiceByProto('SMTP', 'tcp').AndReturn(['25'])
self.mox.ReplayAll()
self.assertRaises(
packetfilter.DuplicateShortenedTableName,
packetfilter.PacketFilter.__init__,
packetfilter.PacketFilter.__new__(packetfilter.PacketFilter),
policy.ParsePolicy(
GOOD_HEADER_DIRECTIONAL + DUPLICATE_LONG_NAME_TERM, self.naming),
EXP_INFO)
def testBadProtoError(self):
self.mox.ReplayAll()
acl = packetfilter.PacketFilter(policy.ParsePolicy(
GOOD_HEADER + BAD_PROTO_TERM, self.naming), EXP_INFO)
self.assertRaises(packetfilter.UnsupportedProtoError, str, acl)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -5,017,035,875,669,173,000 | 31.435825 | 83 | 0.643649 | false |
zijistark/zckTools | src/zck/token_codegen.py | 1 | 1078 | #!/usr/bin/python3
import sys
from pathlib import Path
list_scope_path = Path("./list_scope_tokens.txt")
keyword_bit = 13
list_scope_bit = 14
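# Added note: with keyword_bit = 13 and list_scope_bit = 14, the starting
# token id computed in main() is (1 << 12) | (1 << 13) == 0x3000, i.e. every
# generated token id carries both the keyword and the list-scope flag bits.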
def main():
if len(sys.argv) < 2:
print("Error: Must specify an argument of either 'tokens' or 'emitters'!", file=sys.stderr)
return 1
list_scopes = set()
with list_scope_path.open('r') as f:
for line in f:
line = line.strip()
if line.startswith('#') or len(line) == 0:
continue
list_scopes.add(line)
max_kw_len = max( len(kw) for kw in list_scopes )
if sys.argv[1] == 'tokens':
t_id = (1 << (keyword_bit - 1)) | (1 << (list_scope_bit-1))
for t in sorted(list_scopes):
print(' {:<{width}} = 0x{:4X};'.format(t.upper(), t_id, width=max_kw_len))
t_id += 1
elif sys.argv[1] == 'emitters':
for t in sorted(list_scopes):
print(' {:<{width}} => T_{}(Lexeme);'.format('"' + t + '"', t.upper(), width = max_kw_len + 2))
else:
print("Error: Must specify an argument of either 'tokens' or 'emitters'!", file=sys.stderr)
return 1
return 0
if __name__ == '__main__':
sys.exit(main())
| lgpl-3.0 | -3,612,522,775,626,830,300 | 23.5 | 99 | 0.602968 | false |
nontas/digitrecognition | digitrecognition/data_converter.py | 1 | 12537 | import os
import urllib
import gzip
import numpy as np
import tensorflow as tf
from menpo.image import Image
from menpo.visualize import print_dynamic
from digitrecognition.base import src_dir_path
# MNIST url
SOURCE_URL = 'http://yann.lecun.com/exdb/mnist/'
# MNIST filenames
TRAIN_IMAGES = 'train-images-idx3-ubyte.gz'
TRAIN_LABELS = 'train-labels-idx1-ubyte.gz'
TEST_IMAGES = 't10k-images-idx3-ubyte.gz'
TEST_LABELS = 't10k-labels-idx1-ubyte.gz'
def download(filename, verbose=False):
r"""
Method that downloads the provided filename from SOURCE_URL and
stores it in the data path, if it doesn't already exist.
Parameters
----------
filename : `str`
The filename to download.
verbose : `bool`, optional
If `True`, then the progress will be printed.
Returns
-------
file_path : `pathlib.PosixPath`
The path where the file was stored.
"""
if verbose:
print_dynamic('Downloading {}'.format(filename))
# Path to store data
data_path = src_dir_path() / 'data'
# Check if data path exists, otherwise create it
if not os.path.isdir(str(data_path)):
os.makedirs(str(data_path))
# Check if file exists
file_path = data_path / filename
if not os.path.isfile(str(file_path)):
# It doesn't exist, so download it
urllib.request.urlretrieve(SOURCE_URL + filename,
filename=str(file_path))
# Return the path where the file is stored
return file_path
def _read32(bytestream):
r"""
Read bytes as 32-bit integers.
Parameters
----------
bytestream : `bytes`
The bytes to read.
Returns
-------
array : `array`
The 32-bit int data.
"""
dt = np.dtype(np.uint32).newbyteorder('>')
return np.frombuffer(bytestream.read(4), dtype=dt)[0]
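# Hedged doctest-style example (added for clarity, not executed on import):
# the big-endian bytes 00 00 08 03 decode to 2051, which is the magic number
# expected by extract_images() below.
#
#   >>> import io
#   >>> _read32(io.BytesIO(b'\x00\x00\x08\x03'))
#   2051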
def extract_images(filename, as_images=False, verbose=False):
r"""
Extract images from gzip file.
Parameters
----------
filename : `pathlib.PosixPath`
The gzip file path.
as_images : `bool`, optional
If `True`, then the method returns a list containing a
`menpo.image.Image` object per image. If `False`, then it
returns a numpy array of shape `(n_images, height, width, n_channels)`.
verbose : `bool`, optional
If `True`, then the progress will be printed.
Returns
-------
images : `list` or `array`
The image data.
"""
if verbose:
print_dynamic('Extracting {}'.format(filename))
with open(str(filename), 'rb') as f, gzip.GzipFile(fileobj=f) as bytestream:
magic = _read32(bytestream)
if magic != 2051:
raise ValueError('Invalid magic number %d in MNIST image file: %s' %
(magic, filename))
num_images = _read32(bytestream)
rows = _read32(bytestream)
cols = _read32(bytestream)
buf = bytestream.read(rows * cols * num_images)
data = np.frombuffer(buf, dtype=np.uint8)
data = data.reshape(num_images, rows, cols, 1)
# Convert data array to list of menpo.image.Image, if required
if as_images:
return [Image(data[i, :, :, 0]) for i in range(data.shape[0])]
return data
def _convert_dense_to_one_hot(labels_dense):
r"""
Method that converts an array of labels to one-hot labels.
Parameters
----------
labels_dense : `array`
An `(n_images,)` array with an integer label per image.
Returns
-------
labels : `array`
An `(n_images, n_labels)` array with the one-hot labels.
"""
# Get number of labels and classes
num_labels = labels_dense.shape[0]
num_classes = labels_dense.max() + 1
# Create binary one-hot indicator
index_offset = np.arange(num_labels) * num_classes
labels_one_hot = np.zeros((num_labels, num_classes))
labels_one_hot.flat[index_offset + labels_dense.ravel()] = 1
return labels_one_hot
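# Hedged doctest-style example (added for clarity, not executed on import):
#
#   >>> _convert_dense_to_one_hot(np.array([1, 0, 2])).astype(int)
#   array([[0, 1, 0],
#          [1, 0, 0],
#          [0, 0, 1]])
#
# Each row is a one-hot indicator vector whose width is max(label) + 1.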
def extract_labels(filename, as_one_hot=False, verbose=False):
r"""
Extract labels from gzip file.
Parameters
----------
filename : `pathlib.PosixPath`
The gzip file path.
as_one_hot : `bool`, optional
If `False`, then the labels are returned as integers within
a `(n_images,)` numpy array. If `True`, then the labels are
        returned as one-hot vectors in an `(n_images, n_labels)` numpy
array.
verbose : `bool`, optional
If `True`, then the progress will be printed.
Returns
-------
labels : `array`
The extracted labels.
"""
if verbose:
print_dynamic('Extracting {}'.format(filename))
with open(str(filename), 'rb') as f, gzip.GzipFile(fileobj=f) as bytestream:
magic = _read32(bytestream)
if magic != 2049:
raise ValueError('Invalid magic number %d in MNIST label file: %s' %
(magic, filename))
num_items = _read32(bytestream)
buf = bytestream.read(num_items)
labels = np.frombuffer(buf, dtype=np.uint8)
# Convert labels array to one-hot labels, if required
if as_one_hot:
return _convert_dense_to_one_hot(labels)
return labels
def split_data(images, labels, n_images):
r"""
    Method that splits a set of images and corresponding labels into two
    disjoint sets. This is useful for creating a training and validation set.
Parameters
----------
images : `array` or `list`
The images to split.
labels : `array`
The corresponding labels to split.
n_images : `int`
The number of images of the first disjoint set.
Returns
-------
images1 : `array` or `list`
The first set of images.
labels1 : `array`
The first set of labels.
images2 : `array` or `list`
The second set of images.
labels2 : `array`
The second set of labels.
"""
images1 = images[:n_images]
labels1 = labels[:n_images]
images = images[n_images:]
labels = labels[n_images:]
return images1, labels1, images, labels
def convert_images_to_array(images):
r"""
Method that converts a list of `menpo.image.Image` objects to numpy array
of shape `(n_images, height, width, n_channels)`.
Parameters
----------
images : `list` of `menpo.image.Image`
The list of images.
Returns
-------
images : `array`
The `(n_images, height, width, n_channels)` array of images.
"""
if isinstance(images, list):
# Get dimensions
n_images = len(images)
height, width = images[0].shape
n_channels = images[0].n_channels
# Initialize array with zeros
arr = np.zeros((n_images, height, width, n_channels),
dtype=images[0].pixels.dtype)
# Extract pixels from each image
for i, im in enumerate(images):
arr[i] = im.pixels_with_channels_at_back()[..., None]
return arr
else:
return images
def import_mnist_data(n_validation_images=5000, as_one_hot=False, verbose=False):
r"""
    Method that downloads, extracts and converts the MNIST data to the
    appropriate format. It returns the train, validation and test images
    with the corresponding labels.
Parameters
----------
n_validation_images : `int`, optional
The number of images from the training set that will be used as
validation set.
as_one_hot : `bool`, optional
If `False`, then the labels are returned as integers within
a `(n_images,)` numpy array. If `True`, then the labels are
returned as one-hot vectors in an `(n_images, n_labels)` numpy array.
verbose : `bool`, optional
If `True`, then the progress will be printed.
Returns
-------
train_images : `list` of `menpo.image.Image`
The list of train images.
train_labels : `array`
The array of labels of the train images.
validation_images : `list` of `menpo.image.Image`
The list of validation images.
validation_labels : `array`
The array of labels of the validation images.
test_images : `list` of `menpo.image.Image`
The list of test images.
test_labels : `array`
The array of labels of the test images.
"""
# Download MNIST, if is not already downloaded
train_images_path = download(TRAIN_IMAGES, verbose=verbose)
train_labels_path = download(TRAIN_LABELS, verbose=verbose)
test_images_path = download(TEST_IMAGES, verbose=verbose)
test_labels_path = download(TEST_LABELS, verbose=verbose)
# Extract the gz files and convert them to appropriate format
train_images = extract_images(train_images_path, as_images=True,
verbose=verbose)
train_labels = extract_labels(train_labels_path, as_one_hot=as_one_hot,
verbose=verbose)
test_images = extract_images(test_images_path, as_images=True,
verbose=verbose)
test_labels = extract_labels(test_labels_path, as_one_hot=as_one_hot,
verbose=verbose)
# Generate a validation set from the training set
validation_images, validation_labels, train_images, train_labels = \
split_data(train_images, train_labels, n_validation_images)
if verbose:
print_dynamic('Successfully imported MNIST')
# Return images and labels
return (train_images, train_labels, validation_images, validation_labels,
test_images, test_labels)
def _int64_feature(value):
r"""
Convenience method for defining a 64-bit integer within tensorflow.
Parameters
----------
value : `int`
The input value.
Returns
-------
tf_int64 : `tf.train.Feature`
The converted value.
"""
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def _bytes_feature(value):
r"""
Convenience method for defining a bytes list within tensorflow.
Parameters
----------
value : `bytes`
The input bytes list.
Returns
-------
tf_bytes : `tf.train.Feature`
The converted value.
"""
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def serialize_data(images, labels, filename, verbose=False):
r"""
    Method that saves the provided images and labels to a tfrecords file using
    the tensorflow record writer.
Parameters
----------
images : `list` or `array`
The images to serialize.
labels : `array`
The corresponding labels.
filename : `str`
The filename to use. Note that the data will be saved in the 'data'
folder.
verbose : `bool`, optional
If `True`, then the progress will be printed.
"""
# If images is list, convert it to numpy array
images = convert_images_to_array(images)
# Get number of images, height, width and number of channels
num_examples = labels.shape[0]
if images.shape[0] != num_examples:
        raise ValueError("Images size {} does not match labels size {}.".format(
            images.shape[0], num_examples))
height = images.shape[1]
width = images.shape[2]
n_channels = images.shape[3]
# Save data
filename = str(src_dir_path() / 'data' / (filename + '.tfrecords'))
if verbose:
print_dynamic('Writing {}'.format(filename))
writer = tf.python_io.TFRecordWriter(filename)
for index in range(num_examples):
image_raw = images[index].tostring()
example = tf.train.Example(features=tf.train.Features(feature={
'height': _int64_feature(height),
'width': _int64_feature(width),
'depth': _int64_feature(n_channels),
'label': _int64_feature(int(labels[index])),
'image_raw': _bytes_feature(image_raw)}))
writer.write(example.SerializeToString())
writer.close()
if verbose:
print_dynamic('Completed successfully!')
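# Illustrative sketch (not part of the original pipeline): how the records written
# by serialize_data() could be read back with the same TF 1.x API used above. The
# function name and the `pixel_dtype` argument are assumptions; `pixel_dtype` must
# match the dtype of the images that were serialized.
def parse_serialized_examples(filename, pixel_dtype):
    for record in tf.python_io.tf_record_iterator(filename):
        # Decode the protobuf message written by the TFRecordWriter
        example = tf.train.Example()
        example.ParseFromString(record)
        features = example.features.feature
        height = features['height'].int64_list.value[0]
        width = features['width'].int64_list.value[0]
        depth = features['depth'].int64_list.value[0]
        label = features['label'].int64_list.value[0]
        # Rebuild the image array from the raw bytes
        image = np.frombuffer(features['image_raw'].bytes_list.value[0],
                              dtype=pixel_dtype).reshape(height, width, depth)
        yield image, label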
if __name__ == '__main__':
# Import MNIST data
(train_images, train_labels, validation_images, validation_labels,
test_images, test_labels) = import_mnist_data(verbose=True)
# Serialize MNIST data
serialize_data(train_images, train_labels, 'train', verbose=True)
serialize_data(validation_images, validation_labels, 'validation',
verbose=True)
serialize_data(test_images, test_labels, 'test', verbose=True)
| bsd-3-clause | -206,103,622,137,575,780 | 31.228792 | 111 | 0.618649 | false |
scientia-jp/db_file_storage | db_file_storage/model_utils.py | 1 | 1700 | # project
from db_file_storage.storage import DatabaseFileStorage
def delete_file_if_needed(instance, filefield_name):
"""
When editing and the filefield is a new file,
delete the previous file (if any) from the database.
Call this function immediately BEFORE saving the instance.
"""
if instance.id:
model_class = type(instance)
# Check if there is a file for the instance in the database
if model_class.objects.filter(pk=instance.pk).exclude(
**{'%s__isnull' % filefield_name: True}
).exclude(
**{'%s__exact' % filefield_name: ''}
).exists():
old_file = getattr(
model_class.objects.only(filefield_name).get(pk=instance.id),
filefield_name
)
else:
old_file = None
# If there is a file, delete it if needed
if old_file:
# When editing and NOT changing the file,
# old_file.name == getattr(instance, filefield_name)
# returns True. In this case, the file must NOT be deleted.
# If the file IS being changed, the comparison returns False.
# In this case, the old file MUST be deleted.
if (old_file.name == getattr(instance, filefield_name)) is False:
DatabaseFileStorage().delete(old_file.name)
def delete_file(instance, filefield_name):
"""
Delete the file (if any) from the database.
Call this function immediately AFTER deleting the instance.
"""
file_instance = getattr(instance, filefield_name)
if file_instance:
DatabaseFileStorage().delete(file_instance.name)
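# Illustrative usage sketch (not part of this module): a hypothetical Django model
# showing where the two helpers above are meant to be called. The model name, field
# name and upload_to path are assumptions for illustration only.
#
#     from django.db import models
#
#     class Document(models.Model):
#         attachment = models.FileField(
#             upload_to='myapp.AttachmentFile/bytes/filename/mimetype')
#
#         def save(self, *args, **kwargs):
#             # Drop the previously stored file when a new upload replaces it
#             delete_file_if_needed(self, 'attachment')
#             super(Document, self).save(*args, **kwargs)
#
#         def delete(self, *args, **kwargs):
#             super(Document, self).delete(*args, **kwargs)
#             # Remove the stored file once the row is gone
#             delete_file(self, 'attachment')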
| gpl-3.0 | -4,666,363,623,960,723,000 | 36.777778 | 77 | 0.604706 | false |
mikewied/perfrunner | perfrunner/helpers/remote.py | 1 | 30033 | import time
from decorator import decorator
from fabric import state
from fabric.api import execute, get, put, run, parallel, settings
from fabric.exceptions import CommandTimeout
from logger import logger
from perfrunner.helpers.misc import uhex
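# The decorators below wrap RemoteHelper methods so that a single call runs the
# method over SSH on a whole group of machines (all cluster nodes, all worker
# clients, all Sync Gateway / Gateload instances) or on one representative host,
# using Fabric's execute()/parallel() machinery.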
@decorator
def all_hosts(task, *args, **kargs):
self = args[0]
return execute(parallel(task), *args, hosts=self.hosts, **kargs)
@decorator
def single_host(task, *args, **kargs):
self = args[0]
with settings(host_string=self.hosts[0]):
return task(*args, **kargs)
@decorator
def all_clients(task, *args, **kargs):
self = args[0]
return execute(parallel(task), *args, hosts=self.cluster_spec.workers, **kargs)
@decorator
def seriesly_host(task, *args, **kargs):
self = args[0]
with settings(host_string=self.test_config.gateload_settings.seriesly_host):
return task(*args, **kargs)
@decorator
def all_gateways(task, *args, **kargs):
self = args[0]
return execute(parallel(task), *args, hosts=self.gateways, **kargs)
@decorator
def all_gateloads(task, *args, **kargs):
self = args[0]
return execute(parallel(task), *args, hosts=self.gateloads, **kargs)
class RemoteHelper(object):
def __new__(cls, cluster_spec, test_config, verbose=False):
if not cluster_spec.ssh_credentials:
return None
state.env.user, state.env.password = cluster_spec.ssh_credentials
state.output.running = verbose
state.output.stdout = verbose
os = cls.detect_os(cluster_spec)
if os == 'Cygwin':
return RemoteWindowsHelper(cluster_spec, test_config, os)
else:
return RemoteLinuxHelper(cluster_spec, test_config, os)
@staticmethod
def detect_os(cluster_spec):
logger.info('Detecting OS')
with settings(host_string=cluster_spec.yield_hostnames().next()):
os = run('python -c "import platform; print platform.dist()[0]"',
pty=False)
if os:
return os
else:
return 'Cygwin'
class RemoteLinuxHelper(object):
ARCH = {'i386': 'x86', 'x86_64': 'x86_64', 'unknown': 'x86_64'}
CB_DIR = '/opt/couchbase'
MONGO_DIR = '/opt/mongodb'
PROCESSES = ('beam.smp', 'memcached', 'epmd', 'cbq-engine', 'mongod')
def __init__(self, cluster_spec, test_config, os):
self.os = os
self.hosts = tuple(cluster_spec.yield_hostnames())
self.cluster_spec = cluster_spec
self.test_config = test_config
self.env = {}
if self.cluster_spec.gateways and test_config is not None:
num_nodes = self.test_config.gateway_settings.num_nodes
self.gateways = self.cluster_spec.gateways[:num_nodes]
self.gateloads = self.cluster_spec.gateloads[:num_nodes]
else:
self.gateways = self.gateloads = None
@staticmethod
def wget(url, outdir='/tmp', outfile=None):
logger.info('Fetching {}'.format(url))
if outfile is not None:
run('wget -nc "{}" -P {} -O {}'.format(url, outdir, outfile))
else:
run('wget -N "{}" -P {}'.format(url, outdir))
@single_host
def detect_pkg(self):
logger.info('Detecting package manager')
if self.os in ('Ubuntu', 'Debian'):
return 'deb'
else:
return 'rpm'
@single_host
def detect_arch(self):
logger.info('Detecting platform architecture')
arch = run('uname -i', pty=False)
return self.ARCH[arch]
@single_host
def build_secondary_index(self, index_nodes, bucket, indexes, fields,
secondarydb, where_map):
logger.info('building secondary indexes')
# Remember what bucket:index was created
bucket_indexes = []
for index, field in zip(indexes, fields):
cmd = "/opt/couchbase/bin/cbindex"
cmd += ' -auth=Administrator:password'
cmd += ' -server {}'.format(index_nodes[0])
cmd += ' -type create -bucket {}'.format(bucket)
cmd += ' -fields={}'.format(field)
if secondarydb:
cmd += ' -using {}'.format(secondarydb)
if index in where_map and field in where_map[index]:
# Partition indexes over index nodes by deploying index with
# where clause on the corresponding index node
where_list = where_map[index][field]
for i, (index_node, where) in enumerate(
zip(index_nodes, where_list)):
# don't taint cmd itself because we need to reuse it.
final_cmd = cmd
index_i = index + "_{}".format(i)
final_cmd += ' -index {}'.format(index_i)
final_cmd += " -where='{}'".format(where)
# Since .format() is sensitive to {}, use % formatting
with_str_template = \
r'{\\\"defer_build\\\":true, \\\"nodes\\\":[\\\"%s\\\"]}'
with_str = with_str_template % index_node
final_cmd += ' -with=\\\"{}\\\"'.format(with_str)
bucket_indexes.append("{}:{}".format(bucket, index_i))
logger.info('submitting cbindex command {}'.format(final_cmd))
status = run(final_cmd, shell_escape=False, pty=False)
if status:
logger.info('cbindex status {}'.format(status))
else:
# no partitions, no where clause
final_cmd = cmd
final_cmd += ' -index {}'.format(index)
with_str = r'{\\\"defer_build\\\":true}'
final_cmd += ' -with=\\\"{}\\\"'.format(with_str)
bucket_indexes.append("{}:{}".format(bucket, index))
logger.info('submitting cbindex command {}'.format(final_cmd))
status = run(final_cmd, shell_escape=False, pty=False)
if status:
logger.info('cbindex status {}'.format(status))
time.sleep(10)
# build indexes
cmdstr = '/opt/couchbase/bin/cbindex -auth="Administrator:password"'
cmdstr += ' -server {}'.format(index_nodes[0])
cmdstr += ' -type build'
cmdstr += ' -indexes {}'.format(",".join(bucket_indexes))
logger.info('cbindex build command {}'.format(cmdstr))
status = run(cmdstr, shell_escape=False, pty=False)
if status:
logger.info('cbindex status {}'.format(status))
@single_host
def detect_openssl(self, pkg):
logger.info('Detecting openssl version')
if pkg == 'rpm':
return run('rpm -q --qf "%{VERSION}" openssl.x86_64')
@all_hosts
def reset_swap(self):
logger.info('Resetting swap')
run('swapoff --all && swapon --all')
@all_hosts
def drop_caches(self):
logger.info('Dropping memory cache')
run('sync && echo 3 > /proc/sys/vm/drop_caches')
@all_hosts
def set_swappiness(self):
logger.info('Changing swappiness to 0')
run('sysctl vm.swappiness=0')
@all_hosts
def disable_thp(self):
for path in (
'/sys/kernel/mm/transparent_hugepage/enabled',
'/sys/kernel/mm/redhat_transparent_hugepage/enabled',
):
run('echo never > {}'.format(path), quiet=True)
@all_hosts
def collect_info(self):
logger.info('Running cbcollect_info')
run('rm -f /tmp/*.zip')
fname = '/tmp/{}.zip'.format(uhex())
try:
r = run('{}/bin/cbcollect_info {}'.format(self.CB_DIR, fname),
warn_only=True, timeout=1200)
except CommandTimeout:
logger.error('cbcollect_info timed out')
return
if not r.return_code:
get('{}'.format(fname))
run('rm -f {}'.format(fname))
@all_hosts
def clean_data(self):
for path in self.cluster_spec.paths:
run('rm -fr {}/*'.format(path))
run('rm -fr {}'.format(self.CB_DIR))
@all_hosts
def kill_processes(self):
logger.info('Killing {}'.format(', '.join(self.PROCESSES)))
run('killall -9 {}'.format(' '.join(self.PROCESSES)),
warn_only=True, quiet=True)
@all_hosts
def uninstall_couchbase(self, pkg):
logger.info('Uninstalling Couchbase Server')
if pkg == 'deb':
run('yes | apt-get remove couchbase-server', quiet=True)
run('yes | apt-get remove couchbase-server-community', quiet=True)
else:
run('yes | yum remove couchbase-server', quiet=True)
run('yes | yum remove couchbase-server-community', quiet=True)
@all_hosts
def install_couchbase(self, pkg, url, filename, version=None):
self.wget(url, outdir='/tmp')
logger.info('Installing Couchbase Server')
if pkg == 'deb':
run('yes | apt-get install gdebi')
run('yes | numactl --interleave=all gdebi /tmp/{}'.format(filename))
else:
run('yes | numactl --interleave=all rpm -i /tmp/{}'.format(filename))
@all_hosts
def restart(self):
logger.info('Restarting server')
environ = ' '.join('{}={}'.format(k, v) for (k, v) in self.env.items())
run(environ +
' numactl --interleave=all /etc/init.d/couchbase-server restart',
pty=False)
def restart_with_alternative_num_vbuckets(self, num_vbuckets):
logger.info('Changing number of vbuckets to {}'.format(num_vbuckets))
self.env['COUCHBASE_NUM_VBUCKETS'] = num_vbuckets
self.restart()
def restart_with_alternative_num_cpus(self, num_cpus):
logger.info('Changing number of front-end memcached threads to {}'
.format(num_cpus))
self.env['MEMCACHED_NUM_CPUS'] = num_cpus
self.restart()
def restart_with_sfwi(self):
logger.info('Enabling +sfwi')
self.env['COUCHBASE_NS_SERVER_VM_EXTRA_ARGS'] = '["+sfwi", "100", "+sbwt", "long"]'
self.restart()
def restart_with_tcmalloc_aggressive_decommit(self):
logger.info('Enabling TCMalloc aggressive decommit')
self.env['TCMALLOC_AGGRESSIVE_DECOMMIT'] = 't'
self.restart()
@all_hosts
def disable_moxi(self):
logger.info('Disabling moxi')
run('rm /opt/couchbase/bin/moxi')
run('killall -9 moxi')
@all_hosts
def stop_server(self):
logger.info('Stopping Couchbase Server')
getosname = run('uname -a|cut -c1-6')
        if getosname.find("CYGWIN") != -1:
run('net stop CouchbaseServer')
else:
run('/etc/init.d/couchbase-server stop', pty=False)
@all_hosts
def start_server(self):
logger.info('Starting Couchbase Server')
getosname = run('uname -a|cut -c1-6')
        if getosname.find("CYGWIN") != -1:
run('net start CouchbaseServer')
else:
run('/etc/init.d/couchbase-server start', pty=False)
def detect_if(self):
for iface in ('em1', 'eth5', 'eth0'):
result = run('grep {} /proc/net/dev'.format(iface),
warn_only=True, quiet=True)
if not result.return_code:
return iface
def detect_ip(self, _if):
ifconfing = run('ifconfig {} | grep "inet addr"'.format(_if))
return ifconfing.split()[1].split(':')[1]
@all_hosts
def disable_wan(self):
logger.info('Disabling WAN effects')
_if = self.detect_if()
run('tc qdisc del dev {} root'.format(_if), warn_only=True, quiet=True)
@all_hosts
def enable_wan(self):
logger.info('Enabling WAN effects')
_if = self.detect_if()
for cmd in (
'tc qdisc add dev {} handle 1: root htb',
'tc class add dev {} parent 1: classid 1:1 htb rate 1gbit',
'tc class add dev {} parent 1:1 classid 1:11 htb rate 1gbit',
'tc qdisc add dev {} parent 1:11 handle 10: netem delay 40ms 2ms '
'loss 0.005% 50% duplicate 0.005% corrupt 0.005%',
):
run(cmd.format(_if))
@all_hosts
def filter_wan(self, src_list, dest_list):
logger.info('Filtering WAN effects')
_if = self.detect_if()
if self.detect_ip(_if) in src_list:
_filter = dest_list
else:
_filter = src_list
for ip in _filter:
run('tc filter add dev {} protocol ip prio 1 u32 '
'match ip dst {} flowid 1:11'.format(_if, ip))
@single_host
def detect_number_cores(self):
logger.info('Detecting number of cores')
return int(run('nproc', pty=False))
@all_hosts
def detect_core_dumps(self):
# Based on kernel.core_pattern = /tmp/core.%e.%p.%h.%t
r = run('ls /tmp/core*', quiet=True)
if not r.return_code:
return r.split()
else:
return []
@all_hosts
def tune_log_rotation(self):
logger.info('Tune log rotation so that it happens less frequently')
run('sed -i "s/num_files, [0-9]*/num_files, 50/" '
'/opt/couchbase/etc/couchbase/static_config')
@all_hosts
def start_cbq(self):
logger.info('Starting cbq-engine')
return run('nohup cbq-engine '
'-couchbase=http://127.0.0.1:8091 -dev=true -log=HTTP '
'&> /tmp/cbq.log &', pty=False)
@all_hosts
def collect_cbq_logs(self):
logger.info('Getting cbq-engine logs')
get('/tmp/cbq.log')
@all_clients
def cbbackup(self, wrapper=False, mode=None): # full, diff, accu
backup_path = self.cluster_spec.config.get('storage', 'backup_path')
logger.info('cbbackup into %s' % backup_path)
postfix = ''
if mode:
postfix = '-m %s' % mode
if not mode or mode in ['full']:
run('rm -rf %s' % backup_path)
if wrapper:
for master in self.cluster_spec.yield_masters():
cmd = '/opt/couchbase/bin/cbbackupwrapper' \
' http://%s:8091 %s -u %s -p %s %s' \
% (master.split(':')[0], backup_path,
self.cluster_spec.rest_credentials[0],
self.cluster_spec.rest_credentials[1], postfix)
logger.info(cmd)
run(cmd)
else:
for master in self.cluster_spec.yield_masters():
if not mode:
run('/opt/couchbase/bin/backup create --dir %s --name default' % backup_path)
cmd = '/opt/couchbase/bin/backup cluster --dir %s --name default ' \
'--host http://%s:8091 --username %s --password %s' \
% (backup_path, master.split(':')[0],
self.cluster_spec.rest_credentials[0],
self.cluster_spec.rest_credentials[1])
logger.info(cmd)
run(cmd)
return round(float(run('du -sh --block-size=1M %s' % backup_path).
split(' ')[0]) / 1024, 1) # in Gb
@all_clients
def cbrestore(self, wrapper=False):
restore_path = self.cluster_spec.config.get('storage', 'backup_path')
logger.info('restore from %s' % restore_path)
if wrapper:
for master in self.cluster_spec.yield_masters():
cmd = 'cd /opt/couchbase/bin && ./cbrestorewrapper %s ' \
'http://%s:8091 -u Administrator -p password' \
% (restore_path, master.split(':')[0])
logger.info(cmd)
run(cmd)
else:
for master in self.cluster_spec.yield_masters():
dates = run('ls %s/default/' % restore_path).split()
for i in range(len(dates) - 1):
print i
start_date = end_date = dates[i]
if i > 0:
start_date = dates[i-1]
cmd = '/opt/couchbase/bin/backup restore --dir %s --name default ' \
'--host http://%s:8091 --username %s --password %s --start %s --end %s' \
% (restore_path, master.split(':')[0],
self.cluster_spec.rest_credentials[0],
self.cluster_spec.rest_credentials[1], start_date, end_date)
logger.info(cmd)
run(cmd)
@seriesly_host
def restart_seriesly(self):
logger.info('Cleaning up and restarting seriesly')
run('killall -9 sample seriesly', quiet=True)
run('rm -f *.txt *.log *.gz *.json *.out /root/seriesly-data/*',
warn_only=True)
run('nohup seriesly -flushDelay=1s -root=/root/seriesly-data '
'&> seriesly.log &', pty=False)
@seriesly_host
def start_sampling(self):
for i, gateway_ip in enumerate(self.gateways, start=1):
logger.info('Starting sampling gateway_{}'.format(i))
run('nohup sample -v '
'http://{}:4985/_expvar http://localhost:3133/gateway_{} '
'&> sample.log &'.format(gateway_ip, i), pty=False)
for i, gateload_ip in enumerate(self.gateloads, start=1):
logger.info('Starting sampling gateload_{}'.format(i))
run('nohup sample -v '
'http://{}:9876/debug/vars http://localhost:3133/gateload_{} '
'&> sample.log &'.format(gateload_ip, i), pty=False)
@all_gateways
def install_gateway(self, url, filename):
logger.info('Installing Sync Gateway package - {}'.format(filename))
self.wget(url, outdir='/tmp')
run('yes | rpm -i /tmp/{}'.format(filename))
@all_gateways
def install_gateway_from_source(self, commit_hash):
logger.info('Installing Sync Gateway from source - {}'.format(commit_hash))
put('scripts/install_sgw_from_source.sh', '/root/install_sgw_from_source.sh')
run('chmod 777 /root/install_sgw_from_source.sh')
run('/root/install_sgw_from_source.sh {}'.format(commit_hash), pty=False)
@all_gateways
def uninstall_gateway(self):
logger.info('Uninstalling Sync Gateway package')
run('yes | yum remove couchbase-sync-gateway')
@all_gateways
def kill_processes_gateway(self):
logger.info('Killing Sync Gateway')
run('killall -9 sync_gateway sgw_test_info.sh sar', quiet=True)
@all_gateways
def clean_gateway(self):
logger.info('Cleaning up Gateway')
run('rm -f *.txt *.log *.gz *.json *.out *.prof', quiet=True)
@all_gateways
def start_gateway(self):
logger.info('Starting Sync Gateway instances')
_if = self.detect_if()
local_ip = self.detect_ip(_if)
index = self.gateways.index(local_ip)
source_config = 'templates/gateway_config_{}.json'.format(index)
put(source_config, '/root/gateway_config.json')
godebug = self.test_config.gateway_settings.go_debug
args = {
'ulimit': 'ulimit -n 65536',
'godebug': godebug,
'sgw': '/opt/couchbase-sync-gateway/bin/sync_gateway',
'config': '/root/gateway_config.json',
'log': '/root/gateway.log',
}
command = '{ulimit}; GODEBUG={godebug} nohup {sgw} {config} > {log} 2>&1 &'.format(**args)
logger.info("Command: {}".format(command))
run(command, pty=False)
@all_gateways
def start_test_info(self):
logger.info('Starting Sync Gateway sgw_test_info.sh')
put('scripts/sgw_test_config.sh', '/root/sgw_test_config.sh')
put('scripts/sgw_test_info.sh', '/root/sgw_test_info.sh')
run('chmod 777 /root/sgw_*.sh')
run('nohup /root/sgw_test_info.sh &> sgw_test_info.txt &', pty=False)
@all_gateways
def collect_info_gateway(self):
_if = self.detect_if()
local_ip = self.detect_ip(_if)
index = self.gateways.index(local_ip)
logger.info('Collecting diagnostic information from sync gateway_{} {}'.format(index, local_ip))
run('rm -f gateway.log.gz', warn_only=True)
run('gzip gateway.log', warn_only=True)
put('scripts/sgw_check_logs.sh', '/root/sgw_check_logs.sh')
run('chmod 777 /root/sgw_*.sh')
run('/root/sgw_check_logs.sh gateway > sgw_check_logs.out', warn_only=True)
self.try_get('gateway.log.gz', 'gateway.log_{}.gz'.format(index))
self.try_get('test_info.txt', 'test_info_{}.txt'.format(index))
self.try_get('test_info_sar.txt', 'test_info_sar_{}.txt'.format(index))
self.try_get('sgw_test_info.txt', 'sgw_test_info_{}.txt'.format(index))
self.try_get('gateway_config.json', 'gateway_config_{}.json'.format(index))
self.try_get('sgw_check_logs.out', 'sgw_check_logs_gateway_{}.out'.format(index))
@all_gateloads
def uninstall_gateload(self):
logger.info('Removing Gateload binaries')
run('rm -f /opt/gocode/bin/gateload', quiet=True)
@all_gateloads
def install_gateload(self):
logger.info('Installing Gateload')
run('go get -u github.com/couchbaselabs/gateload')
@all_gateloads
def kill_processes_gateload(self):
logger.info('Killing Gateload processes')
run('killall -9 gateload', quiet=True)
@all_gateloads
def clean_gateload(self):
logger.info('Cleaning up Gateload')
run('rm -f *.txt *.log *.gz *.json *.out', quiet=True)
@all_gateloads
def start_gateload(self):
logger.info('Starting Gateload')
_if = self.detect_if()
local_ip = self.detect_ip(_if)
idx = self.gateloads.index(local_ip)
config_fname = 'templates/gateload_config_{}.json'.format(idx)
put(config_fname, '/root/gateload_config.json')
put('scripts/sgw_check_logs.sh', '/root/sgw_check_logs.sh')
run('chmod 777 /root/sgw_*.sh')
run('ulimit -n 65536; nohup /opt/gocode/bin/gateload '
'-workload /root/gateload_config.json &>/root/gateload.log&',
pty=False)
@all_gateloads
def collect_info_gateload(self):
_if = self.detect_if()
local_ip = self.detect_ip(_if)
idx = self.gateloads.index(local_ip)
logger.info('Collecting diagnostic information from gateload_{} {}'.format(idx, local_ip))
run('rm -f gateload.log.gz', warn_only=True)
run('gzip gateload.log', warn_only=True)
put('scripts/sgw_check_logs.sh', '/root/sgw_check_logs.sh')
run('chmod 777 /root/sgw_*.sh')
run('/root/sgw_check_logs.sh gateload > sgw_check_logs.out', warn_only=True)
self.try_get('gateload.log.gz', 'gateload.log-{}.gz'.format(idx))
self.try_get('gateload_config.json', 'gateload_config_{}.json'.format(idx))
self.try_get('gateload_expvars.json', 'gateload_expvar_{}.json'.format(idx))
self.try_get('sgw_check_logs.out', 'sgw_check_logs_gateload_{}.out'.format(idx))
@all_gateways
def collect_profile_data_gateways(self):
"""
Collect CPU and heap profile raw data as well as rendered pdfs
from go tool pprof
"""
_if = self.detect_if()
local_ip = self.detect_ip(_if)
idx = self.gateways.index(local_ip)
logger.info('Collecting profiling data from gateway_{} {}'.format(idx, local_ip))
put('scripts/sgw_collect_profile.sh', '/root/sgw_collect_profile.sh')
run('chmod 777 /root/sgw_collect_profile.sh')
run('/root/sgw_collect_profile.sh /opt/couchbase-sync-gateway/bin/sync_gateway /root', pty=False)
self.try_get('profile_data.tar.gz', 'profile_data.tar-{}.gz'.format(idx))
@all_hosts
def clean_mongodb(self):
for path in self.cluster_spec.paths:
run('rm -fr {}/*'.format(path))
run('rm -fr {}'.format(self.MONGO_DIR))
@all_hosts
def install_mongodb(self, url):
self.wget(url, outdir='/tmp')
archive = url.split('/')[-1]
logger.info('Installing MongoDB')
run('mkdir {}'.format(self.MONGO_DIR))
run('tar xzf {} -C {} --strip-components 1'.format(archive,
self.MONGO_DIR))
run('numactl --interleave=all {}/bin/mongod '
'--dbpath={} --fork --logpath /tmp/mongodb.log'
.format(self.MONGO_DIR, self.cluster_spec.paths[0]))
def try_get(self, remote_path, local_path=None):
try:
get(remote_path, local_path)
        except Exception:
logger.warn("Exception calling get({}, {}). Ignoring.".format(remote_path, local_path))
@single_host
def install_beer_samples(self):
logger.info('run install_beer_samples')
cmd = '/opt/couchbase/bin/cbdocloader -n localhost:8091 -u Administrator -p password -b beer-sample /opt/couchbase/samples/beer-sample.zip'
result = run(cmd, pty=False)
return result
class RemoteWindowsHelper(RemoteLinuxHelper):
CB_DIR = '/cygdrive/c/Program\ Files/Couchbase/Server'
VERSION_FILE = '/cygdrive/c/Program Files/Couchbase/Server/VERSION.txt'
MAX_RETRIES = 5
TIMEOUT = 600
SLEEP_TIME = 60 # crutch
PROCESSES = ('erl*', 'epmd*')
@staticmethod
def exists(fname):
r = run('test -f "{}"'.format(fname), warn_only=True, quiet=True)
return not r.return_code
@single_host
def detect_pkg(self):
logger.info('Detecting package manager')
return 'exe'
@single_host
def detect_openssl(self, pkg):
pass
def reset_swap(self):
pass
def drop_caches(self):
pass
def set_swappiness(self):
pass
def disable_thp(self):
pass
def detect_ip(self):
return run('ipconfig | findstr IPv4').split(': ')[1]
@all_hosts
def collect_info(self):
logger.info('Running cbcollect_info')
run('rm -f *.zip')
fname = '{}.zip'.format(uhex())
r = run('{}/bin/cbcollect_info.exe {}'.format(self.CB_DIR, fname),
warn_only=True)
if not r.return_code:
get('{}'.format(fname))
run('rm -f {}'.format(fname))
@all_hosts
def clean_data(self):
for path in self.cluster_spec.paths:
path = path.replace(':', '').replace('\\', '/')
path = '/cygdrive/{}'.format(path)
run('rm -fr {}/*'.format(path))
@all_hosts
def kill_processes(self):
logger.info('Killing {}'.format(', '.join(self.PROCESSES)))
run('taskkill /F /T /IM {}'.format(' /IM '.join(self.PROCESSES)),
warn_only=True, quiet=True)
def kill_installer(self):
run('taskkill /F /T /IM setup.exe', warn_only=True, quiet=True)
def clean_installation(self):
with settings(warn_only=True):
run('rm -fr {}'.format(self.CB_DIR))
@all_hosts
def uninstall_couchbase(self, pkg):
local_ip = self.detect_ip()
logger.info('Uninstalling Package on {}'.format(local_ip))
if self.exists(self.VERSION_FILE):
for retry in range(self.MAX_RETRIES):
self.kill_installer()
try:
r = run('./setup.exe -s -f1"C:\\uninstall.iss"',
warn_only=True, quiet=True, timeout=self.TIMEOUT)
if not r.return_code:
t0 = time.time()
while self.exists(self.VERSION_FILE) and \
time.time() - t0 < self.TIMEOUT:
logger.info('Waiting for Uninstaller to finish on {}'.format(local_ip))
time.sleep(5)
break
else:
logger.warn('Uninstall script failed to run on {}'.format(local_ip))
except CommandTimeout:
logger.warn("Uninstall command timed out - retrying on {} ({} of {})"
.format(local_ip, retry, self.MAX_RETRIES))
continue
else:
logger.warn('Uninstaller failed with no more retries on {}'
.format(local_ip))
else:
logger.info('Package not present on {}'.format(local_ip))
logger.info('Cleaning registry on {}'.format(local_ip))
self.clean_installation()
@staticmethod
def put_iss_files(version):
logger.info('Copying {} ISS files'.format(version))
put('scripts/install_{}.iss'.format(version),
'/cygdrive/c/install.iss')
put('scripts/uninstall_{}.iss'.format(version),
'/cygdrive/c/uninstall.iss')
@all_hosts
def install_couchbase(self, pkg, url, filename, version=None):
self.kill_installer()
run('rm -fr setup.exe')
self.wget(url, outfile='setup.exe')
run('chmod +x setup.exe')
self.put_iss_files(version)
local_ip = self.detect_ip()
logger.info('Installing Package on {}'.format(local_ip))
try:
run('./setup.exe -s -f1"C:\\install.iss"')
except:
logger.error('Install script failed on {}'.format(local_ip))
raise
while not self.exists(self.VERSION_FILE):
logger.info('Waiting for Installer to finish on {}'.format(local_ip))
time.sleep(5)
logger.info('Sleeping for {} seconds'.format(self.SLEEP_TIME))
time.sleep(self.SLEEP_TIME)
def restart(self):
pass
def restart_with_alternative_num_vbuckets(self, num_vbuckets):
pass
def disable_wan(self):
pass
def enable_wan(self):
pass
def filter_wan(self, *args):
pass
def tune_log_rotation(self):
pass
| apache-2.0 | -4,513,793,339,710,990,000 | 35.895577 | 148 | 0.561116 | false |
Merinorus/adaisawesome | Homework/02 - Data from the Web/Question 2.py | 1 | 14839 |
# coding: utf-8
# Obtain all the data for the Master students, starting from 2007. Compute how many months it took each master student to complete their master, for those that completed it. Partition the data between male and female students, and compute the average -- is the difference in average statistically significant?
#
# Notice that master students' data is more tricky than the bachelors' one, as there are many missing records in the IS-Academia database. Therefore, try to guess how much time a master student spent at EPFL by at least checking the distance in months between Master semestre 1 and Master semestre 2. If the Mineur field is not empty, the student should also appear registered in Master semestre 3. Last but not the least, don't forget to check if the student has an entry also in the Projet Master tables. Once you can handle well this data, compute the "average stay at EPFL" for master students. Now extract all the students with a Spécialisation and compute the "average stay" per each category of that attribute -- compared to the general average, can you find any specialization for which the difference in average is statistically significant?
# In[1]:
# Requests : make http requests to websites
import requests
# BeautifulSoup : parser to manipulate easily html content
from bs4 import BeautifulSoup
# Regular expressions
import re
# Aren't pandas awesome ?
import pandas as pd
# Let's get the first page in which we will be able to extract some interesting content !
# In[2]:
# Ask for the first page on IS Academia. To see it, just type it on your browser address bar : http://isa.epfl.ch/imoniteur_ISAP/!GEDPUBLICREPORTS.filter?ww_i_reportModel=133685247
r = requests.get('http://isa.epfl.ch/imoniteur_ISAP/!GEDPUBLICREPORTS.filter?ww_i_reportModel=133685247')
htmlContent = BeautifulSoup(r.content, 'html.parser')
# In[3]:
print(htmlContent.prettify())
# Now we need to make other requests to IS Academia, which specify every parameter : computer science students, all the years, and all bachelor semester (which are a couple of two values : pedagogic period and semester type). Thus, we're going to get all the parameters we need to make the next request :
# In[4]:
# We first get the "Computer science" value
computerScienceField = htmlContent.find('option', text='Informatique')
computerScienceField
# In[5]:
computerScienceValue = computerScienceField.get('value')
computerScienceValue
# In[6]:
# Then, we're going to need all the academic years values.
academicYearsField = htmlContent.find('select', attrs={'name':'ww_x_PERIODE_ACAD'})
academicYearsSet = academicYearsField.findAll('option')
# Since there are several years to remember, we're storing all of them in a table to use them later
academicYearValues = []
# We'll put the textual content in a table aswell ("Master semestre 1", "Master semestre 2"...)
academicYearContent = []
for option in academicYearsSet:
value = option.get('value')
# However, we don't want any "null" value
if value != 'null':
academicYearValues.append(value)
academicYearContent.append(option.text)
# In[7]:
# Now, we have all the academic years that might interest us. We wrangle them a little bit to be able to make requests more easily later.
academicYearValues_series = pd.Series(academicYearValues)
academicYearContent_series = pd.Series(academicYearContent)
academicYear_df = pd.concat([academicYearContent_series, academicYearValues_series], axis = 1)
academicYear_df.columns= ['Academic_year', 'Value']
academicYear_df = academicYear_df.sort_values(['Academic_year', 'Value'], ascending=[1, 0])
academicYear_df
# In[8]:
# Then, let's get all the pedagogic periods we need. It's a little bit more complicated here because we need to link the pedagogic period with a season (eg : Bachelor 1 is autumn, Bachelor 2 is spring etc.)
# Thus, we need more than the pedagogic values. For doing some tests to associate them with the right season, we need the actual textual value ("Bachelor semestre 1", "Bachelor semestre 2" etc.)
pedagogicPeriodsField = htmlContent.find('select', attrs={'name':'ww_x_PERIODE_PEDAGO'})
pedagogicPeriodsSet = pedagogicPeriodsField.findAll('option')
# Same as above, we'll store the values in a table
pedagogicPeriodValues = []
# We'll put the textual content in a table aswell ("Master semestre 1", "Master semestre 2"...)
pedagogicPeriodContent = []
for option in pedagogicPeriodsSet:
value = option.get('value')
if value != 'null':
pedagogicPeriodValues.append(value)
pedagogicPeriodContent.append(option.text)
# In[9]:
# Let's make the values and content meet each other
pedagogicPeriodContent_series = pd.Series(pedagogicPeriodContent)
pedagogicPeriodValues_series = pd.Series(pedagogicPeriodValues)
pedagogicPeriod_df = pd.concat([pedagogicPeriodContent_series, pedagogicPeriodValues_series], axis = 1);
pedagogicPeriod_df.columns = ['Pedagogic_period', 'Value']
# In[10]:
# We keep all semesters related to master students
pedagogicPeriod_df_master = pedagogicPeriod_df[[period.startswith('Master') for period in pedagogicPeriod_df.Pedagogic_period]]
pedagogicPeriod_df_minor = pedagogicPeriod_df[[period.startswith('Mineur') for period in pedagogicPeriod_df.Pedagogic_period]]
pedagogicPeriod_df_project = pedagogicPeriod_df[[period.startswith('Projet Master') for period in pedagogicPeriod_df.Pedagogic_period]]
pedagogicPeriod_df = pd.concat([pedagogicPeriod_df_master, pedagogicPeriod_df_minor, pedagogicPeriod_df_project])
pedagogicPeriod_df
# In[11]:
# Lastly, we need to extract the values associated with autumn and spring semesters.
semesterTypeField = htmlContent.find('select', attrs={'name':'ww_x_HIVERETE'})
semesterTypeSet = semesterTypeField.findAll('option')
# Again, we need to store the values in a table
semesterTypeValues = []
# We'll put the textual content in a table aswell
semesterTypeContent = []
for option in semesterTypeSet:
value = option.get('value')
if value != 'null':
semesterTypeValues.append(value)
semesterTypeContent.append(option.text)
# In[12]:
# Here are the values for autumn and spring semester :
semesterTypeValues_series = pd.Series(semesterTypeValues)
semesterTypeContent_series = pd.Series(semesterTypeContent)
semesterType_df = pd.concat([semesterTypeContent_series, semesterTypeValues_series], axis = 1)
semesterType_df.columns = ['Semester_type', 'Value']
semesterType_df
# Now, we got all the information to get all the master students !
# Let's make all the requests we need to build our data.
# We will try to do requests such as :
# - Get students from master semester 1 of 2007-2008
# - ...
# - Get students from master semester 4 of 2007-2008
# - Get students from mineur semester 1 of 2007-2008
# - Get students from mineur semester 2 of 2007-2008
# - Get students from master project semester 1 of 2007-2008
# - Get students from master project semester 2 of 2007-2008
#
# ... and so on for each academic year until 2015-2016, the last complete year.
# We can even take the first semester of 2016-2017 into account, to check if some students we thought had finished last year are actually still studying. This can be for different reasons : doing a mineur, a project, repeating a semester...
# We can ask for a list of student in two formats : HTML or CSV.
# We chose to get them in HTML format because this is the first time that we wrangle data in HTML format, and that may be really useful to learn in order to work with most websites in the future !
# The request sent by the browser to IS Academia, to get a list of student in a HTML format, looks like this :
# http://isa.epfl.ch/imoniteur_ISAP/!GEDPUBLICREPORTS.html?arg1=xxx&arg2=yyy
# Where "xxx" is the value associated with the argument named "arg1", "yyy" the value associated with the argument named "arg2", etc. Such requests usually carry a lot more arguments.
# For instance, we tried to send a request as a "human" through our browser and intercepted it with Postman interceptor.
# We found that the folowing arguments have to be sent :
# ww_x_GPS = -1
# ww_i_reportModel = 133685247
# ww_i_reportModelXsl = 133685270
# ww_x_UNITE_ACAD = 249847 (which is the value of computer science !)
# ww_x_PERIODE_ACAD = X (eg : the value corresponding to 2007-2008 would be 978181)
# ww_x_PERIODE_PEDAGO = Y (eg : 2230106 for Master semestre 1)
# ww_x_HIVERETE = Z (eg : 2936286 for autumn semester)
#
# The last three values X, Y and Z must be replaced with the ones we extracted previously. For instance, if we want to get students from Master, semester 1 (which is necessarily autumn semester) of 2007-2008, the "GET Request" would be the following :
#
# http://isa.epfl.ch/imoniteur_ISAP/!GEDPUBLICREPORTS.html?ww_x_GPS=-1&ww_i_reportModel=133685247&ww_i_reportModelXsl=133685270&ww_x_UNITE_ACAD=249847&ww_x_PERIODE_ACAD=978181&ww_x_PERIODE_PEDAGO=2230106&ww_x_HIVERETE=2936286
#
# So let's cook all the requests we're going to send !
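# Side note (illustrative sketch, not used below): the same request could also be
# built by passing a parameter dictionary to requests.get() instead of concatenating
# the URL by hand. The variable names refer to the values extracted above and
# defined in the loop below.
#
# params = {
#     'ww_x_GPS': -1,
#     'ww_i_reportModel': 133685247,
#     'ww_i_reportModelXsl': 133685270,
#     'ww_x_UNITE_ACAD': computerScienceValue,
#     'ww_x_PERIODE_ACAD': academicYear_value,       # e.g. 978181 for 2007-2008
#     'ww_x_PERIODE_PEDAGO': pegagogicPeriod_Value,  # e.g. 2230106 for Master semestre 1
#     'ww_x_HIVERETE': semester_Value,               # autumn or spring semester value
# }
# r = requests.get('http://isa.epfl.ch/imoniteur_ISAP/!GEDPUBLICREPORTS.html', params=params)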
# In[13]:
# Let's put the semester types aside, because we're going to need them
autumn_semester_value = semesterType_df.loc[semesterType_df['Semester_type'] == 'Semestre d\'automne', 'Value']
autumn_semester_value = autumn_semester_value.iloc[0]
spring_semester_value = semesterType_df.loc[semesterType_df['Semester_type'] == 'Semestre de printemps', 'Value']
spring_semester_value = spring_semester_value.iloc[0]
# In[14]:
# Here is the list of the GET requests we will sent to IS Academia
requestsToISAcademia = []
# We'll also keep the information associated with each request, to help wrangle the data later :
academicYearRequests = []
pedagogicPeriodRequests = []
semesterTypeRequests = []
# Go all over the years ('2007-2008', '2008-2009' and so on)
for academicYear_row in academicYear_df.itertuples(index=True, name='Academic_year'):
# The year (eg: '2007-2008')
academicYear = academicYear_row.Academic_year
# The associated value (eg: '978181')
academicYear_value = academicYear_row.Value
# We get all the pedagogic periods associated with this academic year
for pegagogicPeriod_row in pedagogicPeriod_df.itertuples(index=True, name='Pedagogic_period'):
# The period (eg: 'Master semestre 1')
pedagogicPeriod = pegagogicPeriod_row.Pedagogic_period
# The associated value (eg: '2230106')
pegagogicPeriod_Value = pegagogicPeriod_row.Value
# We need to associate the corresponding semester type (eg: Master semester 1 is autumn, but Master semester 2 will be spring)
if (pedagogicPeriod.endswith('1') or pedagogicPeriod.endswith('3') or pedagogicPeriod.endswith('automne')):
semester_Value = autumn_semester_value
semester = 'Autumn'
else:
semester_Value = spring_semester_value
semester = 'Spring'
# This print line is only for debugging if you want to check something
# print("academic year = " + academicYear_value + ", pedagogic value = " + pegagogicPeriod_Value + ", pedagogic period is " + pedagogicPeriod + " (semester type value = " + semester_Value + ")")
# We're ready to cook the request !
request = 'http://isa.epfl.ch/imoniteur_ISAP/!GEDPUBLICREPORTS.html?ww_x_GPS=-1&ww_i_reportModel=133685247&ww_i_reportModelXsl=133685270&ww_x_UNITE_ACAD=' + computerScienceValue
request = request + '&ww_x_PERIODE_ACAD=' + academicYear_value
request = request + '&ww_x_PERIODE_PEDAGO=' + pegagogicPeriod_Value
request = request + '&ww_x_HIVERETE=' + semester_Value
# Add the newly created request to our wish list...
requestsToISAcademia.append(request)
# And we save the corresponding information for each request
pedagogicPeriodRequests.append(pedagogicPeriod)
academicYearRequests.append(academicYear)
semesterTypeRequests.append(semester)
# In[15]:
# Here is the list of all the requests we have to send !
# requestsToISAcademia
# In[16]:
# Here are the corresponding years for each request
# academicYearRequests
# In[17]:
# Same for associated pedagogic periods
# pedagogicPeriodRequests
# In[18]:
# Last but not the least, the semester types
# semesterTypeRequests
# In[19]:
academicYearRequests_series = pd.Series(academicYearRequests)
pedagogicPeriodRequests_series = pd.Series(pedagogicPeriodRequests)
requestsToISAcademia_series = pd.Series(requestsToISAcademia)
# Let's summarize everything in a dataframe...
requests_df = pd.concat([academicYearRequests_series, pedagogicPeriodRequests_series, requestsToISAcademia_series], axis = 1)
requests_df.columns = ['Academic_year', 'Pedagogic_period', 'Request']
requests_df
# In[ ]:
# The requests are now ready to be sent to IS Academia. Let's try it out !
# TIME OUT : We stopped right here for our homework. What is below should look like the beginning of a loop that gets students lists from IS Academia. It's not finished at all :(
# In[20]:
# WARNING : NEXT LINE IS COMMENTED FOR DEBGUGGING THE FIRST REQUEST ONLY. UNCOMMENT IT AND INDENT THE CODE CORRECTLY TO MAKE ALL THE REQUESTS
#for request in requestsToISAcademia: # LINE TO UNCOMMENT TO SEND ALL REQUESTS
request = requestsToISAcademia[0] # LINE TO COMMENT TO SEND ALL REQUESTS
print(request)
# Send the request to IS Academia
r = requests.get(request)
# Here is the HTML content of IS Academia's response
htmlContent = BeautifulSoup(r.content, 'html.parser')
# Let's extract some data...
computerScienceField = htmlContent.find('option', text='Informatique')
# In[21]:
# Getting the table of students
# Let's make the columns
columns = []
table = htmlContent.find('table')
th = table.find('th', text='Civilité')
columns.append(th.text)
# Go through the table until the last column
while th.findNext('').name == 'th':
th = th.findNext('')
columns.append(th.text)
# This array will contain all the students
studentsTable = []
# DON'T RUN THE NEXT CELL OR IT WILL CRASH ! :x
# In[22]:
# Getting the information about the student we're "looping on"
currentStudent = []
tr = th.findNext('tr')
children = tr.children
for child in children:
currentStudent.append(child.text)
# Add the student to the array
studentsTable.append(currentStudent)
# In[23]:
a = tr.findNext('tr')
a
# In[ ]:
while tr.findNext('tr') is not None:
    # Move to the next row (tr, not th, otherwise we would keep re-reading the first row)
    tr = tr.findNext('tr')
    # Start a fresh record for each student
    currentStudent = []
    children = tr.children
    for child in children:
        currentStudent.append(child.text)
    studentsTable.append(currentStudent)
studentsTable
# In[ ]:
#tr = th.parent
#td = th.findNext('td')
#td.text
#th.findNext('th')
#th.findNext('th')
#tr = tr.findNext('tr')
#tr
# In[ ]:
print(htmlContent.prettify())
| gpl-3.0 | 7,325,369,395,381,894,000 | 38.147757 | 850 | 0.737009 | false |
amitjamadagni/sympy | sympy/plotting/experimental_lambdify.py | 1 | 23947 | """ rewrite of lambdify - This stuff is not stable at all.
It is for internal use in the new plotting module.
It may (will! see the Q'n'A in the source) be rewritten.
It's completely self contained. Especially it does not use lambdarepr.
It does not aim to replace the current lambdify. Most importantly it will never
ever support anything else than sympy expressions (no Matrices, dictionaries
and so on).
"""
import re
from sympy import Symbol, NumberSymbol, I, zoo, oo
# We parse the expression string into a tree that identifies functions. Then
# we translate the names of the functions and we translate also some strings
# that are not names of functions (all this according to translation
# dictionaries).
# If the translation goes to another module (like numpy) the
# module is imported and 'func' is translated to 'module.func'.
# If a function can not be translated, the inner nodes of that part of the
# tree are not translated. So if we have Integral(sqrt(x)), sqrt is not
# translated to np.sqrt and the Integral does not crash.
# A namespace for all this is generated by crawling the (func, args) tree of
# the expression. The creation of this namespace involves many ugly
# workarounds.
# The namespace consists of all the names needed for the sympy expression and
# all the name of modules used for translation. Those modules are imported only
# as a name (import numpy as np) in order to keep the namespace small and
# manageable.
# Please, if there is a bug, do not try to fix it here! Rewrite this by using
# the method proposed in the last Q'n'A below. That way the new function will
# work just as well, be just as simple, but it won't need any new workarounds.
# If you insist on fixing it here, look at the workarounds in the function
# sympy_expression_namespace and in lambdify.
# Q: Why are you not using python abstract syntax tree?
# A: Because it is more complicated and not much more powerful in this case.
# Q: What if I have Symbol('sin') or g=Function('f')?
# A: You will break the algorithm. We should use srepr to defend against this?
# The problem with Symbol('sin') is that it will be printed as 'sin'. The
# parser will distinguish it from the function 'sin' because functions are
# detected thanks to the opening parenthesis, but the lambda expression won't
# understand the difference if we have also the sin function.
# The solution (complicated) is to use srepr and maybe ast.
# The problem with the g=Function('f') is that it will be printed as 'f' but in
# the global namespace we have only 'g'. But as the same printer is used in the
# constructor of the namespace there will be no problem.
# Q: What if some of the printers are not printing as expected?
# A: The algorithm won't work. You must use srepr for those cases. But even
# srepr may not print well. All problems with printers should be considered
# bugs.
# Q: What about _imp_ functions?
# A: Those are taken care for by evalf. A special case treatment will work
# faster but it's not worth the code complexity.
# Q: Will ast fix all possible problems?
# A: No. You will always have to use some printer. Even srepr may not work in
# some cases. But if the printer does not work, that should be considered a
# bug.
# Q: Is there some way to fix all possible problems?
# A: Probably by constructing our strings ourself by traversing the (func,
# args) tree and creating the namespace at the same time. That actually sounds
# good.
from sympy.external import import_module
np = import_module('numpy')
import warnings
# TODO: debugging output
class vectorized_lambdify(object):
""" Return a sufficiently smart, vectorized and lambdified function.
Returns only reals.
    This function uses experimental_lambdify to create a lambdified
    expression ready to be used with numpy. Many of the functions in sympy
are not implemented in numpy so in some cases we resort to python cmath or
even to evalf.
The following translations are tried:
only numpy complex
- on errors raised by sympy trying to work with ndarray:
only python cmath and then vectorize complex128
When using python cmath there is no need for evalf or float/complex
because python cmath calls those.
This function never tries to mix numpy directly with evalf because numpy
does not understand sympy Float. If this is needed one can use the
float_wrap_evalf/complex_wrap_evalf options of experimental_lambdify or
better one can be explicit about the dtypes that numpy works with.
Check numpy bug http://projects.scipy.org/numpy/ticket/1013 to know what
types of errors to expect.
"""
def __init__(self, args, expr):
self.args = args
self.expr = expr
self.lambda_func = experimental_lambdify(args, expr, use_np=True)
self.vector_func = self.lambda_func
self.failure = False
def __call__(self, *args):
np_old_err = np.seterr(invalid='raise')
try:
temp_args = (np.array(a, dtype=np.complex) for a in args)
results = self.vector_func(*temp_args)
results = np.ma.masked_where(
np.abs(results.imag) != 0, results.real, copy=False)
except Exception, e:
#DEBUG: print 'Error', type(e), e
if ((isinstance(e, TypeError)
and 'unhashable type: \'numpy.ndarray\'' in str(e))
or
(isinstance(e, ValueError)
and ('Invalid limits given:' in str(e)
or 'negative dimensions are not allowed' in str(e) # XXX
or 'sequence too large; must be smaller than 32' in str(e)))): # XXX
# Almost all functions were translated to numpy, but some were
                # left as sympy functions. They received an ndarray as an
# argument and failed.
# sin(ndarray(...)) raises "unhashable type"
# Integral(x, (x, 0, ndarray(...))) raises "Invalid limits"
# other ugly exceptions that are not well understood (marked with XXX)
# TODO: Cleanup the ugly special cases marked with xxx above.
# Solution: use cmath and vectorize the final lambda.
self.lambda_func = experimental_lambdify(
self.args, self.expr, use_python_cmath=True)
self.vector_func = np.vectorize(
self.lambda_func, otypes=[np.complex])
results = self.vector_func(*args)
results = np.ma.masked_where(
np.abs(results.imag) != 0, results.real, copy=False)
else:
# Complete failure. One last try with no translations, only
# wrapping in complex((...).evalf()) and returning the real
# part.
if self.failure:
raise e
else:
self.failure = True
self.lambda_func = experimental_lambdify(
self.args, self.expr, use_evalf=True,
complex_wrap_evalf=True)
self.vector_func = np.vectorize(
self.lambda_func, otypes=[np.complex])
results = self.vector_func(*args)
results = np.ma.masked_where(
np.abs(results.imag) != 0, results.real, copy=False)
warnings.warn('The evaluation of the expression is'
' problematic. We are trying a failback method'
' that may still work. Please report this as a bug.')
finally:
np.seterr(**np_old_err)
return results
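# Minimal usage sketch for vectorized_lambdify (not part of the original file);
# the symbol and the sampling grid below are illustrative assumptions:
#
#     >>> from sympy.abc import x
#     >>> from sympy import sin
#     >>> f = vectorized_lambdify([x], sin(x)/x)
#     >>> f(np.linspace(1, 10, 5))
#
# The call returns a masked numpy array: entries whose evaluation produced a
# non-zero imaginary part are masked out, the remaining entries hold the real values.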
class lambdify(object):
"""Returns the lambdified function.
This function uses experimental_lambdify to create a lambdified
expression. It uses cmath to lambdify the expression. If the function
is not implemented in python cmath, python cmath calls evalf on those
functions.
"""
def __init__(self, args, expr):
self.args = args
self.expr = expr
self.lambda_func = experimental_lambdify(args, expr, use_evalf=True,
use_python_cmath=True)
self.failure = False
def __call__(self, args):
args = complex(args)
try:
#The result can be sympy.Float. Hence wrap it with complex type.
result = complex(self.lambda_func(args))
if abs(result.imag) > 1e-7 * abs(result):
return None
else:
return result.real
except Exception, e:
# The exceptions raised by sympy, cmath are not consistent and
# hence it is not possible to specify all the exceptions that
# are to be caught. Presently there are no cases for which the code
# reaches this block other than ZeroDivisionError. Also the
# exception is caught only once. If the exception repeats itself,
# then it is not caught and the corresponding error is raised.
# XXX: Remove catching all exceptions once the plotting module
# is heavily tested.
if isinstance(e, ZeroDivisionError):
return None
else:
if self.failure:
raise e
#Failure
#Try wrapping it with complex(..).evalf()
self.failure = True
self.lambda_func = experimental_lambdify(self.args, self.expr,
use_evalf=True,
complex_wrap_evalf=True)
result = self.lambda_func(args)
warnings.warn('The evaluation of the expression is'
' problematic. We are trying a failback method'
' that may still work. Please report this as a bug.')
if abs(result.imag) > 0:
return None
else:
return result.real
def experimental_lambdify(*args, **kwargs):
l = Lambdifier(*args, **kwargs)
return l.lambda_func
class Lambdifier(object):
def __init__(self, args, expr, print_lambda=False, use_evalf=False,
float_wrap_evalf=False, complex_wrap_evalf=False,
use_np=False, use_python_math=False, use_python_cmath=False,
use_interval=False):
self.print_lambda = print_lambda
self.use_evalf = use_evalf
self.float_wrap_evalf = float_wrap_evalf
self.complex_wrap_evalf = complex_wrap_evalf
self.use_np = use_np
self.use_python_math = use_python_math
self.use_python_cmath = use_python_cmath
self.use_interval = use_interval
# Constructing the argument string
if not all([isinstance(a, Symbol) for a in args]):
raise ValueError('The arguments must be Symbols.')
else:
argstr = ', '.join([str(a) for a in args])
# Constructing the translation dictionaries and making the translation
self.dict_str = self.get_dict_str()
self.dict_fun = self.get_dict_fun()
exprstr = str(expr)
newexpr = self.tree2str_translate(self.str2tree(exprstr))
# Constructing the namespaces
namespace = {}
namespace.update(self.sympy_atoms_namespace(expr))
namespace.update(self.sympy_expression_namespace(expr))
# XXX Workaround
# Ugly workaround because Pow(a,Half) prints as sqrt(a)
# and sympy_expression_namespace can not catch it.
from sympy import sqrt
namespace.update({'sqrt': sqrt})
# End workaround.
if use_python_math:
namespace.update({'math': __import__('math')})
if use_python_cmath:
namespace.update({'cmath': __import__('cmath')})
if use_np:
try:
namespace.update({'np': __import__('numpy')})
except ImportError:
raise ImportError(
'experimental_lambdify failed to import numpy.')
if use_interval:
namespace.update({'imath': __import__(
'sympy.plotting.intervalmath', fromlist=['intervalmath'])})
namespace.update({'math': __import__('math')})
# Construct the lambda
if self.print_lambda:
print newexpr
eval_str = 'lambda %s : ( %s )' % (argstr, newexpr)
exec "from __future__ import division; MYNEWLAMBDA = %s" % eval_str in namespace
self.lambda_func = namespace['MYNEWLAMBDA']
##############################################################################
# Dicts for translating from sympy to other modules
##############################################################################
###
# builtins
###
# Functions with different names in builtins
builtin_functions_different = {
'Min': 'min',
'Max': 'max',
'Abs': 'abs',
}
# Strings that should be translated
builtin_not_functions = {
'I': '1j',
'oo': '1e400',
}
###
# numpy
###
# Functions that are the same in numpy
numpy_functions_same = [
'sin', 'cos', 'tan', 'sinh', 'cosh', 'tanh', 'exp', 'log',
'sqrt', 'floor', 'conjugate',
]
# Functions with different names in numpy
numpy_functions_different = {
"acos": "arccos",
"acosh": "arccosh",
"arg": "angle",
"asin": "arcsin",
"asinh": "arcsinh",
"atan": "arctan",
"atan2": "arctan2",
"atanh": "arctanh",
"ceiling": "ceil",
"im": "imag",
"ln": "log",
"Max": "amax",
"Min": "amin",
"re": "real",
"Abs": "abs",
}
# Strings that should be translated
numpy_not_functions = {
'pi': 'np.pi',
'oo': 'np.inf',
'E': 'np.e',
}
###
# python math
###
# Functions that are the same in math
math_functions_same = [
'sin', 'cos', 'tan', 'asin', 'acos', 'atan', 'atan2',
'sinh', 'cosh', 'tanh', 'asinh', 'acosh', 'atanh',
'exp', 'log', 'erf', 'sqrt', 'floor', 'factorial', 'gamma',
]
# Functions with different names in math
math_functions_different = {
'ceiling': 'ceil',
'ln': 'log',
'loggamma': 'lgamma'
}
# Strings that should be translated
math_not_functions = {
'pi': 'math.pi',
'E': 'math.e',
}
###
# python cmath
###
# Functions that are the same in cmath
cmath_functions_same = [
'sin', 'cos', 'tan', 'asin', 'acos', 'atan',
'sinh', 'cosh', 'tanh', 'asinh', 'acosh', 'atanh',
'exp', 'log', 'sqrt',
]
# Functions with different names in cmath
cmath_functions_different = {
'ln': 'log',
'arg': 'phase',
}
# Strings that should be translated
cmath_not_functions = {
'pi': 'cmath.pi',
'E': 'cmath.e',
}
###
# intervalmath
###
interval_not_functions = {
'pi': 'math.pi',
'E': 'math.e'
}
interval_functions_same = [
'sin', 'cos', 'exp', 'tan', 'atan', 'log',
'sqrt', 'cosh', 'sinh', 'tanh', 'floor',
'acos', 'asin', 'acosh', 'asinh', 'atanh',
'Abs', 'And', 'Or'
]
interval_functions_different = {
'Min': 'imin',
'Max': 'imax',
'ceiling': 'ceil',
}
###
# mpmath, etc
###
#TODO
###
# Create the final ordered tuples of dictionaries
###
# For strings
def get_dict_str(self):
dict_str = dict(self.builtin_not_functions)
if self.use_np:
dict_str.update(self.numpy_not_functions)
if self.use_python_math:
dict_str.update(self.math_not_functions)
if self.use_python_cmath:
dict_str.update(self.cmath_not_functions)
if self.use_interval:
dict_str.update(self.interval_not_functions)
return dict_str
# For functions
def get_dict_fun(self):
dict_fun = dict(self.builtin_functions_different)
if self.use_np:
for s in self.numpy_functions_same:
dict_fun[s] = 'np.' + s
for k, v in self.numpy_functions_different.iteritems():
dict_fun[k] = 'np.' + v
if self.use_python_math:
for s in self.math_functions_same:
dict_fun[s] = 'math.' + s
for k, v in self.math_functions_different.iteritems():
dict_fun[k] = 'math.' + v
if self.use_python_cmath:
for s in self.cmath_functions_same:
dict_fun[s] = 'cmath.' + s
for k, v in self.cmath_functions_different.iteritems():
dict_fun[k] = 'cmath.' + v
if self.use_interval:
for s in self.interval_functions_same:
dict_fun[s] = 'imath.' + s
for k, v in self.interval_functions_different.iteritems():
dict_fun[k] = 'imath.' + v
return dict_fun
##############################################################################
# The translator functions, tree parsers, etc.
##############################################################################
def str2tree(self, exprstr):
"""Converts an expression string to a tree.
Functions are represented by ('func_name(', tree_of_arguments).
Other expressions are (head_string, mid_tree, tail_str).
Expressions that do not contain functions are directly returned.
Examples:
>>> from sympy.abc import x, y, z
>>> from sympy import Integral, sin
>>> from sympy.plotting.experimental_lambdify import Lambdifier
>>> str2tree = Lambdifier([x], x).str2tree
>>> str2tree(str(Integral(x, (x, 1, y))))
('', ('Integral(', 'x, (x, 1, y)'), ')')
>>> str2tree(str(x+y))
'x + y'
>>> str2tree(str(x+y*sin(z)+1))
('x + y*', ('sin(', 'z'), ') + 1')
>>> str2tree('sin(y*(y + 1.1) + (sin(y)))')
('', ('sin(', ('y*(y + 1.1) + (', ('sin(', 'y'), '))')), ')')
"""
#matches the first 'function_name('
first_par = re.search(r'(\w+\()', exprstr)
if first_par is None:
return exprstr
else:
start = first_par.start()
end = first_par.end()
head = exprstr[:start]
func = exprstr[start:end]
tail = exprstr[end:]
count = 0
for i, c in enumerate(tail):
if c == '(':
count += 1
elif c == ')':
count -= 1
if count == -1:
break
func_tail = self.str2tree(tail[:i])
tail = self.str2tree(tail[i:])
return (head, (func, func_tail), tail)
@classmethod
def tree2str(cls, tree):
"""Converts a tree to string without translations.
Examples:
>>> from sympy.abc import x, y, z
>>> from sympy import Integral, sin
>>> from sympy.plotting.experimental_lambdify import Lambdifier
>>> str2tree = Lambdifier([x], x).str2tree
>>> tree2str = Lambdifier([x], x).tree2str
>>> tree2str(str2tree(str(x+y*sin(z)+1)))
'x + y*sin(z) + 1'
"""
if isinstance(tree, str):
return tree
else:
return ''.join(map(cls.tree2str, tree))
def tree2str_translate(self, tree):
"""Converts a tree to string with translations.
Function names are translated by translate_func.
Other strings are translated by translate_str.
"""
if isinstance(tree, str):
return self.translate_str(tree)
elif isinstance(tree, tuple) and len(tree) == 2:
return self.translate_func(tree[0][:-1], tree[1])
else:
return ''.join([self.tree2str_translate(t) for t in tree])
def translate_str(self, estr):
"""Translate substrings of estr using in order the dictionaries in
dict_tuple_str."""
for pattern, repl in self.dict_str.iteritems():
estr = re.sub(pattern, repl, estr)
return estr
def translate_func(self, func_name, argtree):
"""Translate function names and the tree of arguments.
If the function name is not in the dictionaries of dict_tuple_fun then the
function is surrounded by a float((...).evalf()).
The use of float is necessary as np.<function>(sympy.Float(..)) raises an
error."""
if func_name in self.dict_fun:
new_name = self.dict_fun[func_name]
argstr = self.tree2str_translate(argtree)
return new_name + '(' + argstr
else:
template = '(%s(%s)).evalf(' if self.use_evalf else '%s(%s'
if self.float_wrap_evalf:
template = 'float(%s)' % template
elif self.complex_wrap_evalf:
template = 'complex(%s)' % template
return template % (func_name, self.tree2str(argtree))
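  # Sketch of the behaviour above (added for illustration): a translated name
  # such as 'sin' with use_np=True comes back as 'np.sin(<args>' (the closing
  # parenthesis is supplied by the surrounding tree), while an unknown callable
  # like 'Integral' is wrapped, e.g. as 'float((Integral(<args>)).evalf(' when
  # float_wrap_evalf is set.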
##############################################################################
# The namespace constructors
##############################################################################
@classmethod
def sympy_expression_namespace(cls, expr):
"""Traverses the (func, args) tree of an expression and creates a sympy
namespace. All other modules are imported only as a module name. That way
    the namespace is not polluted and stays quite small. It probably causes many
    more variable lookups and therefore takes more time, but there are no
    benchmarks for that at the moment."""
if expr is None:
return {}
else:
funcname = str(expr.func)
# XXX Workaround
# Here we add an ugly workaround because str(func(x))
      # is not always the same as str(func). E.g.
# >>> str(Integral(x))
# "Integral(x)"
# >>> str(Integral)
# "<class 'sympy.integrals.integrals.Integral'>"
# >>> str(sqrt(x))
# "sqrt(x)"
# >>> str(sqrt)
# "<function sqrt at 0x3d92de8>"
# >>> str(sin(x))
# "sin(x)"
# >>> str(sin)
# "sin"
# Either one of those can be used but not all at the same time.
# The code considers the sin example as the right one.
regexlist = [
r'<class \'sympy[\w.]*?.([\w]*)\'>$',
# the example Integral
r'<function ([\w]*) at 0x[\w]*>$', # the example sqrt
]
for r in regexlist:
m = re.match(r, funcname)
if m is not None:
funcname = m.groups()[0]
# End of the workaround
# XXX debug: print funcname
args_dict = {}
for a in expr.args:
if (isinstance(a, Symbol) or
isinstance(a, NumberSymbol) or
a in [I, zoo, oo]):
continue
else:
args_dict.update(cls.sympy_expression_namespace(a))
args_dict.update({funcname: expr.func})
return args_dict
@staticmethod
def sympy_atoms_namespace(expr):
"""For no real reason this function is separated from
sympy_expression_namespace. It can be moved to it."""
atoms = expr.atoms(Symbol, NumberSymbol, I, zoo, oo)
d = {}
for a in atoms:
# XXX debug: print 'atom:' + str(a)
d[str(a)] = a
return d
| bsd-3-clause | -8,688,089,668,296,977,000 | 37.132166 | 91 | 0.555602 | false |
laslabs/odoo-project_jira | models/res_company.py | 1 | 1123 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Dave Lasley <[email protected]>
# Copyright: 2015 LasLabs, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class ResCompany(models.Model):
_inherit = 'res.company'
jira_oauth_ids = fields.One2many('project.jira.oauth', 'company_id')
| agpl-3.0 | 2,600,987,763,477,031,000 | 42.192308 | 78 | 0.614426 | false |
chirilo/kuma | kuma/wiki/views/create.py | 1 | 5526 | # -*- coding: utf-8 -*-
import newrelic.agent
from django.core.exceptions import PermissionDenied
from django.http import HttpResponseRedirect
from django.shortcuts import render
from constance import config
from kuma.attachments.forms import AttachmentRevisionForm
from kuma.attachments.models import Attachment
from kuma.core.decorators import never_cache, login_required
from kuma.core.urlresolvers import reverse
from ..constants import (TEMPLATE_TITLE_PREFIX,
REVIEW_FLAG_TAGS_DEFAULT)
from ..decorators import check_readonly, prevent_indexing
from ..forms import DocumentForm, RevisionForm, RevisionValidationForm
from ..models import Document, Revision
from .utils import save_revision_and_notify
@login_required
@check_readonly
@prevent_indexing
@never_cache
@newrelic.agent.function_trace()
def new_document(request):
"""Create a new wiki document."""
initial_slug = request.GET.get('slug', '')
initial_title = initial_slug.replace('_', ' ')
initial_parent_id = ''
try:
initial_parent_id = int(request.GET.get('parent', ''))
except ValueError:
pass
clone_id = None
try:
clone_id = int(request.GET.get('clone', ''))
except ValueError:
pass
if not Document.objects.allows_add_by(request.user, initial_slug):
# Try to head off disallowed Template:* creation, right off the bat
raise PermissionDenied
is_template = initial_slug.startswith(TEMPLATE_TITLE_PREFIX)
# If a parent ID is provided via GET, confirm it exists
parent_slug = parent_path = ''
if initial_parent_id:
try:
parent_doc = Document.objects.get(pk=initial_parent_id)
parent_slug = parent_doc.slug
parent_path = parent_doc.get_absolute_url()
except Document.DoesNotExist:
pass
if request.method == 'GET':
initial_data = {}
initial_html = ''
initial_tags = ''
initial_toc = Revision.TOC_DEPTH_H4
if clone_id:
try:
clone_doc = Document.objects.get(pk=clone_id)
initial_title = clone_doc.title
initial_html = clone_doc.html
initial_tags = clone_doc.tags.all()
if clone_doc.current_revision:
initial_toc = clone_doc.current_revision.toc_depth
else:
initial_toc = 1
except Document.DoesNotExist:
pass
if parent_slug:
initial_data['parent_topic'] = initial_parent_id
if initial_slug:
initial_data['title'] = initial_title
initial_data['slug'] = initial_slug
if is_template:
review_tags = ('template',)
else:
review_tags = REVIEW_FLAG_TAGS_DEFAULT
doc_form = DocumentForm(initial=initial_data)
rev_form = RevisionForm(initial={
'slug': initial_slug,
'title': initial_title,
'content': initial_html,
'review_tags': review_tags,
'tags': initial_tags,
'toc_depth': initial_toc
})
allow_add_attachment = (
Attachment.objects.allow_add_attachment_by(request.user))
context = {
'is_template': is_template,
'parent_slug': parent_slug,
'parent_id': initial_parent_id,
'document_form': doc_form,
'revision_form': rev_form,
'WIKI_DOCUMENT_TAG_SUGGESTIONS': config.WIKI_DOCUMENT_TAG_SUGGESTIONS,
'initial_tags': initial_tags,
'allow_add_attachment': allow_add_attachment,
'attachment_form': AttachmentRevisionForm(),
'parent_path': parent_path}
return render(request, 'wiki/new_document.html', context)
post_data = request.POST.copy()
posted_slug = post_data['slug']
post_data.update({'locale': request.locale})
if parent_slug:
post_data.update({'parent_topic': initial_parent_id})
post_data.update({'slug': parent_slug + '/' + post_data['slug']})
doc_form = DocumentForm(post_data)
rev_form = RevisionValidationForm(request.POST.copy())
rev_form.parent_slug = parent_slug
if doc_form.is_valid() and rev_form.is_valid():
rev_form = RevisionForm(post_data)
if rev_form.is_valid():
slug = doc_form.cleaned_data['slug']
if not Document.objects.allows_add_by(request.user, slug):
raise PermissionDenied
doc = doc_form.save(None)
save_revision_and_notify(rev_form, request, doc)
if doc.current_revision.is_approved:
view = 'wiki.document'
else:
view = 'wiki.document_revisions'
return HttpResponseRedirect(reverse(view, args=[doc.slug]))
else:
doc_form.data['slug'] = posted_slug
else:
doc_form.data['slug'] = posted_slug
allow_add_attachment = (
Attachment.objects.allow_add_attachment_by(request.user))
context = {
'is_template': is_template,
'document_form': doc_form,
'revision_form': rev_form,
'WIKI_DOCUMENT_TAG_SUGGESTIONS': config.WIKI_DOCUMENT_TAG_SUGGESTIONS,
'allow_add_attachment': allow_add_attachment,
'attachment_form': AttachmentRevisionForm(),
'parent_slug': parent_slug,
'parent_path': parent_path,
}
return render(request, 'wiki/new_document.html', context)
| mpl-2.0 | -7,831,619,616,727,707,000 | 32.289157 | 82 | 0.608035 | false |
sch3m4/intelmq | intelmq/bots/parsers/dragonresearchgroup/parser_ssh.py | 1 | 1540 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
from intelmq.lib import utils
from intelmq.lib.bot import Bot
from intelmq.lib.message import Event
class DragonResearchGroupSSHParserBot(Bot):
def process(self):
report = self.receive_message()
if report is None or not report.contains("raw"):
self.acknowledge_message()
return
raw_report = utils.base64_decode(report.value("raw"))
for row in raw_report.splitlines():
row = row.strip()
if len(row) == 0 or row.startswith('#'):
continue
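            # Rows in the Dragon Research Group SSH feed are pipe-separated;
            # judging from the column mapping below, a line looks roughly like
            #   "3215 | SOME-AS-NAME | 192.0.2.1 | 2015-06-01" (values are examples).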
splitted_row = row.split('|')
event = Event(report)
columns = ["source.asn", "source.as_name",
"source.ip", "time.source"]
for key, value in zip(columns, splitted_row):
value = value.strip()
if key == "time.source":
value += "T00:00:00+00:00"
if value == "NA":
continue
event.add(key, value, sanitize=True)
event.add("classification.type", "brute-force")
event.add("protocol.application", "ssh")
event.add("protocol.transport", "tcp")
event.add("destination.port", 22)
event.add("raw", row, sanitize=True)
self.send_message(event)
self.acknowledge_message()
if __name__ == "__main__":
bot = DragonResearchGroupSSHParserBot(sys.argv[1])
bot.start()
| agpl-3.0 | 7,095,525,332,861,788,000 | 26.5 | 61 | 0.543506 | false |
jendap/tensorflow | tensorflow/python/keras/callbacks_v1.py | 1 | 17877 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-import-not-at-top
"""Callbacks: utilities called at certain points during model training.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import callbacks
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import summary_ops_v2
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary as tf_summary
from tensorflow.python.training import saver
from tensorflow.python.util.tf_export import keras_export
@keras_export(v1=['keras.callbacks.TensorBoard'])
class TensorBoard(callbacks.Callback):
# pylint: disable=line-too-long
"""TensorBoard basic visualizations.
This callback writes a log for TensorBoard, which allows
you to visualize dynamic graphs of your training and test
metrics, as well as activation histograms for the different
layers in your model.
TensorBoard is a visualization tool provided with TensorFlow.
If you have installed TensorFlow with pip, you should be able
to launch TensorBoard from the command line:
```sh
tensorboard --logdir=/full_path_to_your_logs
```
You can find more information about TensorBoard
[here](https://www.tensorflow.org/get_started/summaries_and_tensorboard).
Arguments:
log_dir: the path of the directory where to save the log files to be
parsed by TensorBoard.
histogram_freq: frequency (in epochs) at which to compute activation and
weight histograms for the layers of the model. If set to 0, histograms
won't be computed. Validation data (or split) must be specified for
histogram visualizations.
write_graph: whether to visualize the graph in TensorBoard. The log file
can become quite large when write_graph is set to True.
write_grads: whether to visualize gradient histograms in TensorBoard.
`histogram_freq` must be greater than 0.
batch_size: size of batch of inputs to feed to the network for histograms
computation.
write_images: whether to write model weights to visualize as image in
TensorBoard.
embeddings_freq: frequency (in epochs) at which selected embedding layers
will be saved. If set to 0, embeddings won't be computed. Data to be
visualized in TensorBoard's Embedding tab must be passed as
`embeddings_data`.
embeddings_layer_names: a list of names of layers to keep eye on. If None
or empty list all the embedding layer will be watched.
embeddings_metadata: a dictionary which maps layer name to a file name in
which metadata for this embedding layer is saved. See the
[details](https://www.tensorflow.org/how_tos/embedding_viz/#metadata_optional)
      about the metadata file format. If the same metadata file is used for
      all embedding layers, a single string can be passed.
embeddings_data: data to be embedded at layers specified in
`embeddings_layer_names`. Numpy array (if the model has a single input)
or list of Numpy arrays (if the model has multiple inputs). Learn [more
about
embeddings](https://www.tensorflow.org/programmers_guide/embedding)
update_freq: `'batch'` or `'epoch'` or integer. When using `'batch'`,
writes the losses and metrics to TensorBoard after each batch. The same
applies for `'epoch'`. If using an integer, let's say `1000`, the
callback will write the metrics and losses to TensorBoard every 1000
samples. Note that writing too frequently to TensorBoard can slow down
your training.
Raises:
ValueError: If histogram_freq is set and no validation data is provided.
@compatibility(eager)
Using the `TensorBoard` callback will work when eager execution is enabled,
with the restriction that outputting histogram summaries of weights and
gradients is not supported. Consequently, `histogram_freq` will be ignored.
@end_compatibility
"""
# pylint: enable=line-too-long
def __init__(self,
log_dir='./logs',
histogram_freq=0,
batch_size=32,
write_graph=True,
write_grads=False,
write_images=False,
embeddings_freq=0,
embeddings_layer_names=None,
embeddings_metadata=None,
embeddings_data=None,
update_freq='epoch'):
super(TensorBoard, self).__init__()
self.log_dir = log_dir
self.histogram_freq = histogram_freq
if self.histogram_freq and context.executing_eagerly():
logging.warning(
          UserWarning('Weight and gradient histograms not supported for eager '
'execution, setting `histogram_freq` to `0`.'))
self.histogram_freq = 0
self.merged = None
self.write_graph = write_graph
self.write_grads = write_grads
self.write_images = write_images
self.batch_size = batch_size
self._current_batch = 0
self._total_batches_seen = 0
self._total_val_batches_seen = 0
self.embeddings_freq = embeddings_freq
self.embeddings_layer_names = embeddings_layer_names
self.embeddings_metadata = embeddings_metadata
self.embeddings_data = embeddings_data
if update_freq == 'batch':
self.update_freq = 1
else:
self.update_freq = update_freq
self._samples_seen = 0
self._samples_seen_at_last_write = 0
def _init_writer(self, model):
"""Sets file writer."""
if context.executing_eagerly():
self.writer = summary_ops_v2.create_file_writer(self.log_dir)
if not model.run_eagerly and self.write_graph:
with self.writer.as_default():
summary_ops_v2.graph(K.get_graph())
elif self.write_graph:
self.writer = tf_summary.FileWriter(self.log_dir, K.get_graph())
else:
self.writer = tf_summary.FileWriter(self.log_dir)
def _make_histogram_ops(self, model):
"""Defines histogram ops when histogram_freq > 0."""
# only make histogram summary op if it hasn't already been made
if self.histogram_freq and self.merged is None:
for layer in self.model.layers:
for weight in layer.weights:
mapped_weight_name = weight.name.replace(':', '_')
tf_summary.histogram(mapped_weight_name, weight)
if self.write_images:
w_img = array_ops.squeeze(weight)
shape = K.int_shape(w_img)
if len(shape) == 2: # dense layer kernel case
if shape[0] > shape[1]:
w_img = array_ops.transpose(w_img)
shape = K.int_shape(w_img)
w_img = array_ops.reshape(w_img, [1, shape[0], shape[1], 1])
elif len(shape) == 3: # convnet case
if K.image_data_format() == 'channels_last':
# switch to channels_first to display
# every kernel as a separate image
w_img = array_ops.transpose(w_img, perm=[2, 0, 1])
shape = K.int_shape(w_img)
w_img = array_ops.reshape(w_img,
[shape[0], shape[1], shape[2], 1])
elif len(shape) == 1: # bias case
w_img = array_ops.reshape(w_img, [1, shape[0], 1, 1])
else:
# not possible to handle 3D convnets etc.
continue
shape = K.int_shape(w_img)
assert len(shape) == 4 and shape[-1] in [1, 3, 4]
tf_summary.image(mapped_weight_name, w_img)
if self.write_grads:
for weight in layer.trainable_weights:
mapped_weight_name = weight.name.replace(':', '_')
grads = model.optimizer.get_gradients(model.total_loss, weight)
def is_indexed_slices(grad):
return type(grad).__name__ == 'IndexedSlices'
grads = [
grad.values if is_indexed_slices(grad) else grad
for grad in grads
]
tf_summary.histogram('{}_grad'.format(mapped_weight_name), grads)
if hasattr(layer, 'output'):
if isinstance(layer.output, list):
for i, output in enumerate(layer.output):
tf_summary.histogram('{}_out_{}'.format(layer.name, i), output)
else:
tf_summary.histogram('{}_out'.format(layer.name), layer.output)
def set_model(self, model):
"""Sets Keras model and creates summary ops."""
self.model = model
self._init_writer(model)
# histogram summaries only enabled in graph mode
if not context.executing_eagerly():
self._make_histogram_ops(model)
self.merged = tf_summary.merge_all()
# If both embedding_freq and embeddings_data are available, we will
# visualize embeddings.
if self.embeddings_freq and self.embeddings_data is not None:
# Avoid circular dependency.
from tensorflow.python.keras.engine import training_utils # pylint: disable=g-import-not-at-top
self.embeddings_data = training_utils.standardize_input_data(
self.embeddings_data, model.input_names)
# If embedding_layer_names are not provided, get all of the embedding
# layers from the model.
embeddings_layer_names = self.embeddings_layer_names
if not embeddings_layer_names:
embeddings_layer_names = [
layer.name
for layer in self.model.layers
if type(layer).__name__ == 'Embedding'
]
self.assign_embeddings = []
embeddings_vars = {}
self.batch_id = batch_id = array_ops.placeholder(dtypes.int32)
self.step = step = array_ops.placeholder(dtypes.int32)
for layer in self.model.layers:
if layer.name in embeddings_layer_names:
embedding_input = self.model.get_layer(layer.name).output
embedding_size = np.prod(embedding_input.shape[1:])
embedding_input = array_ops.reshape(embedding_input,
(step, int(embedding_size)))
shape = (self.embeddings_data[0].shape[0], int(embedding_size))
embedding = variables.Variable(
array_ops.zeros(shape), name=layer.name + '_embedding')
embeddings_vars[layer.name] = embedding
batch = state_ops.assign(embedding[batch_id:batch_id + step],
embedding_input)
self.assign_embeddings.append(batch)
self.saver = saver.Saver(list(embeddings_vars.values()))
# Create embeddings_metadata dictionary
if isinstance(self.embeddings_metadata, str):
embeddings_metadata = {
layer_name: self.embeddings_metadata
for layer_name in embeddings_vars.keys()
}
else:
# If embedding_metadata is already a dictionary
embeddings_metadata = self.embeddings_metadata
try:
from tensorboard.plugins import projector
except ImportError:
raise ImportError('Failed to import TensorBoard. Please make sure that '
                          'TensorBoard integration is complete.')
# TODO(psv): Add integration tests to test embedding visualization
# with TensorBoard callback. We are unable to write a unit test for this
# because TensorBoard dependency assumes TensorFlow package is installed.
config = projector.ProjectorConfig()
for layer_name, tensor in embeddings_vars.items():
embedding = config.embeddings.add()
embedding.tensor_name = tensor.name
if (embeddings_metadata is not None and
layer_name in embeddings_metadata):
embedding.metadata_path = embeddings_metadata[layer_name]
projector.visualize_embeddings(self.writer, config)
def _fetch_callback(self, summary):
self.writer.add_summary(summary, self._total_val_batches_seen)
self._total_val_batches_seen += 1
def _write_custom_summaries(self, step, logs=None):
"""Writes metrics out as custom scalar summaries.
Arguments:
step: the global step to use for TensorBoard.
logs: dict. Keys are scalar summary names, values are
NumPy scalars.
"""
logs = logs or {}
if context.executing_eagerly():
# use v2 summary ops
with self.writer.as_default(), summary_ops_v2.always_record_summaries():
for name, value in logs.items():
if isinstance(value, np.ndarray):
value = value.item()
summary_ops_v2.scalar(name, value, step=step)
else:
# use FileWriter from v1 summary
for name, value in logs.items():
if isinstance(value, np.ndarray):
value = value.item()
summary = tf_summary.Summary()
summary_value = summary.value.add()
summary_value.simple_value = value
summary_value.tag = name
self.writer.add_summary(summary, step)
self.writer.flush()
def on_batch_end(self, batch, logs=None):
"""Writes scalar summaries for metrics on every training batch."""
# Don't output batch_size and batch number as TensorBoard summaries
logs = logs or {}
self._samples_seen += logs.get('size', 1)
samples_seen_since = self._samples_seen - self._samples_seen_at_last_write
if self.update_freq != 'epoch' and samples_seen_since >= self.update_freq:
batch_logs = {('batch_' + k): v
for k, v in logs.items()
if k not in ['batch', 'size', 'num_steps']}
self._write_custom_summaries(self._total_batches_seen, batch_logs)
self._samples_seen_at_last_write = self._samples_seen
self._total_batches_seen += 1
def on_epoch_begin(self, epoch, logs=None):
"""Add histogram op to Model eval_function callbacks, reset batch count."""
# check if histogram summary should be run for this epoch
if self.histogram_freq and epoch % self.histogram_freq == 0:
self._epoch = epoch
# pylint: disable=protected-access
# add the histogram summary op if it should run this epoch
self.model._make_eval_function()
if self.merged not in self.model._eval_function.fetches:
self.model._eval_function.fetches.append(self.merged)
self.model._eval_function.fetch_callbacks[
self.merged] = self._fetch_callback
# pylint: enable=protected-access
def on_epoch_end(self, epoch, logs=None):
"""Checks if summary ops should run next epoch, logs scalar summaries."""
# don't output batch_size and
# batch number as TensorBoard summaries
logs = {('epoch_' + k): v
for k, v in logs.items()
if k not in ['batch', 'size', 'num_steps']}
if self.update_freq == 'epoch':
step = epoch
else:
step = self._samples_seen
self._write_custom_summaries(step, logs)
# pop the histogram summary op after each epoch
if self.histogram_freq:
# pylint: disable=protected-access
if self.merged in self.model._eval_function.fetches:
self.model._eval_function.fetches.remove(self.merged)
if self.merged in self.model._eval_function.fetch_callbacks:
self.model._eval_function.fetch_callbacks.pop(self.merged)
# pylint: enable=protected-access
if self.embeddings_data is None and self.embeddings_freq:
raise ValueError('To visualize embeddings, embeddings_data must '
'be provided.')
if self.embeddings_freq and self.embeddings_data is not None:
if epoch % self.embeddings_freq == 0:
# We need a second forward-pass here because we're passing
# the `embeddings_data` explicitly. This design allows to pass
# arbitrary data as `embeddings_data` and results from the fact
# that we need to know the size of the `tf.Variable`s which
# hold the embeddings in `set_model`. At this point, however,
# the `validation_data` is not yet set.
embeddings_data = self.embeddings_data
n_samples = embeddings_data[0].shape[0]
i = 0
while i < n_samples:
step = min(self.batch_size, n_samples - i)
batch = slice(i, i + step)
if isinstance(self.model.input, list):
feed_dict = {
model_input: embeddings_data[idx][batch]
for idx, model_input in enumerate(self.model.input)
}
else:
feed_dict = {self.model.input: embeddings_data[0][batch]}
feed_dict.update({self.batch_id: i, self.step: step})
if not isinstance(K.learning_phase(), int):
feed_dict[K.learning_phase()] = False
self.sess.run(self.assign_embeddings, feed_dict=feed_dict)
self.saver.save(self.sess,
os.path.join(self.log_dir, 'keras_embedding.ckpt'),
epoch)
i += self.batch_size
def on_train_end(self, logs=None):
self.writer.close()
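# Illustrative usage sketch (not part of the original module); `model`, the
# training/validation arrays and the log directory below are placeholders:
#
#   tb = TensorBoard(log_dir='/tmp/logs', histogram_freq=1, write_graph=True)
#   model.fit(x_train, y_train, validation_data=(x_val, y_val), callbacks=[tb])
#
# and the run can then be inspected with `tensorboard --logdir=/tmp/logs`.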
| apache-2.0 | -9,018,497,592,439,042,000 | 41.162736 | 102 | 0.644068 | false |
LettError/filibuster | Lib/filibuster/content/music.py | 1 | 9619 | # -*- coding: UTF-8 -*-
"""
history
Music genres, performers, recordings, venues
--------------------------------------------------------------------
3.0.0 - split all the content into babycontents
evb - note: only one dictionary named 'content' allowed per module
this limitation is to speed up loading
"""
__version__ = '3.0.0'
__author__ = "someone"
# ------------------------------------------------------
# music
#
content = {
'pop_genre': [
'<#pop_genre_px#><#pop_genre_sx#>',
'<#pop_genre_px#><#pop_genre_sx#>',
'<#pop_genre_px#><#pop_genre_sx#>',
'<#pop_genre_px#>-<#pop_genre_sx#>',
'<#pop_genre_px#>-<#pop_genre_sx#>',
'<#pop_genre_px#>-<#pop_genre_sx#>',
'<#pop_genre_px#><#pop_genre_sx#><#pop_genre_sx2#>',
'<#pop_genre_px#>-<#pop_genre_px#>-<#pop_genre_sx#>',
],
'pop_location': [
"New York",
"London",
"Liverpool",
"Amsterdam",
'Berlin',
'Chicago',
'Ibiza',
],
'pop_instrument': [
'sings',
'drums', "congas",
'bass', 'acoustic bass',
'guitar', 'mandolin',
],
'pop_names_people': [
'<#pop_names_first#>'
],
'pop_names_first': [
'<#names_first#>',
'<#names_first_absurdlyBritish#>',
'<#names_first_absurdlyGerman#>',
'<#names_first_female#>',
'<#names_first_male#>',
'<#names_first_purewhitetrash#>',
],
'pop_names_groups_classic': [
'<#pop_names_backing_classic#>',
'<#pop_names_people#> and the <#!^,pop_names_backing_classic#>',
'<#pop_names_people#> & the <#!^,pop_names_backing_classic#>',
],
'pop_names_backing_classic': [
'<#war_militias#>',
'Commitments', 'Communists', 'Republicans', 'Democrats',
'Things', 'Stopsigns', 'Accidents', 'Replacements',
'Village People', 'Monsters', 'Madmen', 'Rangers', 'Cosmonauts',
'Presidents',
],
'pop_genre_px': [
'easy', 'cosy', 'cuddly', 'classic',
"ambient", "bleep", "beat", "brit", "chicago", "death", "def", "druggy", "disco", "dub", "electro", "extended",
"feedback", "folk", "fox", "fresh", "garage", "industrial", "jangle", "jazz", 'casiotone', 'sample', 'digital',
"maxi", "mega", "metal", "MIDI", "new", "old-school", "super", "speed", "street", "surf",
"synth", "twang", ],
'pop_genre_sx': [
"house", "dance", "acid", "sound", "wave", "techno",
"thrash", "trash", "rap", "roots", "rock", 'hiphop', 'bebop'
"glam", "goth", ],
'pop_genre_sx2': [
"-adelic", "-core", "-pop",
],
'classic_genre': [],
'classic_oevrecounter': ['No.<-randint(1, 20)->'],
'classic_opus': ['op.<-randint(1, 20)->'],
'classic_work_nickname': ['Taras Bulba', 'Moonlight', 'Seguidilla', 'Unvolendete'],
'classic_work_name': [
'"<#!^,time_seasons#>"',
'"<#!^,lit_mythology#>"',
'"<#!^,sci_astro_planets#>"',
'"<#!^,classic_work_nickname#>"',
],
'classic_work': [
'<#classic_work_numbered#> in <#classic_key#> for <#classic_instrument#> and <#classic_instrument#>',
'<#classic_work_numbered#> in <#classic_key#> for <#classic_instrument#>',
'<#classic_work_section#> of the <#classic_work_numbered#>',
'<#classic_work_kind#> in <#classic_key#>, from <#!^, classic_work_name#>',
],
'classic_composer': [
# insert russian names here!
'Prokofiev',
'<#name_french#>',
'Beethoven', 'Bach', 'Mozart', 'Monteverdi', 'Schostakovitch',
'Satie', 'Rachmaninov', 'Hindemith', 'Janacek', 'Satie', 'Sousa',
'Telemann', 'Vivaldi', 'Paganini', 'Puccini', 'Moussorgski',
'Wagner', 'Sibelius', 'Villa-Lobos'
],
'classic_classification': ['', '', '', '', '', '', '', '', '', 'BWV<-randint(100,300)->', 'KV<-randint(100,300)->'],
'classic_instrument': ['<#classic_instrument_traditional#>','<#classic_instrument_traditional#>','<#classic_instrument_odd#>',],
'classic_instrument_traditional': [
'orchestra', 'piano', 'violin', 'horn', 'flute', 'organ',
'harp', 'harpsichord', 'choir', 'boys choir'
],
'classic_instrument_odd': [
'fiddle', 'theremin', 'cat', 'birdwhistle', 'fat lady', 'piccolo', 'saw'
],
'classic_work_section': [
'suite', 'overture', 'presto',
'largo<#classic_work_interjection#>',
'adagio<#classic_work_interjection#>',
'scherzo<#classic_work_interjection#>',
'allegro<#classic_work_interjection#>',
],
'classic_work_interjection': ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '',
' ma non troppo,', ' molto vivace,', ' molto e cantabile,', ' un poco maestoso,'],
'classic_work_numbered': [
'<#^,num_ord#> <#classic_work_kind#>',
'<#classic_work_kind#> <#classic_oevrecounter#>',
'<#num_ord#> <#classic_work_kind#>',
'<#classic_work_kind#> <#classic_oevrecounter#>',
'<#^,num_ord#> <#classic_work_kind#>',
'<#classic_work_kind#> <#classic_oevrecounter#>',
'<#^,num_ord#> <#classic_work_kind#> <#classic_opus#>',
'<#classic_work_kind#> <#classic_oevrecounter#> <#classic_opus#>',
],
'classic_work_kind': [
'rhapsody',
'symphony',
'sonata',
'etude',
'concerto',
],
'classic_chord': ['A', 'B', 'C', 'D', 'E', 'F'],
'classic_key': ['<#classic_chord#>-short', '<#classic_chord#>-major', '<#classic_chord#>-minor', '<#classic_chord#>'],
'classic_orchestra_adj': ['New ', 'Radio ', 'Broadcast ', 'Historical ', '','','','','','','','','','','','','','',],
'classic_director': [
'Viktor Askenazy',
'Herbert von Karajan',
'<#names_first#> <#name_french#>',
'<#names_first#> <#name_japanese#>',
],
'classic_orchestra': [
'<#classic_orchestra_adj#><#city#> Symphonic Orchestra',
'<#classic_orchestra_adj#><#city#> Philharmonic',
'<#city#> Cacaphony',
'<#city#> Polyphony',
'<#city#> Philharmonic',
'<#city#> Sinfonia',
'Concertgebouw Orchestra',
'<#classic_orchestra_adj#><#city#> Chamber Orchestra',
'<#university#> Marching Band',
'<#pop_names_groups_classic#>'
],
'classic_orchestra_more': [
'','','','','','',
' on authentic instruments',
' at the Royal Albert Hall',
' at Carnegie Hall',
],
'classic_recording_highbrow': [
"Works by <#classic_composer#>",
u"<#classic_composer#>’s <#classic_work#>, by the <#classic_orchestra#>. <#classic_classification#>",
u"<#classic_composer#>’s <#classic_work#>, by the <#classic_orchestra#>, directed by <#classic_director#>. <#classic_classification#>",
"<#!^^,classic_work#> by <#classic_composer#>, recorded by the <#classic_orchestra#><#classic_orchestra_more#>, conducted by <#classic_director#>. <#classic_classification#>",
"<#!^^,classic_work#> by <#classic_composer#>, recorded by the <#classic_orchestra#><#classic_orchestra_more#>. <#classic_classification#>",
"<#!^^,classic_work#> by <#classic_composer#>, a recording by the <#classic_orchestra#><#classic_orchestra_more#>. <#classic_classification#>"
],
'classic_recording_lowbrow': [
"<#name#> Goes Classic",
"<#name#> Goes Classic <#num_roman#>",
"<#classic_composer#> for Dummies",
"Pre-natal <#classic_composer#>",
"<#classic_composer#> For Massage - Music With A Soft Gentle Touch",
"<#classic_composer#> At Bedtime",
"<#classic_composer#> For Relaxation",
"<#classic_composer#> For Your Baby",
"<#classic_composer#> Greatest Hits",
"<#classic_orchestra#> Hollywood Tunes",
'Music for the <#classic_composer#> Effect',
'Portrait of <#classic_composer#>',
'The <#classic_composer#> Collection',
'<#classic_composer#>: The Works',
'<#classic_orchestra#> Music For Commuters',
],
}
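# For illustration (not part of the original data): the filibuster engine
# expands an entry such as '<#pop_genre_px#>-<#pop_genre_sx#>' by recursively
# substituting a random pick from each referenced list, so it can come out as
# e.g. 'brit-house' or 'death-techno'; '<-randint(1, 20)->' embeds a random
# integer in the same way.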
| mit | 9,120,599,643,572,176,000 | 47.807107 | 191 | 0.443994 | false |
barentsen/reproject | reproject/mosaicking/background.py | 1 | 4696 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from math import exp
import numpy as np
__all__ = ['solve_corrections_sgd', 'determine_offset_matrix']
def determine_offset_matrix(arrays):
"""
Given a list of ReprojectedArraySubset, determine the offset
matrix between all arrays.
"""
N = len(arrays)
# Set up matrix to record differences
offset_matrix = np.ones((N, N)) * np.nan
# Loop over all pairs of images and check for overlap
for i1, array1 in enumerate(arrays):
for i2, array2 in enumerate(arrays):
if i2 <= i1:
continue
if array1.overlaps(array2):
difference = array1 - array2
if np.any(difference.footprint):
values = difference.array[difference.footprint]
offset_matrix[i1, i2] = np.median(values)
offset_matrix[i2, i1] = -offset_matrix[i1, i2]
return offset_matrix
def solve_corrections_sgd(offset_matrix, eta_initial=1, eta_half_life=100,
rtol=1e-10, atol=0):
r"""
Given a matrix of offsets from each image to each other image, find the
optimal offsets to use using Stochastic Gradient Descent.
Given N images, we can construct an NxN matrix Oij giving the typical (e.g.
mean, median, or other statistic) offset from each image to each other
image. This can be a reasonably sparse matrix since not all images
necessarily overlap. From this we then want to find a vector of N
corrections Ci to apply to each image to minimize the differences.
We do this by using the Stochastic Gradient Descent algorithm:
https://en.wikipedia.org/wiki/Stochastic_gradient_descent
    Essentially what we are trying to minimize is the difference between Oij
    and a matrix of the same shape constructed from the Ci values.
The learning rate is decreased using a decaying exponential:
$$\eta = \eta_{\rm initial} * \exp{(-i/t_{\eta})}$$
Parameters
----------
offset_matrix : `~numpy.ndarray`
The NxN matrix giving the offsets between all images (or NaN if
an offset could not be determined)
eta_initial : float
The initial learning rate to use
eta_half_life : float
The number of iterations after which the learning rate should be
decreased by a factor $e$.
rtol : float
The relative tolerance to use to determine if the corrections have
converged.
atol : float
The absolute tolerance to use to determine if the corrections have
converged.
"""
if (offset_matrix.ndim != 2 or
offset_matrix.shape[0] != offset_matrix.shape[1]):
raise ValueError("offset_matrix should be a square NxN matrix")
N = offset_matrix.shape[0]
indices = np.arange(N)
corrections = np.zeros(N)
# Keep track of previous corrections to know whether the algorithm
# has converged
previous_corrections = None
for iteration in range(int(eta_half_life * 10)):
# Shuffle the indices to avoid cyclical behavior
np.random.shuffle(indices)
# Update learning rate
eta = eta_initial * exp(-iteration/eta_half_life)
# Loop over each index and update the offset. What we call rows and
# columns is arbitrary, but for the purpose of the comments below, we
# treat this as iterating over rows of the matrix.
for i in indices:
if np.isnan(corrections[i]):
continue
# Since the matrix might be sparse, we consider only columns which
# are not NaN
keep = ~np.isnan(offset_matrix[i, :])
# Compute the row of the offset matrix one would get with the
# current offsets
fitted_offset_matrix_row = corrections[i] - corrections[keep]
# The difference between the actual row in the matrix and this
# fitted row gives us a measure of the gradient, so we then
# adjust the solution in that direction.
corrections[i] += eta * np.mean(offset_matrix[i, keep]
- fitted_offset_matrix_row)
# Subtract the mean offset from the offsets to make sure that the
# corrections stay centered around zero
corrections -= np.nanmean(corrections)
if previous_corrections is not None:
if np.allclose(corrections, previous_corrections,
rtol=rtol, atol=atol):
break # the algorithm has converged
previous_corrections = corrections.copy()
return corrections
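# Illustrative sketch (not part of the original module) of how the two
# functions above fit together; `arrays` is assumed to be the list of
# ReprojectedArraySubset objects produced elsewhere in this subpackage:
#
#   offset_matrix = determine_offset_matrix(arrays)
#   corrections = solve_corrections_sgd(offset_matrix)
#   # corrections[i] is the background level to subtract from arrays[i]
#   # before co-adding.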
| bsd-3-clause | 1,066,453,239,772,210,600 | 34.847328 | 79 | 0.635009 | false |
ploneintranet/ploneintranet.workspace | src/ploneintranet/workspace/tests/test_viewlets.py | 1 | 3061 | from collective.workspace.interfaces import IWorkspace
from plone import api
from ploneintranet.workspace.browser.viewlets import JoinViewlet
from ploneintranet.workspace.tests.base import BaseTestCase
from ploneintranet.workspace.browser.viewlets import SharingViewlet
from ploneintranet.workspace.policies import PARTICIPANT_POLICY
class TestSelfJoinViewlet(BaseTestCase):
def setUp(self):
super(TestSelfJoinViewlet, self).setUp()
self.login_as_portal_owner()
self.workspace = api.content.create(
self.portal,
'ploneintranet.workspace.workspacefolder',
'demo-workspace',
title='Demo Workspace'
)
self.folder = api.content.create(
self.workspace,
'Folder',
'inner-one',
title='Inner folder'
)
self.user = api.user.create(
email='[email protected]',
username='demo',
password='demon',
)
def test_viewlet_invisible_while_not_in_workspace(self):
self.workspace.join_policy = 'self'
self.workspace.visibility = 'open'
viewlet = JoinViewlet(self.portal, self.request, None, None)
self.assertFalse(viewlet.visible())
def test_viewlet_invisible_in_other_than_self_join_policy(self):
viewlet = JoinViewlet(self.folder, self.request, None, None)
self.assertTrue(viewlet.in_workspace())
self.assertFalse(viewlet.visible())
def test_viewlet_invisible_if_user_is_member(self):
self.workspace.join_policy = 'self'
self.workspace.visibility = 'open'
viewlet = JoinViewlet(self.folder, self.request, None, None)
IWorkspace(self.workspace).add_to_team(user='demo')
self.login('demo')
self.assertFalse(viewlet.visible())
def test_viewlet_visibility(self):
viewlet = JoinViewlet(self.folder, self.request, None, None)
self.workspace.join_policy = 'self'
self.workspace.visibility = 'open'
self.login('demo')
self.assertTrue(viewlet.visible())
def test_viewlet(self):
viewlet = JoinViewlet(self.folder, self.request, None, None)
url = '%s/%s' % (self.workspace.absolute_url(), 'joinme')
self.assertEqual(viewlet.join_url(), url)
class TestSharingViewlet(BaseTestCase):
def setUp(self):
super(TestSharingViewlet, self).setUp()
self.portal = self.layer['portal']
self.request = self.layer['request']
self.login_as_portal_owner()
self.workspace = api.content.create(
self.portal,
'ploneintranet.workspace.workspacefolder',
'demoworkspace',
title='Demo Workspace'
)
def test_viewlet_message_is_correct(self):
self.workspace.participant_policy = 'moderators'
viewlet = SharingViewlet(self.workspace, self.request, None, None)
self.assertEqual(
viewlet.active_participant_policy(),
PARTICIPANT_POLICY['moderators']['title']
)
| gpl-2.0 | 5,816,379,395,865,682,000 | 35.440476 | 74 | 0.642274 | false |
cseed/hail | batch/batch/worker/flock.py | 1 | 1551 | import fcntl
import os
import argparse
import subprocess as sp
from pathlib import Path
from hailtop.utils import blocking_to_async
class Flock:
def __init__(self, path, pool=None, nonblock=False):
self.path = Path(path).resolve()
self.lock_path = self.path.parent
self.pool = pool
self.flock_flags = fcntl.LOCK_EX
if nonblock:
self.flock_flags |= fcntl.LOCK_NB
self.fd = -1
def __enter__(self):
self.lock_path.mkdir(parents=True, exist_ok=True)
self.fd = os.open(self.lock_path, os.O_RDONLY)
fcntl.flock(self.fd, self.flock_flags)
return self
def __exit__(self, type, value, traceback):
fcntl.flock(self.fd, fcntl.LOCK_UN)
os.close(self.fd)
async def __aenter__(self):
assert self.pool
return await blocking_to_async(self.pool, self.__enter__)
async def __aexit__(self, exc_type, exc_val, exc_tb):
assert self.pool
return await blocking_to_async(self.pool, self.__exit__, exc_type, exc_val, exc_tb)
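# Illustrative sketch (not part of the original module): from async code the
# lock is taken through the pool-backed context manager, e.g.
#
#   async with Flock('/path/to/lock', pool=worker_thread_pool):
#       ...  # critical section
#
# while the __main__ block below provides the blocking command-line flavour
# (`python flock.py /path/to/lock -c 'some-command'`).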
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('path', type=str)
parser.add_argument('-c', dest='command', type=str, required=True)
parser.add_argument('-n', dest='nonblock', action='store_true')
args = parser.parse_args()
    with Flock(args.path, nonblock=args.nonblock):
try:
sp.check_output(args.command, stderr=sp.STDOUT, shell=True)
except sp.CalledProcessError as e:
print(e.output)
raise e
| mit | 8,024,200,573,664,678,000 | 29.411765 | 91 | 0.615087 | false |
broadinstitute/toothpick | toothpick/errors.py | 1 | 2792 | import inflection
class Errors(object):
'''
The `Errors` object is a container of validation errors stored on an
instance of a :class:`Base` subclass. An object's validation
state (and the response to :meth:`Base.valid()`) is governed by the number
of errors collected: 0 means valid, and more means invalid.
The errors are represented as tuples of (attribute_name, error_message).
This class provides several methods for accessing the errors it stores:
>>> model.errors
[('name', "can't be blank"), ('age', "must be greater than 18")]
>>> model.errors['name']
["can't be blank"]
>>> model.errors.messages()
["Name can't be blank", "Age must be greater than 18"]
>>> model.errors.messages('name')
["Name can't be blank"]
'''
def __init__(self):
self.clear()
def __getitem__(self, attribute_name):
'''
Returns a list of error messages for a given attribute.
'''
return [i[1] for i in self._store if i[0] == attribute_name]
def __len__(self):
return len(self._store)
def __iter__(self):
return iter(self._store)
def __repr__(self):
return repr(self._store)
def add(self, field_name, error_msg):
'''
Adds an error to this instance.
'''
self._store.append((field_name, error_msg))
def clear(self):
'''
Removes all errors from this instance
'''
self._store = []
def messages(self, attribute_name=None):
'''
Returns a humanized string representing the errors contained in this
instance. if `attribute_name` is passed, then only errors for that
attribute are considered.
'''
if attribute_name:
return [self._messagify(i[0], i[1], inflection.humanize)
for i in self._store if i[0] == attribute_name]
else:
return [self._messagify(i[0], i[1], inflection.humanize)
for i in self._store]
def api_messages(self, attribute_name=None):
'''
Like `#messages()`, but keeps the field names in their non-humanized form.
'''
if attribute_name:
return [self._messagify(i[0], i[1]) for i in self._store
if i[0] == attribute_name]
else:
return [self._messagify(i[0], i[1]) for i in self._store]
@classmethod
def _messagify(cls, field_name, error_msg, transformer=None):
if not transformer:
transformer = lambda x: "'%s'" % x
if not field_name:
return error_msg
return "%s %s" % (
transformer(field_name),
error_msg
)
| bsd-3-clause | 3,059,718,520,421,041,700 | 27.489796 | 82 | 0.55659 | false |
deapplegate/wtgpipeline | cal_extinctions.save.py | 1 | 3796 | import threading
def ebv_list(list_submit,list_dict,i,ppid):
import os
lineindex = 0
timehold = time.time()
list_out = []
out = open('/tmp/tmpf_' + str(i) + '_' + str(ppid),'w')
for line in list_submit:
tt = re.split('\s+',line)
ra = float(tt[0])
dec = float(tt[1])
EBV = calc_EBV(float(ra),float(dec),i)
list_out.append(EBV)
#print EBV
lineindex += 1
out.write(str(EBV) + '\n')
if lineindex % 100 == 0:
print 'thread ' + str(i), lineindex, len(list_submit), time.time() - timehold
timehold = time.time()
list_dict[str(i)]['list_out'] = list_out
out.close()
def calc_EBV(coord_in_ra,coord_in_dec,i):
#coord_in_ra='12:51:26.28'
#coord_in_dec='27:07:42.'
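    # Convert the decimal-degree equatorial coordinates to galactic
    # longitude/latitude with pyephem, then query the Schlegel et al. (SFD)
    # dust maps through the external `dust_getval` tool to obtain E(B-V)
    # along that line of sight.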
coord = Equatorial( str(coord_in_ra*(24./360.)), str(coord_in_dec), epoch='2000') # input needs to be in HOURS as a STRING
g = Galactic(coord, epoch='2000') # output is in degrees not hours--it's latitude/longitude
spt = re.split('\:',str(g.lat))
#print spt, abs(float(spt[0])), float(spt[1])/60.
gallat = float(spt[0]) / abs(float(spt[0])) * (abs(float(spt[0])) + float(spt[1])/60. + float(spt[2])/3600. )
#print gallat
#print g.long
spt = re.split('\:',str(g.long))
#print spt
gallong = float(spt[0]) / abs(float(spt[0])) * (abs(float(spt[0])) + float(spt[1])/60. + float(spt[2])/3600. )
#print gallong
#coordtest = Equatorial(Galactic(g.long,g.lat, epoch='2000'), epoch='2000')
output = commands.getoutput('dust_getval ' + str(gallong) + ' ' + str(gallat) + ' interp=y ipath=/nfs/slac/g/ki/ki03/xoc/pkelly/DUST/maps_' + str(i) )
spt = re.split('\s',output)
#print spt
EBV = spt[-1]
#print EBV, float(coord_in_ra), float(coord_in_dec)
return EBV
class MyThread ( threading.Thread ):
def __init__ ( self, list_submit,list_dict, i, ppid):
self.i = i
self.list_submit = list_submit
self.list_dict = list_dict
self.ppid = ppid
threading.Thread.__init__(self)
def run ( self ):
ebv_list(self.list_submit,list_dict,self.i,self.ppid)
return
#add E(B-V) to ldac table
import re, commands, sys, bashreader, os
from ephem import *
dict = bashreader.parseFile('progs.ini')
table = sys.argv[1]
import time
tempfile = '/tmp/outkey'
ebvfile = '/tmp/outebv'
os.system('rm ' + ebvfile)
ppid = os.getppid()
print ppid
command = "ldactoasc -b -i " + table + " -t OBJECTS -k ALPHA_J2000 DELTA_J2000 > " + ebvfile
print command
os.system(command)
list = []
import re
outkey=open(tempfile,'w')
lines = open(ebvfile,'r').readlines()
number_interval = 4
length_int = len(lines)/number_interval
start = 0
my_threads = []
list_dict = {}
for i in range(number_interval):
end = start + length_int
if i + 1 == number_interval:
list_submit = lines[start:]
else:
list_submit = lines[start:end]
start = end
list_dict[str(i)] = {'list_submit':list_submit}
#s = MyThread(list_submit,list_dict,i,ppid)
#stat = os.fork()
print i, 'started'
s = os.fork()
if not s:
ebv_list(list_submit,list_dict,i,ppid)
sys.exit()
#s.start()
my_threads.append(s)
print my_threads
#print threading.enumerate()
for s in my_threads:
os.waitpid(s,0)
print 'done'
list_out = []
for i in range(number_interval):
    # the forked children cannot update list_dict in this parent process, so
    # read back the per-child results they wrote to /tmp/tmpf_<i>_<ppid>
    list_out = list_out + [l.strip() for l in open('/tmp/tmpf_' + str(i) + '_' + str(ppid)).readlines()]
print len(lines), len(list_out)
print lines[0:2], list_out[0:2]
# READ IN COLUMN INFO
for val in list_out:
outkey.write(str(val) + '\n')
outkey.close()
command = "asctoldac -i " + tempfile + " -o " + tempfile + ".cat -c " + dict['photconf'] + "/EBV.conf -t OBJECTS "
os.system(command)
command = "ldacjoinkey -o test -i " + table + " -p " + tempfile + ".cat -t OBJECTS -k EBV"
os.system(command)
| mit | -1,447,512,690,213,546,200 | 24.47651 | 159 | 0.608799 | false |
ismtabo/file-mixer | file_mixer/main_view/model.py | 1 | 5227 | import os
import stat
from .errors import NoneCurrentProblem, UnsavedModifiedProblem
from .models import FolderTreeElement, Problem, NumberedProblem
class MainViewModel:
def __init__(self):
self._path = None
self._pathtree = None
self._is_modified = False
self._problem = None
self._input_extensions = []
self._answer_extensions = []
@property
def current_path(self):
return self._path
@current_path.setter
def current_path(self, new_path):
print("Change path {} to {}".format(self._path, new_path))
self._path = new_path
self.current_pathtree = new_path
def get_file_path(self, basepath):
print("Real path for {}, in {}".format(basepath, self._path))
return os.path.join(self._path, basepath)
@property
def current_pathtree(self):
return self._pathtree
@current_pathtree.setter
def current_pathtree(self, new_path: str):
def dirwalk(path):
pathtree = []
# Iterate over the contents of the specified path
for f in os.listdir(path):
# Get the absolute path of the item
fullname = os.path.join(path, f)
# Extract metadata from the item
fdata = os.stat(fullname)
# Determine if the item is a folder
is_folder = stat.S_ISDIR(fdata.st_mode)
# If the item is a folder, descend into it
children = dirwalk(fullname) if is_folder else []
# Append the item to the TreeStore
li = FolderTreeElement(f, fdata.st_size, is_folder, fullname, children)
pathtree.append(li)
return pathtree
self._pathtree = dirwalk(new_path)
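    # For illustration: the resulting tree is a list of
    # FolderTreeElement(name, size, is_folder, full_path, children) entries,
    # where `children` is a nested list of the same shape for folders and an
    # empty list for plain files.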
@property
def current_problem(self):
if not self._problem:
raise NoneCurrentProblem('There is not current problem.')
return self._problem
def set_new_problem(self, problem):
if self._is_modified:
raise UnsavedModifiedProblem('Current problem should be saved before open new one.')
del self._problem
self._problem = problem
self._is_modified = False
def set_problem_number(self, new_problem_number):
if not self._problem:
raise NoneCurrentProblem('There is not current problem.')
self._problem.number = new_problem_number
self._is_modified = True
@property
def input_extensions(self):
return self._input_extensions
def add_input_extensions(self, *new_input_extensions):
self._input_extensions += new_input_extensions
@property
def answer_extensions(self):
return self._answer_extensions
def add_answer_extensions(self, *new_answer_extensions):
self._answer_extensions += new_answer_extensions
def add_choosen_file(self, input_file_name, answer_file_name):
print('Choosen file: ', input_file_name)
if not self._problem:
raise NoneCurrentProblem("There is not current problem.\nPlease entry problem number.")
self._problem.add_used_files(input_file_name, answer_file_name)
self._is_modified = True
def remove_choosen_file(self, file_name=None, file_index=None):
if file_name is None and file_index is None:
raise Exception('Either file name or file index has to be given')
print('Remove choosen file: ', file_name)
if not self._problem:
raise NoneCurrentProblem("There is not current problem.\nPlease entry problem number.")
self._problem.remove_used_files(file_index)
self._is_modified = True
self._problem.generate()
def sort_choosen_files(self):
if not self._problem:
raise NoneCurrentProblem("There is not current problem.\nPlease entry problem number.")
self._problem.sort()
self._problem.generate()
def shuffle_choosen_files(self):
if not self._problem:
raise NoneCurrentProblem("There is not current problem.\nPlease entry problem number.")
self._problem.shuffle()
self._problem.generate()
def clear_choosen_files(self):
if not self._problem:
raise NoneCurrentProblem("There is not current problem.\nPlease entry problem number.")
self._problem.clear()
@property
def current_problem_files_content(self):
return self._problem.files_content
@property
def current_problem_choosenfiles(self):
return self._problem.used_files
@property
def current_problem_path(self):
return self._problem.path
@current_problem_path.setter
def current_problem_path(self, new_problem_save_path):
self._problem.path = new_problem_save_path
self._is_modified = True
def set_problem_saved(self):
self._is_modified = False
def toggle_case_numbered_problem(self, case_numbered=False):
if not case_numbered:
self._problem = self.current_problem.get_problem()
else:
self._problem = NumberedProblem.from_problem(self.current_problem)
self._problem.generate()
| mit | -5,864,439,062,170,376,000 | 29.213873 | 99 | 0.626172 | false |
Ferroman/configuration.py | configuration_py/parser_lookup.py | 1 | 1521 | import inspect
import os
import pkgutil
import sys
from configuration_py.parsers.base_parser import BaseConfigParser
PARSERS_DIR_NAME = 'parsers'
def get_supported_extensions():
available_parsers = _lookup_for_available_parsers()
for parser_class in available_parsers:
for extension in parser_class.extensions:
yield extension
def get_parser(extension):
parser_class = _find_parser_class_by_extension(extension)
return parser_class()
def _find_parser_class_by_extension(extension):
available_parsers = _lookup_for_available_parsers()
for parser_class in available_parsers:
if extension in parser_class.extensions:
return parser_class
raise EnvironmentError('No parsers for {extension} found'.format(extension=extension))
def _lookup_for_available_parsers():
parsers_folder = _get_parsers_folder_path()
for importer, package_name, _ in pkgutil.iter_modules([parsers_folder]):
full_package_name = '%s.%s' % (parsers_folder, package_name)
if full_package_name not in sys.modules:
module = importer.find_module(package_name).load_module(package_name)
for _, obj in inspect.getmembers(module):
if inspect.isclass(obj) and issubclass(obj, BaseConfigParser) and obj is not BaseConfigParser:
yield obj
def _get_parsers_folder_path():
current_dir_path = os.path.dirname(os.path.realpath(__file__))
return os.path.join(current_dir_path, PARSERS_DIR_NAME)
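# Illustrative usage sketch (not part of the original module); 'yaml' is only
# an example extension, what is actually supported depends on the parser
# classes found in the parsers/ package:
#
#   for extension in get_supported_extensions():
#       print(extension)
#   parser = get_parser('yaml')  # raises EnvironmentError if nothing matches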
| mit | 4,643,955,388,156,396,000 | 30.6875 | 110 | 0.69691 | false |
plamut/superdesk-core | superdesk/ws.py | 1 | 4280 | #!/usr/bin/env python
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014, 2015 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import json
import signal
import asyncio
import logging
import websockets
import logging.handlers
beat_delay = 5
clients = set()
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
def configure_syslog(config):
"""Configure syslog logging handler.
:param config: config dictionary
"""
debug_log_format = ('%(levelname)s:%(module)s:%(message)s\n')
handler = logging.handlers.SysLogHandler(address=(config['LOG_SERVER_ADDRESS'], config['LOG_SERVER_PORT']))
handler.setFormatter(logging.Formatter(debug_log_format))
logger.addHandler(handler)
@asyncio.coroutine
def client_loop(websocket):
"""Client loop - send it ping every `beat_delay` seconds to keep it alive.
Nginx would close the connection after 2 minutes of inactivity, that's why.
Also it does the health check - if socket was closed by client it will
break the loop and let server deregister the client.
:param websocket: websocket protocol instance
"""
pings = 0
while True:
yield from asyncio.sleep(beat_delay)
if not websocket.open:
break
pings += 1
yield from websocket.send(json.dumps({'ping': pings, 'clients': len(websocket.ws_server.websockets)}))
@asyncio.coroutine
def broadcast(message):
"""Broadcast message to all clients.
:param message: message as it was received - no encoding/decoding.
"""
logger.debug('broadcast %s' % message)
for websocket in clients:
if websocket.open:
yield from websocket.send(message)
@asyncio.coroutine
def server_loop(websocket):
"""Server loop - wait for message and broadcast it.
When message is none it means the socket is closed
and there will be no messages so we break the loop.
:param websocket: websocket protocol instance
"""
while True:
message = yield from websocket.recv()
if message is None:
break
yield from broadcast(message)
def log(message, websocket):
"""Log message with some websocket data like address.
:param message: message string
:param websocket: websocket protocol instance
"""
host, port = websocket.remote_address
logger.info('%s address=%s:%s' % (message, host, port))
@asyncio.coroutine
def connection_handler(websocket, path):
"""Handle incomming connections.
When this function returns the session is over and it closes the socket,
so there must be some loops..
:param websocket: websocket protocol instance
:param path: url path used by client - used to identify client/server connections
"""
if 'server' in path:
log('server open', websocket)
yield from server_loop(websocket)
log('server done', websocket)
else:
log('client open', websocket)
clients.add(websocket)
yield from client_loop(websocket)
clients.remove(websocket)
log('client done', websocket)
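# Illustrative sketch (not part of the original module): publishers connect on
# a path containing 'server', e.g. ws://localhost:5100/server, and every
# message they send is broadcast to all consumers, which connect on any other
# path such as ws://localhost:5100/subscribe.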
def create_server(config):
"""Create websocket server and run it until it gets Ctrl+C or SIGTERM.
:param config: config dictionary
"""
try:
host = config['WS_HOST']
port = config['WS_PORT']
loop = asyncio.get_event_loop()
server = loop.run_until_complete(websockets.serve(connection_handler, host, port))
loop.add_signal_handler(signal.SIGTERM, loop.stop)
logger.info('listening on %s:%s' % (host, port))
loop.run_forever()
except KeyboardInterrupt:
pass
finally:
logger.info('closing server')
server.close()
loop.run_until_complete(server.wait_closed())
loop.stop()
loop.run_forever()
loop.close()
if __name__ == '__main__':
config = {
'WS_HOST': '0.0.0.0',
'WS_PORT': '5100',
'LOG_SERVER_ADDRESS': 'localhost',
'LOG_SERVER_PORT': '5555'
}
configure_syslog(config)
create_server(config)
| agpl-3.0 | -7,511,321,612,840,697,000 | 27.533333 | 111 | 0.662383 | false |
uclouvain/osis_louvain | base/business/education_group.py | 1 | 17182 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2018 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.core.exceptions import PermissionDenied, MultipleObjectsReturned
from django.db.models import Prefetch
from django.utils.translation import ugettext_lazy as _
from base.business.education_groups.perms import check_permission
from base.business.xls import get_name_or_username, convert_boolean
from base.models.enums import academic_calendar_type
from base.models.enums import education_group_categories
from base.models.enums import mandate_type as mandate_types
from base.models.mandate import Mandate
from base.models.offer_year_calendar import OfferYearCalendar
from base.models.person import Person
from base.models.program_manager import is_program_manager
from osis_common.document import xls_build
# List of key that a user can modify
DATE_FORMAT = '%d-%m-%Y'
DATE_TIME_FORMAT = '%d-%m-%Y %H:%M'
DESC = "desc"
WORKSHEET_TITLE = 'education_groups'
XLS_FILENAME = 'education_groups_filename'
XLS_DESCRIPTION = "list_education_groups"
EDUCATION_GROUP_TITLES = [str(_('academic_year_small')), str(_('code')), str(_('title')), str(_('type')),
str(_('entity')), str(_('code'))]
ORDER_COL = 'order_col'
ORDER_DIRECTION = 'order_direction'
#
WORKSHEET_TITLE_ADMINISTRATIVE = 'trainings'
XLS_FILENAME_ADMINISTRATIVE = 'training_administrative_data'
XLS_DESCRIPTION_ADMINISTRATIVE = "List of trainings, with administrative data"
# Column for xls with administrative data
MANAGEMENT_ENTITY_COL = 'management_entity'
TRANING_COL = 'TRAINING'
TYPE_COL = 'type'
ACADEMIC_YEAR_COL = 'Validity'
START_COURSE_REGISTRATION_COL = 'Begining of course registration'
END_COURSE_REGISTRATION_COL = 'Ending of course registration'
START_EXAM_REGISTRATION_COL = 'Begining of exam registration'
END_EXAM_REGISTRATION_COL = 'Ending of exam registration'
MARKS_PRESENTATION_COL = 'marks_presentation'
DISSERTATION_PRESENTATION_COL = 'dissertation_presentation'
DELIBERATION_COL = 'DELIBERATION'
SCORES_DIFFUSION_COL = 'scores_diffusion'
WEIGHTING_COL = 'Weighting'
DEFAULT_LEARNING_UNIT_ENROLLMENT_COL = 'Default learning unit enrollment'
CHAIR_OF_THE_EXAM_BOARD_COL = 'chair_of_the_exam_board'
EXAM_BOARD_SECRETARY_COL = 'exam_board_secretary'
EXAM_BOARD_SIGNATORY_COL = 'Exam board signatory'
SIGNATORY_QUALIFICATION_COL = 'signatory_qualification'
SESSIONS_COLUMNS = 'sessions_columns'
SESSIONS_NUMBER = 3
SESSION_HEADERS = [
START_EXAM_REGISTRATION_COL,
END_EXAM_REGISTRATION_COL,
MARKS_PRESENTATION_COL,
DISSERTATION_PRESENTATION_COL,
DELIBERATION_COL,
SCORES_DIFFUSION_COL
]
EDUCATION_GROUP_TITLES_ADMINISTRATIVE = [
MANAGEMENT_ENTITY_COL,
TRANING_COL,
TYPE_COL,
ACADEMIC_YEAR_COL,
START_COURSE_REGISTRATION_COL,
END_COURSE_REGISTRATION_COL,
    SESSIONS_COLUMNS, # this column is duplicated SESSIONS_NUMBER times [content: SESSION_HEADERS]
WEIGHTING_COL,
DEFAULT_LEARNING_UNIT_ENROLLMENT_COL,
CHAIR_OF_THE_EXAM_BOARD_COL,
EXAM_BOARD_SECRETARY_COL,
EXAM_BOARD_SIGNATORY_COL,
SIGNATORY_QUALIFICATION_COL,
]
SIGNATORIES = 'signatories'
SECRETARIES = 'secretaries'
PRESIDENTS = 'presidents'
NUMBER_SESSIONS = 3
def can_user_edit_administrative_data(a_user, an_education_group_year, raise_exception=False):
"""
    Editing administrative data is allowed for users who have the permission AND:
    if CENTRAL_MANAGER: check the attached entities [person_entity]
    else: check whether the user is a program manager of the education group
    """
    # Accept either a Person or a User instance so both kinds of callers keep working
if isinstance(a_user, Person):
person = a_user
a_user = person.user
else:
person = Person.objects.get(user=a_user)
if not check_permission(person, "base.can_edit_education_group_administrative_data", raise_exception):
return False
if person.is_central_manager() and _is_management_entity_linked_to_user(person, an_education_group_year):
return True
return is_program_manager(a_user, education_group=an_education_group_year.education_group)
def _is_management_entity_linked_to_user(person, an_education_group_year):
return person.is_attached_entity(an_education_group_year.management_entity)
def assert_category_of_education_group_year(education_group_year, authorized_categories):
if education_group_year.education_group_type.category not in authorized_categories:
raise PermissionDenied("Education group category is not correct.")
def create_xls(user, found_education_groups_param, filters, order_data):
found_education_groups = ordering_data(found_education_groups_param, order_data)
working_sheets_data = prepare_xls_content(found_education_groups)
parameters = {xls_build.DESCRIPTION: XLS_DESCRIPTION,
xls_build.USER: get_name_or_username(user),
xls_build.FILENAME: XLS_FILENAME,
xls_build.HEADER_TITLES: EDUCATION_GROUP_TITLES,
xls_build.WS_TITLE: WORKSHEET_TITLE}
return xls_build.generate_xls(xls_build.prepare_xls_parameters_list(working_sheets_data, parameters), filters)
def prepare_xls_content(found_education_groups):
return [extract_xls_data_from_education_group(eg) for eg in found_education_groups]
def extract_xls_data_from_education_group(an_education_group):
return [
an_education_group.academic_year.name,
an_education_group.acronym,
an_education_group.title,
an_education_group.education_group_type,
an_education_group.management_entity_version.acronym,
an_education_group.partial_acronym
]
def ordering_data(object_list, order_data):
order_col = order_data.get(ORDER_COL)
order_direction = order_data.get(ORDER_DIRECTION)
reverse_direction = order_direction == DESC
return sorted(list(object_list), key=lambda t: _get_field_value(t, order_col), reverse=reverse_direction)
def _get_field_value(instance, field):
field_path = field.split('.')
attr = instance
for elem in field_path:
try:
attr = getattr(attr, elem) or ''
except AttributeError:
return None
return attr
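# Illustrative sketch (not part of the original module): ordering_data() sorts
# on a dotted attribute path resolved by _get_field_value(), so callers can
# order on fields of related objects. The field name below is only an example
# of such a path, not a value mandated by this module.
#
#   order_data = {ORDER_COL: 'academic_year.name', ORDER_DIRECTION: DESC}
#   ordered = ordering_data(education_group_years, order_data)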
def create_xls_administrative_data(user, education_group_years_qs, filters, order_data):
    # Use select_related/prefetch_related in order to keep the number of DB hits low
education_group_years = education_group_years_qs.filter(
education_group_type__category=education_group_categories.TRAINING
).select_related(
'education_group_type',
'academic_year',
).prefetch_related(
Prefetch(
'education_group__mandate_set',
queryset=Mandate.objects.prefetch_related('mandatary_set')
),
Prefetch(
'offeryearcalendar_set',
queryset=OfferYearCalendar.objects.select_related('academic_calendar__sessionexamcalendar')
)
)
found_education_groups = ordering_data(education_group_years, order_data)
working_sheets_data = prepare_xls_content_administrative(found_education_groups)
header_titles = _get_translated_header_titles()
parameters = {
xls_build.DESCRIPTION: XLS_DESCRIPTION_ADMINISTRATIVE,
xls_build.USER: get_name_or_username(user),
xls_build.FILENAME: XLS_FILENAME_ADMINISTRATIVE,
xls_build.HEADER_TITLES: header_titles,
xls_build.WS_TITLE: WORKSHEET_TITLE_ADMINISTRATIVE
}
return xls_build.generate_xls(xls_build.prepare_xls_parameters_list(working_sheets_data, parameters), filters)
def _get_translated_header_titles():
    translated_headers = []
    for title in EDUCATION_GROUP_TITLES_ADMINISTRATIVE:
        if title != SESSIONS_COLUMNS:
            translated_headers.append(str(_(title)))
        else:
            translated_headers.extend(_get_translated_header_session_columns())
    return translated_headers
def _get_translated_header_session_columns():
translated_session_headers = []
for title in SESSION_HEADERS:
translated_session_headers.append(str(_(title)))
    # Duplicate the translated headers once per session and append the session number to each title
all_headers_sessions = []
for session_number in range(1, SESSIONS_NUMBER + 1):
all_headers_sessions += ["{} {} ".format(translated_title, session_number) for translated_title in
translated_session_headers]
return all_headers_sessions
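# Illustrative sketch (not part of the original module): with SESSIONS_NUMBER = 3
# the function above returns one full block of SESSION_HEADERS per session, each
# title suffixed with its session number (translation omitted here for brevity):
#
#   ['Begining of exam registration 1 ', 'Ending of exam registration 1 ', ...,
#    'Begining of exam registration 2 ', ..., 'scores_diffusion 3 ']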
def prepare_xls_content_administrative(education_group_years):
xls_data = []
for education_group_year in education_group_years:
main_data = _extract_main_data(education_group_year)
administrative_data = _extract_administrative_data(education_group_year)
mandatary_data = _extract_mandatary_data(education_group_year)
        # Merge all the dicts together and order the row by EDUCATION_GROUP_TITLES_ADMINISTRATIVE
row = _convert_data_to_xls_row(
education_group_year_data={**main_data, **administrative_data, **mandatary_data},
header_list=EDUCATION_GROUP_TITLES_ADMINISTRATIVE
)
xls_data.append(row)
return xls_data
def _extract_main_data(an_education_group_year):
return {
MANAGEMENT_ENTITY_COL: an_education_group_year.management_entity_version.acronym,
TRANING_COL: an_education_group_year.acronym,
TYPE_COL: an_education_group_year.education_group_type,
ACADEMIC_YEAR_COL: an_education_group_year.academic_year.name,
WEIGHTING_COL: convert_boolean(an_education_group_year.weighting),
DEFAULT_LEARNING_UNIT_ENROLLMENT_COL: convert_boolean(an_education_group_year.default_learning_unit_enrollment)
}
def _extract_administrative_data(an_education_group_year):
course_enrollment_calendar = _get_offer_year_calendar_from_prefetched_data(
an_education_group_year,
academic_calendar_type.COURSE_ENROLLMENT
)
administrative_data = {
START_COURSE_REGISTRATION_COL: _format_date(course_enrollment_calendar, 'start_date', DATE_FORMAT),
END_COURSE_REGISTRATION_COL: _format_date(course_enrollment_calendar, 'end_date', DATE_FORMAT),
SESSIONS_COLUMNS: [
_extract_session_data(an_education_group_year, session_number) for
session_number in range(1, SESSIONS_NUMBER + 1)
]
}
return administrative_data
def _extract_session_data(education_group_year, session_number):
session_academic_cal_type = [
academic_calendar_type.EXAM_ENROLLMENTS,
academic_calendar_type.SCORES_EXAM_SUBMISSION,
academic_calendar_type.DISSERTATION_SUBMISSION,
academic_calendar_type.DELIBERATION,
academic_calendar_type.SCORES_EXAM_DIFFUSION
]
offer_year_cals = {}
for academic_cal_type in session_academic_cal_type:
offer_year_cals[academic_cal_type] = _get_offer_year_calendar_from_prefetched_data(
education_group_year,
academic_cal_type,
session_number=session_number
)
return {
START_EXAM_REGISTRATION_COL: _format_date(offer_year_cals[academic_calendar_type.EXAM_ENROLLMENTS],
'start_date', DATE_FORMAT),
END_EXAM_REGISTRATION_COL: _format_date(offer_year_cals[academic_calendar_type.EXAM_ENROLLMENTS], 'end_date',
DATE_FORMAT),
MARKS_PRESENTATION_COL: _format_date(offer_year_cals[academic_calendar_type.SCORES_EXAM_SUBMISSION],
'start_date', DATE_FORMAT),
DISSERTATION_PRESENTATION_COL: _format_date(offer_year_cals[academic_calendar_type.DISSERTATION_SUBMISSION],
'start_date', DATE_FORMAT),
DELIBERATION_COL: _format_date(offer_year_cals[academic_calendar_type.DELIBERATION], 'start_date',
DATE_TIME_FORMAT),
SCORES_DIFFUSION_COL: _format_date(offer_year_cals[academic_calendar_type.SCORES_EXAM_DIFFUSION], 'start_date',
DATE_TIME_FORMAT),
}
def _extract_mandatary_data(education_group_year):
representatives = {mandate_types.PRESIDENT: [], mandate_types.SECRETARY: [], mandate_types.SIGNATORY: []}
for mandate in education_group_year.education_group.mandate_set.all():
representatives = _get_representatives(education_group_year, mandate, representatives)
return {
CHAIR_OF_THE_EXAM_BOARD_COL: names(representatives[mandate_types.PRESIDENT]),
EXAM_BOARD_SECRETARY_COL: names(representatives[mandate_types.SECRETARY]),
EXAM_BOARD_SIGNATORY_COL: names(representatives[mandate_types.SIGNATORY]),
SIGNATORY_QUALIFICATION_COL: qualification(representatives[mandate_types.SIGNATORY]),
}
def _get_representatives(education_group_year, mandate, representatives_param):
representatives = representatives_param
for mandataries in mandate.mandatary_set.all():
if _is_valid_mandate(mandataries, education_group_year):
if mandataries.mandate.function == mandate_types.PRESIDENT:
representatives.get(mandate_types.PRESIDENT).append(mandataries)
if mandataries.mandate.function == mandate_types.SECRETARY:
representatives.get(mandate_types.SECRETARY).append(mandataries)
if mandataries.mandate.function == mandate_types.SIGNATORY:
representatives.get(mandate_types.SIGNATORY).append(mandataries)
return representatives
def _convert_data_to_xls_row(education_group_year_data, header_list):
xls_row = []
for header in header_list:
if header == SESSIONS_COLUMNS:
session_datas = education_group_year_data.get(header, [])
xls_row.extend(_convert_session_data_to_xls_row(session_datas))
else:
value = education_group_year_data.get(header, '')
xls_row.append(value)
return xls_row
def _convert_session_data_to_xls_row(session_datas):
xls_session_rows = []
for session_number in range(0, SESSIONS_NUMBER):
session_formated = _convert_data_to_xls_row(session_datas[session_number], SESSION_HEADERS)
xls_session_rows.extend(session_formated)
return xls_session_rows
def _get_offer_year_calendar_from_prefetched_data(an_education_group_year, academic_calendar_type, session_number=None):
offer_year_cals = _get_all_offer_year_calendar_from_prefetched_data(
an_education_group_year,
academic_calendar_type
)
if session_number:
offer_year_cals = [
offer_year_cal for offer_year_cal in offer_year_cals
if offer_year_cal.academic_calendar.sessionexamcalendar and
offer_year_cal.academic_calendar.sessionexamcalendar.number_session == session_number
]
if len(offer_year_cals) > 1:
raise MultipleObjectsReturned
return offer_year_cals[0] if offer_year_cals else None
def _get_all_offer_year_calendar_from_prefetched_data(an_education_group_year, academic_calendar_type):
return [
offer_year_calendar for offer_year_calendar in an_education_group_year.offeryearcalendar_set.all()
if offer_year_calendar.academic_calendar.reference == academic_calendar_type
]
def _format_date(obj, date_key, date_form):
date = getattr(obj, date_key, None) if obj else None
if date:
return date.strftime(date_form)
return '-'
def _is_valid_mandate(mandate, education_group_yr):
return mandate.start_date <= education_group_yr.academic_year.start_date and \
mandate.end_date >= education_group_yr.academic_year.end_date
def names(representatives):
return ', '.join(sorted(str(mandatory.person.full_name) for mandatory in representatives))
def qualification(signatories):
return ', '.join(sorted(signatory.mandate.qualification for signatory in signatories
if signatory.mandate.qualification))
| agpl-3.0 | -4,671,695,049,412,746,000 | 41.422222 | 120 | 0.693964 | false |
AXAz0r/apex-sigma-core | sigma/core/mechanics/event.py | 1 | 2405 | import discord
from sigma.core.mechanics.logger import create_logger
class SigmaEvent(object):
def __init__(self, bot, event, plugin_info, event_info):
self.bot = bot
self.db = self.bot.db
self.event = event
self.plugin_info = plugin_info
self.event_info = event_info
self.event_type = self.event_info['type']
self.alts = None
self.usage = None
self.desc = None
self.name = self.event_info['name']
self.category = self.plugin_info['category']
self.log = create_logger(self.name.upper())
self.nsfw = False
self.owner = False
self.partner = False
self.dmable = False
self.requirements = None
self.insert_command_info()
def insert_command_info(self):
if 'alts' in self.event_info:
self.alts = self.event_info['alts']
if 'usage' in self.event_info:
self.usage = self.event_info['usage']
self.usage = self.usage.replace('{pfx}', self.bot.cfg.pref.prefix)
self.usage = self.usage.replace('{cmd}', self.name)
if 'desc' in self.event_info:
self.desc = self.event_info['desc']
if 'requirements' in self.event_info:
self.requirements = self.event_info['requirements']
if 'permissions' in self.event_info:
permissions = self.event_info['permissions']
if 'nsfw' in permissions:
self.nsfw = permissions['nsfw']
if 'owner' in permissions:
self.owner = permissions['owner']
if 'partner' in permissions:
self.partner = permissions['partner']
if 'dmable' in permissions:
self.dmable = permissions['dmable']
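    # Illustrative sketch (not part of the original module): the shape of
    # event_info that insert_command_info() consumes. The keys are the ones
    # checked above; the values are made-up examples.
    #
    #   event_info = {
    #       'name': 'greet',
    #       'type': 'message',
    #       'alts': ['hello'],
    #       'usage': '{pfx}{cmd} <user>',
    #       'desc': 'Greets a user.',
    #       'permissions': {'nsfw': False, 'owner': False, 'dmable': True}
    #   }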
def get_exception(self):
if self.bot.cfg.pref.dev_mode:
cmd_exception = SyntaxError
else:
cmd_exception = Exception
return cmd_exception
def log_error(self, exception):
log_text = f'ERROR: {exception} | TRACE: {exception.with_traceback}'
self.log.error(log_text)
async def execute(self, *args):
if self.bot.ready:
try:
await getattr(self.event, self.name)(self, *args)
except discord.Forbidden:
pass
except self.get_exception() as e:
self.log_error(e)
| gpl-3.0 | 834,906,843,449,019,000 | 34.895522 | 78 | 0.569231 | false |
openwfm/wrfxweb | src/utils.py | 1 | 4271 | from __future__ import absolute_import
import fcntl, errno, logging, json, os, sys
import os.path as osp
class Dict(dict):
"""
A dictionary that allows member access to its keys.
A convenience class.
"""
def __init__(self, d):
"""
Updates itself with d.
"""
self.update(d)
def __getattr__(self, item):
return self[item]
def __setattr__(self, item, value):
self[item] = value
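# Illustrative sketch (not part of the original module): Dict mirrors attribute
# access onto the underlying dict keys. The path below is a made-up example.
#
#   cfg = Dict({'sys_install_path': '/tmp/wrfxweb'})
#   cfg.workspace = 'wksp'
#   assert cfg['workspace'] == 'wksp' and cfg.sys_install_path == '/tmp/wrfxweb'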
class lock():
"""
Lock file for exclusive access
"""
def __init__(self,path):
self.lock_path = path
logging.info('Initializing lock on %s' % self.lock_path)
self.lock_file=open(self.lock_path,'w',0)
self.locked=False
def islocked(self):
return(self.locked)
def acquire(self):
"""
Block until exclusive lock can be acquired.
Used before code that should be executed by one process at a time only,
such as updating the catalog.
"""
if self.locked:
logging.warning('lock.acquire: already locked %s' % self.lock_path)
try:
fcntl.flock(self.lock_file,fcntl.LOCK_EX|fcntl.LOCK_NB)
except IOError as e:
if e.errno == errno.EACCES or e.errno == errno.EAGAIN:
logging.warning('Waiting for lock on %s' % self.lock_path)
else:
logging.error("I/O error %s: %s" % (e.errno, e.strerror))
fcntl.flock(self.lock_file,fcntl.LOCK_EX)
logging.info('Acquired lock on %s' % self.lock_path)
self.locked=True
def release(self):
if not self.locked:
logging.warning('lock.release: not yet locked %s' % self.lock_path)
logging.info('Releasing lock on %s' % self.lock_path)
fcntl.flock(self.lock_file,fcntl.LOCK_UN)
self.locked=False
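# Illustrative sketch (not part of the original module): typical use of the
# lock class around a critical section such as a catalog update. The lock file
# path is a made-up example.
#
#   catalog_lock = lock('/tmp/wrfxweb_catalog.lock')
#   catalog_lock.acquire()
#   try:
#       pass  # update the catalog here, one process at a time
#   finally:
#       catalog_lock.release()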
def update_nested_dict(d,u,level=0):
"""
Recursively update nested dictionary. Does not overwrite any values.
Identical key is allowed only if both values are dictionaries and the
update can continue recursively.
    :param d: dictionary to be updated
    :param u: dictionary with the update
    :param level: internal, for error reporting only
Example:
from utils import update_nested_dict
d = {1: {2: 3}, 2: {4: 5}}
u = {1: {8: 9}, 3: {10: 11}}
update_nested_dict(d,u)
d
{1: {8: 9, 2: 3}, 2: {4: 5}, 3: {10: 11}}
update_nested_dict(d,u)
ValueError: update_nested_dict: level 1: values for common key 8 must be dictionaries
"""
# print ('update_nested_dict: level %s entering with d=%s u=%s' % (level,d,u))
if type(d) is not dict or type(u) is not dict:
raise ValueError ('update_nested_dict: level %s: both arguments must be dictionaries' % level)
for k in u.keys():
# print ('update_nested_dict: level %s found key %s in u' % (level,k))
if k in d:
# print ('update_nested_dict: level %s key %s in both u and d' % (level,k))
# print ('update_nested_dict: level %s recursive update in d=%s and u=%s' % (level,d,u))
if type(d[k]) is not dict or type(u[k]) is not dict:
raise ValueError ('update_nested_dict: level %s: values for common key %s must be dictionaries' % (level,k))
update_nested_dict(d[k],u[k],level+1)
# print ('update_nested_dict: level %s got updated d=%s' % (level,d))
else:
# print ('update_nested_dict: level %s key %s from u not found in d' % (level,k))
# print ('update_nested_dict: level %s adding item to d=%s from u=%s' % (level,d,u))
d[k]=u[k]
# print ('update_nested_dict: level %s got updated d=%s' % (level,d))
# print ('update_nested_dict: level %s exiting with updated d=%s' % (level,d))
def load_sys_cfg():
# load the system configuration
sys_cfg = None
try:
sys_cfg = Dict(json.load(open('etc/conf.json')))
except IOError:
logging.critical('Cannot find system configuration, have you created etc/conf.json?')
sys.exit(2)
# set defaults
    sys_cfg.sys_install_path = sys_cfg.get('sys_install_path', os.getcwd())
return sys_cfg
| mit | -2,771,383,232,434,513,400 | 34.890756 | 124 | 0.59307 | false |
nsmoooose/csp | csp/data/ui/tutorials/takeoff/takeoff.py | 1 | 2115 | #!/usr/bin/python
# Combat Simulator Project
# Copyright (C) 2002-2009 The Combat Simulator Project
# http://csp.sourceforge.net
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
Combat Simulator Project : Take off script
"""
import csp.cspsim
from csp.data.ui.tutorials.mission import Mission
from csp.data.ui.scripts.gamescreenmanager import GameScreenManager
from csp.data.ui.scripts.utils import SlotManager
class TakeOff(Mission):
def __init__(self, cspsim):
Mission.__init__(self, cspsim)
def name(self):
return "${tutorials_takeoff}"
def describingUI(self):
return "takeoff/mission.xml"
def startMission(self):
Mission.startMission(self)
def modifyTheatreForMission(self):
# We add a single aircraft at the airport.
vec = csp.csplib.Vector3
f16dj = "sim:vehicles.aircraft.f16dj"
self.cspsim.createVehicle(f16dj, vec(-29495, -10530, 91.3), vec(0, 0, 0), vec(0.0, 0.0, 180.0), True)
# Set the date and time. We want daylight for the first mission.
date = csp.csplib.SimDate(2007,6,12,10,0,0)
self.cspsim.setCurrentTime(date)
def onStart(self):
windowManager = self.cspsim.getWindowManager()
window = csp.cspsim.Window()
serializer = csp.cspsim.Serialization()
serializer.load(window, 'tutorials/takeoff/help.xml')
windowManager.show(window)
| gpl-2.0 | 5,528,677,412,418,689,000 | 34.25 | 109 | 0.689835 | false |
TomasTomecek/compose | compose/project.py | 1 | 13569 | from __future__ import absolute_import
from __future__ import unicode_literals
import logging
from functools import reduce
from docker.errors import APIError
from docker.errors import NotFound
from . import parallel
from .config import ConfigurationError
from .config.sort_services import get_service_name_from_net
from .const import DEFAULT_TIMEOUT
from .const import LABEL_ONE_OFF
from .const import LABEL_PROJECT
from .const import LABEL_SERVICE
from .container import Container
from .service import ContainerNet
from .service import ConvergenceStrategy
from .service import Net
from .service import Service
from .service import ServiceNet
log = logging.getLogger(__name__)
class Project(object):
"""
A collection of services.
"""
def __init__(self, name, services, client, use_networking=False, network_driver=None):
self.name = name
self.services = services
self.client = client
self.use_networking = use_networking
self.network_driver = network_driver
def labels(self, one_off=False):
return [
'{0}={1}'.format(LABEL_PROJECT, self.name),
'{0}={1}'.format(LABEL_ONE_OFF, "True" if one_off else "False"),
]
@classmethod
def from_dicts(cls, name, service_dicts, client, use_networking=False, network_driver=None):
"""
        Construct a Project from a list of dicts representing services.
"""
project = cls(name, [], client, use_networking=use_networking, network_driver=network_driver)
if use_networking:
remove_links(service_dicts)
for service_dict in service_dicts:
links = project.get_links(service_dict)
volumes_from = project.get_volumes_from(service_dict)
net = project.get_net(service_dict)
project.services.append(
Service(
client=client,
project=name,
use_networking=use_networking,
links=links,
net=net,
volumes_from=volumes_from,
**service_dict))
return project
@property
def service_names(self):
return [service.name for service in self.services]
def get_service(self, name):
"""
Retrieve a service by name. Raises NoSuchService
if the named service does not exist.
"""
for service in self.services:
if service.name == name:
return service
raise NoSuchService(name)
def validate_service_names(self, service_names):
"""
Validate that the given list of service names only contains valid
services. Raises NoSuchService if one of the names is invalid.
"""
valid_names = self.service_names
for name in service_names:
if name not in valid_names:
raise NoSuchService(name)
def get_services(self, service_names=None, include_deps=False):
"""
Returns a list of this project's services filtered
by the provided list of names, or all services if service_names is None
or [].
If include_deps is specified, returns a list including the dependencies for
service_names, in order of dependency.
Preserves the original order of self.services where possible,
reordering as needed to resolve dependencies.
Raises NoSuchService if any of the named services do not exist.
"""
if service_names is None or len(service_names) == 0:
return self.get_services(
service_names=self.service_names,
include_deps=include_deps
)
else:
unsorted = [self.get_service(name) for name in service_names]
services = [s for s in self.services if s in unsorted]
if include_deps:
services = reduce(self._inject_deps, services, [])
uniques = []
            for s in services:
                if s not in uniques:
                    uniques.append(s)
return uniques
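    # Illustrative sketch (not part of the original file): if a service 'web'
    # depends on 'db' (via links, volumes_from or net), dependencies are
    # returned before their dependents. The service names are made-up examples.
    #
    #   deps_first = project.get_services(['web'], include_deps=True)
    #   [s.name for s in deps_first]  # -> ['db', 'web']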
def get_services_without_duplicate(self, service_names=None, include_deps=False):
services = self.get_services(service_names, include_deps)
for service in services:
service.remove_duplicate_containers()
return services
def get_links(self, service_dict):
links = []
if 'links' in service_dict:
for link in service_dict.get('links', []):
if ':' in link:
service_name, link_name = link.split(':', 1)
else:
service_name, link_name = link, None
try:
links.append((self.get_service(service_name), link_name))
except NoSuchService:
raise ConfigurationError(
'Service "%s" has a link to service "%s" which does not '
'exist.' % (service_dict['name'], service_name))
del service_dict['links']
return links
def get_volumes_from(self, service_dict):
volumes_from = []
if 'volumes_from' in service_dict:
for volume_from_spec in service_dict.get('volumes_from', []):
# Get service
try:
service = self.get_service(volume_from_spec.source)
volume_from_spec = volume_from_spec._replace(source=service)
except NoSuchService:
try:
container = Container.from_id(self.client, volume_from_spec.source)
volume_from_spec = volume_from_spec._replace(source=container)
except APIError:
raise ConfigurationError(
'Service "%s" mounts volumes from "%s", which is '
'not the name of a service or container.' % (
service_dict['name'],
volume_from_spec.source))
volumes_from.append(volume_from_spec)
del service_dict['volumes_from']
return volumes_from
def get_net(self, service_dict):
net = service_dict.pop('net', None)
if not net:
if self.use_networking:
return Net(self.name)
return Net(None)
net_name = get_service_name_from_net(net)
if not net_name:
return Net(net)
try:
return ServiceNet(self.get_service(net_name))
except NoSuchService:
pass
try:
return ContainerNet(Container.from_id(self.client, net_name))
except APIError:
raise ConfigurationError(
'Service "%s" is trying to use the network of "%s", '
'which is not the name of a service or container.' % (
service_dict['name'],
net_name))
def start(self, service_names=None, **options):
containers = []
for service in self.get_services(service_names):
service_containers = service.start(**options)
containers.extend(service_containers)
return containers
def stop(self, service_names=None, **options):
parallel.parallel_stop(self.containers(service_names), options)
def pause(self, service_names=None, **options):
containers = self.containers(service_names)
parallel.parallel_pause(reversed(containers), options)
return containers
def unpause(self, service_names=None, **options):
containers = self.containers(service_names)
parallel.parallel_unpause(containers, options)
return containers
def kill(self, service_names=None, **options):
parallel.parallel_kill(self.containers(service_names), options)
def remove_stopped(self, service_names=None, **options):
parallel.parallel_remove(self.containers(service_names, stopped=True), options)
def restart(self, service_names=None, **options):
containers = self.containers(service_names, stopped=True)
parallel.parallel_restart(containers, options)
return containers
def build(self, service_names=None, no_cache=False, pull=False, force_rm=False):
for service in self.get_services(service_names):
if service.can_be_built():
service.build(no_cache, pull, force_rm)
else:
log.info('%s uses an image, skipping' % service.name)
def create(self, service_names=None, strategy=ConvergenceStrategy.changed, do_build=True):
services = self.get_services_without_duplicate(service_names, include_deps=True)
plans = self._get_convergence_plans(services, strategy)
for service in services:
service.execute_convergence_plan(plans[service.name], do_build, detached=True, start=False)
def up(self,
service_names=None,
start_deps=True,
strategy=ConvergenceStrategy.changed,
do_build=True,
timeout=DEFAULT_TIMEOUT,
detached=False):
services = self.get_services_without_duplicate(service_names, include_deps=start_deps)
plans = self._get_convergence_plans(services, strategy)
if self.use_networking and self.uses_default_network():
self.ensure_network_exists()
return [
container
for service in services
for container in service.execute_convergence_plan(
plans[service.name],
do_build=do_build,
timeout=timeout,
detached=detached
)
]
def _get_convergence_plans(self, services, strategy):
plans = {}
for service in services:
updated_dependencies = [
name
for name in service.get_dependency_names()
if name in plans
and plans[name].action in ('recreate', 'create')
]
if updated_dependencies and strategy.allows_recreate:
log.debug('%s has upstream changes (%s)',
service.name,
", ".join(updated_dependencies))
plan = service.convergence_plan(ConvergenceStrategy.always)
else:
plan = service.convergence_plan(strategy)
plans[service.name] = plan
return plans
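    # Illustrative sketch (not part of the original file): if 'db' is planned
    # for (re)creation and 'web' depends on it, the loop above upgrades web's
    # plan to a full recreate as well (when the strategy allows it), so changes
    # in a dependency ripple to its dependents. Service names are made-up.
    #
    #   services = project.get_services(['db', 'web'], include_deps=True)
    #   plans = project._get_convergence_plans(services,
    #                                          ConvergenceStrategy.changed)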
def pull(self, service_names=None, ignore_pull_failures=False):
for service in self.get_services(service_names, include_deps=False):
service.pull(ignore_pull_failures)
def containers(self, service_names=None, stopped=False, one_off=False):
if service_names:
self.validate_service_names(service_names)
else:
service_names = self.service_names
containers = list(filter(None, [
Container.from_ps(self.client, container)
for container in self.client.containers(
all=stopped,
filters={'label': self.labels(one_off=one_off)})]))
def matches_service_names(container):
return container.labels.get(LABEL_SERVICE) in service_names
return [c for c in containers if matches_service_names(c)]
def get_network(self):
try:
return self.client.inspect_network(self.name)
except NotFound:
return None
def ensure_network_exists(self):
# TODO: recreate network if driver has changed?
if self.get_network() is None:
driver_name = 'the default driver'
if self.network_driver:
driver_name = 'driver "{}"'.format(self.network_driver)
log.info(
'Creating network "{}" with {}'
.format(self.name, driver_name)
)
self.client.create_network(self.name, driver=self.network_driver)
def remove_network(self):
network = self.get_network()
if network:
self.client.remove_network(network['Id'])
def uses_default_network(self):
return any(service.net.mode == self.name for service in self.services)
def _inject_deps(self, acc, service):
dep_names = service.get_dependency_names()
if len(dep_names) > 0:
dep_services = self.get_services(
service_names=list(set(dep_names)),
include_deps=True
)
else:
dep_services = []
dep_services.append(service)
return acc + dep_services
def remove_links(service_dicts):
services_with_links = [s for s in service_dicts if 'links' in s]
if not services_with_links:
return
if len(services_with_links) == 1:
prefix = '"{}" defines'.format(services_with_links[0]['name'])
else:
prefix = 'Some services ({}) define'.format(
", ".join('"{}"'.format(s['name']) for s in services_with_links))
log.warn(
'\n{} links, which are not compatible with Docker networking and will be ignored.\n'
'Future versions of Docker will not support links - you should remove them for '
'forwards-compatibility.\n'.format(prefix))
for s in services_with_links:
del s['links']
class NoSuchService(Exception):
def __init__(self, name):
self.name = name
self.msg = "No such service: %s" % self.name
def __str__(self):
return self.msg
| apache-2.0 | -7,479,709,630,229,278,000 | 34.802111 | 103 | 0.588253 | false |
honza/heroku-sprunge | sprunge/app.py | 1 | 1970 | import os
import pygments.lexers
from pygments import highlight
from pygments.formatters import HtmlFormatter
from flask import Flask, request, make_response
from db import insert, find
from bson.errors import InvalidId
from settings import *
app = Flask(__name__)
HOME = """
<style> a { text-decoration: none } </style>
<pre>
heroku-sprunge(1) HEROKU-SPRUNGE heroku-sprunge(1)
NAME
heroku-sprunge: command line pastebin:
SYNOPSIS
<command> | curl -F '%s=<-' %s
DESCRIPTION
add <a href='http://pygments.org/docs/lexers/'>?<lang></a> to resulting url for line numbers and syntax highlighting
EXAMPLES
$ cat some/file.txt | curl -F '%s=<-' %s
%s/VZiY
$ firefox %s/VZiY?py#n-7
SEE ALSO
https://github.com/honza/heroku-sprunge
</pre>"""
@app.route('/', methods=['POST', 'GET'])
def hello():
if request.method == 'GET':
return HOME % (NAME, HOST, NAME, HOST, HOST, HOST)
data = request.form.get('sprunge', None)
if not data:
return 'fail'
uid = insert(data)
return '%s/%s\n' % (HOST, uid)
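# Illustrative sketch (not part of the original app): posting a paste from
# Python instead of curl. `requests` is not a dependency of this module and the
# URL is a made-up example; the form field name matches what hello() reads
# above (and is presumably the same value as NAME from settings).
#
#   import requests
#   r = requests.post('http://localhost:8080/', data={'sprunge': 'hello world'})
#   print(r.text)  # URL of the new paste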
@app.route('/<uid>')
def snip(uid):
try:
data = find(uid)
except InvalidId:
return '404'
if not data:
return '404'
try:
syntax = request.args.keys()[0]
except IndexError:
syntax = None
if syntax:
try:
lexer = pygments.lexers.get_lexer_by_name(syntax)
except:
lexer = pygments.lexers.TextLexer()
formatter = HtmlFormatter(full=True,
style=STYLE, linenos='inline',
encoding='utf-8')
return highlight(data['content'], lexer, formatter)
else:
response = make_response(data['content'])
        response.headers['Content-Type'] = 'text/plain'
return response
if __name__ == '__main__':
port = int(os.environ.get('PORT', 8080))
app.run(host='0.0.0.0', port=port, debug=True)
| bsd-2-clause | 5,201,614,514,209,401,000 | 21.906977 | 126 | 0.599492 | false |
nilsFK/py-privatekonomi | py_privatekonomi/core/apps/example2.py | 1 | 6699 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from py_privatekonomi.core.mappers.economy_mapper import EconomyMapper
from py_privatekonomi.utilities import helper
from py_privatekonomi.utilities.common import decode
from py_privatekonomi.utilities import resolver
from py_privatekonomi.utilities.models import rebuild_tables
from py_privatekonomi.utilities import helper, common
from py_privatekonomi.core import loader
from sqlalchemy import desc, asc
"""
This app extends the functionality of example1.py
by adding another function: persist(), which is the
main entry point for persisting output data to the database.
Note that we are already connected to the database
when we enter the persist function, so the only thing
we have to worry about is how to insert that data into
the database.
"""
def execute(sources, parser, formatter, configs):
contents = helper.execute(sources, parser, formatter, False)
for content in contents:
print(content)
return content
def persist(output, configs):
models = rebuild_tables(loader.load_models(EconomyMapper.getModelNames()))
# Insert all items
models.Organization.insert([
{
'name' : 'Swedbank'
},
{
'name' : 'Avanza'
}
])
models.Provider.insert([
{
'name' : 'Collector'
},
{
'name' : 'Handelsbanken'
}
])
models.Currency.insert([
{
'code' : 'SEK',
'symbol' : 'kr',
'country' : 'SE'
},
{
'code' : 'USD',
'symbol' : '$',
'country' : 'US'
}
])
models.AccountCategory.insert([
{
'name' : 'Lönekonto'
},
{
'name' : 'Sparkonto'
}
])
models.Account.insert([
{
'name' : 'Mitt Lönekonto',
'account_code' : '123-123',
'account_number' : '123456789',
'current_balance' : 12000.12,
'future_balance' : 13000.13,
'account_category_id' : 1,
'organization_id' : 1,
'provider_id' : None
},
{
'name' : 'Mitt Sparkonto',
'account_code' : '123-123',
'account_number' : '012345678',
'current_balance' : 2000,
'future_balance' : None,
'account_category_id' : 2,
'organization_id' : 1,
'provider_id' : 1
}
])
models.TransactionType.insert([
{
'name' : 'Insättning',
},
{
'name' : 'Intjänad ränta',
},
{
'name' : 'Kortköp/uttag'
}
])
models.TransactionCategory.insert([
{
'name' : 'Donationer'
},
{
'name' : 'Matvaror'
},
{
'name' : 'Fondköp'
},
{
'name' : 'Fondsälj'
}
])
models.Transaction.insert([
{
'group' : 1,
'accounting_date' : '2015-01-20',
'transaction_date' : '2015-01-20',
'amount' : -8.02,
'reference' : 'PATREON.COM',
'account_id' : 1,
'transaction_category_id' : 1,
'transaction_type_id' : 3,
'currency_id' : 1
},
{
'group' : 1,
'accounting_date' : '2015-01-20',
'transaction_date' : '2015-01-20',
'amount' : -12.00,
'reference' : 'SPOTIFY spotify',
'account_id' : 1,
'transaction_category_id' : None,
'transaction_type_id' : 3,
'currency_id' : 1
},
{
'group' : 2,
'accounting_date' : '2015-01-20',
'transaction_date' : '2015-01-20',
'amount' : -100.00,
'reference' : 'Överföring sparkonto',
'account_id' : 1,
'transaction_category_id' : None,
'transaction_type_id' : 3,
'currency_id' : 1
},
{
'group' : 2,
'accounting_date' : '2015-01-20',
'transaction_date' : '2015-01-20',
'amount' : 100.00,
'reference' : 'Överföring sparkonto',
'account_id' : 2,
'transaction_category_id' : None,
'transaction_type_id' : 1,
'currency_id' : 1
}
])
# Update a few items
models.Transaction.update({
'amount' : -6.66,
'accounting_date' : '2015-01-18'
}, models.Transaction.col('id').in_([1,2]))
# Delete a couple of items
models.Provider.delete(models.Provider.col('id')==1)
# Get some items
transactions = models.Transaction.selectAll()
for t in transactions:
print(("id:", t[models.Transaction.col('id')]))
print(("group:", t[models.Transaction.col('group')]))
print(("accounting_date:", t[models.Transaction.col('accounting_date')]))
print(("transaction_date:", t[models.Transaction.col('transaction_date')]))
print(("amount:", t[models.Transaction.col('amount')]))
print(("reference:", decode(t[models.Transaction.col('reference')])))
print(("account_id:", t[models.Transaction.col('account_id')]))
print(("transaction_category_id:", t[models.Transaction.col('transaction_category_id')]))
print(("transaction_type_id:", t[models.Transaction.col('transaction_type_id')]))
print(("currency_id:", t[models.Transaction.col('currency_id')]))
print(("-"*80))
# Get some items and order them descending/ascending
stmt = models.Transaction.getSelectStatement(models.Transaction.cols(['id', 'accounting_date'])).order_by(desc(models.Transaction.c().accounting_date))
transactions = models.Transaction.execute(stmt)
print("transactions ordered by accounting date descending")
for t in transactions:
print(("id:", t[models.Transaction.col('id')]))
print(("accounting date:", t[models.Transaction.col('accounting_date')]))
print(("-"*80))
stmt = models.Transaction.getSelectStatement(models.Transaction.cols(['id', 'accounting_date'])).order_by(asc(models.Transaction.c().accounting_date))
transactions = models.Transaction.execute(stmt)
print("transactions ordered by accounting date ascending")
for t in transactions:
print(("id:", t[models.Transaction.col('id')]))
print(("accounting date:", t[models.Transaction.col('accounting_date')]))
| mit | 271,856,470,520,782,340 | 30.842857 | 155 | 0.542545 | false |
UTSA-ICS/keystone-kerberos | keystone/tests/unit/test_sql_upgrade.py | 1 | 71681 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
To run these tests against a live database:
1. Modify the file ``keystone/tests/unit/config_files/backend_sql.conf`` to use
the connection for your live database.
2. Set up a blank, live database
3. Run the tests using::
tox -e py27 -- keystone.tests.unit.test_sql_upgrade
WARNING::
Your database will be wiped.
Do not do this against a database with valuable data as
all data will be lost.
"""
import copy
import json
import uuid
from migrate.versioning import api as versioning_api
from oslo_config import cfg
from oslo_db import exception as db_exception
from oslo_db.sqlalchemy import migration
from oslo_db.sqlalchemy import session as db_session
import six
from sqlalchemy.engine import reflection
import sqlalchemy.exc
from sqlalchemy import schema
from keystone.assignment.backends import sql as assignment_sql
from keystone.common import sql
from keystone.common.sql import migrate_repo
from keystone.common.sql import migration_helpers
from keystone.contrib import federation
from keystone.contrib import revoke
from keystone import exception
from keystone.tests import unit as tests
from keystone.tests.unit import default_fixtures
from keystone.tests.unit.ksfixtures import database
CONF = cfg.CONF
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
# NOTE(morganfainberg): This should be updated when each DB migration collapse
# is done to mirror the expected structure of the DB in the format of
# { <DB_TABLE_NAME>: [<COLUMN>, <COLUMN>, ...], ... }
INITIAL_TABLE_STRUCTURE = {
'credential': [
'id', 'user_id', 'project_id', 'blob', 'type', 'extra',
],
'domain': [
'id', 'name', 'enabled', 'extra',
],
'endpoint': [
'id', 'legacy_endpoint_id', 'interface', 'region', 'service_id', 'url',
'extra',
],
'group': [
'id', 'domain_id', 'name', 'description', 'extra',
],
'group_domain_metadata': [
'group_id', 'domain_id', 'data',
],
'group_project_metadata': [
'group_id', 'project_id', 'data',
],
'policy': [
'id', 'type', 'blob', 'extra',
],
'project': [
'id', 'name', 'extra', 'description', 'enabled', 'domain_id',
],
'role': [
'id', 'name', 'extra',
],
'service': [
'id', 'type', 'extra',
],
'token': [
'id', 'expires', 'extra', 'valid', 'trust_id', 'user_id',
],
'trust': [
'id', 'trustor_user_id', 'trustee_user_id', 'project_id',
'impersonation', 'deleted_at', 'expires_at', 'extra',
],
'trust_role': [
'trust_id', 'role_id',
],
'user': [
'id', 'name', 'extra', 'password', 'enabled', 'domain_id',
'default_project_id',
],
'user_domain_metadata': [
'user_id', 'domain_id', 'data',
],
'user_group_membership': [
'user_id', 'group_id',
],
'user_project_metadata': [
'user_id', 'project_id', 'data',
],
}
INITIAL_EXTENSION_TABLE_STRUCTURE = {
'revocation_event': [
'id', 'domain_id', 'project_id', 'user_id', 'role_id',
'trust_id', 'consumer_id', 'access_token_id',
'issued_before', 'expires_at', 'revoked_at', 'audit_id',
'audit_chain_id',
],
}
EXTENSIONS = {'federation': federation,
'revoke': revoke}
class SqlMigrateBase(tests.SQLDriverOverrides, tests.TestCase):
def initialize_sql(self):
self.metadata = sqlalchemy.MetaData()
self.metadata.bind = self.engine
def config_files(self):
config_files = super(SqlMigrateBase, self).config_files()
config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
return config_files
def repo_package(self):
return sql
def setUp(self):
super(SqlMigrateBase, self).setUp()
database.initialize_sql_session()
conn_str = CONF.database.connection
if (conn_str != tests.IN_MEM_DB_CONN_STRING and
conn_str.startswith('sqlite') and
conn_str[10:] == tests.DEFAULT_TEST_DB_FILE):
# Override the default with a DB that is specific to the migration
# tests only if the DB Connection string is the same as the global
# default. This is required so that no conflicts occur due to the
# global default DB already being under migrate control. This is
# only needed if the DB is not-in-memory
db_file = tests.dirs.tmp('keystone_migrate_test.db')
self.config_fixture.config(
group='database',
connection='sqlite:///%s' % db_file)
# create and share a single sqlalchemy engine for testing
self.engine = sql.get_engine()
self.Session = db_session.get_maker(self.engine, autocommit=False)
self.initialize_sql()
self.repo_path = migration_helpers.find_migrate_repo(
self.repo_package())
self.schema = versioning_api.ControlledSchema.create(
self.engine,
self.repo_path, self.initial_db_version)
# auto-detect the highest available schema version in the migrate_repo
self.max_version = self.schema.repository.version().version
def tearDown(self):
sqlalchemy.orm.session.Session.close_all()
meta = sqlalchemy.MetaData()
meta.bind = self.engine
meta.reflect(self.engine)
with self.engine.begin() as conn:
inspector = reflection.Inspector.from_engine(self.engine)
metadata = schema.MetaData()
tbs = []
all_fks = []
for table_name in inspector.get_table_names():
fks = []
for fk in inspector.get_foreign_keys(table_name):
if not fk['name']:
continue
fks.append(
schema.ForeignKeyConstraint((), (), name=fk['name']))
table = schema.Table(table_name, metadata, *fks)
tbs.append(table)
all_fks.extend(fks)
for fkc in all_fks:
conn.execute(schema.DropConstraint(fkc))
for table in tbs:
conn.execute(schema.DropTable(table))
sql.cleanup()
super(SqlMigrateBase, self).tearDown()
def select_table(self, name):
table = sqlalchemy.Table(name,
self.metadata,
autoload=True)
s = sqlalchemy.select([table])
return s
def assertTableExists(self, table_name):
try:
self.select_table(table_name)
except sqlalchemy.exc.NoSuchTableError:
raise AssertionError('Table "%s" does not exist' % table_name)
def assertTableDoesNotExist(self, table_name):
"""Asserts that a given table exists cannot be selected by name."""
# Switch to a different metadata otherwise you might still
# detect renamed or dropped tables
try:
temp_metadata = sqlalchemy.MetaData()
temp_metadata.bind = self.engine
sqlalchemy.Table(table_name, temp_metadata, autoload=True)
except sqlalchemy.exc.NoSuchTableError:
pass
else:
raise AssertionError('Table "%s" already exists' % table_name)
def upgrade(self, *args, **kwargs):
self._migrate(*args, **kwargs)
def downgrade(self, *args, **kwargs):
self._migrate(*args, downgrade=True, **kwargs)
def _migrate(self, version, repository=None, downgrade=False,
current_schema=None):
repository = repository or self.repo_path
err = ''
version = versioning_api._migrate_version(self.schema,
version,
not downgrade,
err)
if not current_schema:
current_schema = self.schema
changeset = current_schema.changeset(version)
for ver, change in changeset:
self.schema.runchange(ver, change, changeset.step)
self.assertEqual(self.schema.version, version)
def assertTableColumns(self, table_name, expected_cols):
"""Asserts that the table contains the expected set of columns."""
self.initialize_sql()
table = self.select_table(table_name)
actual_cols = [col.name for col in table.columns]
# Check if the columns are equal, but allow for a different order,
# which might occur after an upgrade followed by a downgrade
self.assertItemsEqual(expected_cols, actual_cols,
'%s table' % table_name)
@property
def initial_db_version(self):
return getattr(self, '_initial_db_version', 0)
class SqlUpgradeTests(SqlMigrateBase):
_initial_db_version = migrate_repo.DB_INIT_VERSION
def test_blank_db_to_start(self):
self.assertTableDoesNotExist('user')
def test_start_version_db_init_version(self):
version = migration.db_version(sql.get_engine(), self.repo_path,
migrate_repo.DB_INIT_VERSION)
self.assertEqual(
version,
migrate_repo.DB_INIT_VERSION,
'DB is not at version %s' % migrate_repo.DB_INIT_VERSION)
def test_two_steps_forward_one_step_back(self):
"""You should be able to cleanly undo and re-apply all upgrades.
Upgrades are run in the following order::
Starting with the initial version defined at
keystone.common.migrate_repo.DB_INIT_VERSION
INIT +1 -> INIT +2 -> INIT +1 -> INIT +2 -> INIT +3 -> INIT +2 ...
^---------------------^ ^---------------------^
Downgrade to the DB_INIT_VERSION does not occur based on the
requirement that the base version be DB_INIT_VERSION + 1 before
migration can occur. Downgrade below DB_INIT_VERSION + 1 is no longer
supported.
        DB_INIT_VERSION is the number preceding the release schema version from
        two releases prior. For example, Juno releases with DB_INIT_VERSION
        set to 35, while the Havana release schema version (Havana being two
        releases before Juno) is 36.
        The migrate utility requires that the db be initialized under version
        control with the revision directly before the first version to be
        applied.
"""
for x in range(migrate_repo.DB_INIT_VERSION + 1,
self.max_version + 1):
self.upgrade(x)
downgrade_ver = x - 1
# Don't actually downgrade to the init version. This will raise
# a not-implemented error.
if downgrade_ver != migrate_repo.DB_INIT_VERSION:
self.downgrade(x - 1)
self.upgrade(x)
def test_upgrade_add_initial_tables(self):
self.upgrade(migrate_repo.DB_INIT_VERSION + 1)
self.check_initial_table_structure()
def check_initial_table_structure(self):
for table in INITIAL_TABLE_STRUCTURE:
self.assertTableColumns(table, INITIAL_TABLE_STRUCTURE[table])
# Ensure the default domain was properly created.
default_domain = migration_helpers.get_default_domain()
meta = sqlalchemy.MetaData()
meta.bind = self.engine
domain_table = sqlalchemy.Table('domain', meta, autoload=True)
session = self.Session()
q = session.query(domain_table)
refs = q.all()
self.assertEqual(1, len(refs))
for k in default_domain.keys():
self.assertEqual(default_domain[k], getattr(refs[0], k))
def test_downgrade_to_db_init_version(self):
self.upgrade(self.max_version)
if self.engine.name == 'mysql':
self._mysql_check_all_tables_innodb()
self.downgrade(migrate_repo.DB_INIT_VERSION + 1)
self.check_initial_table_structure()
meta = sqlalchemy.MetaData()
meta.bind = self.engine
meta.reflect(self.engine)
initial_table_set = set(INITIAL_TABLE_STRUCTURE.keys())
table_set = set(meta.tables.keys())
# explicitly remove the migrate_version table, this is not controlled
# by the migration scripts and should be exempt from this check.
table_set.remove('migrate_version')
self.assertSetEqual(initial_table_set, table_set)
        # Downgrading to before Havana's release schema version (036) is not
        # supported. A NotImplementedError should be raised when attempting to
        # downgrade.
self.assertRaises(NotImplementedError, self.downgrade,
migrate_repo.DB_INIT_VERSION)
def insert_dict(self, session, table_name, d, table=None):
"""Naively inserts key-value pairs into a table, given a dictionary."""
if table is None:
this_table = sqlalchemy.Table(table_name, self.metadata,
autoload=True)
else:
this_table = table
insert = this_table.insert().values(**d)
session.execute(insert)
session.commit()
def test_region_migration(self):
self.assertTableDoesNotExist('region')
self.upgrade(37)
self.assertTableExists('region')
self.downgrade(36)
self.assertTableDoesNotExist('region')
def test_assignment_table_migration(self):
def create_base_data(session):
domain_table = sqlalchemy.Table('domain', self.metadata,
autoload=True)
user_table = sqlalchemy.Table('user', self.metadata, autoload=True)
group_table = sqlalchemy.Table('group', self.metadata,
autoload=True)
role_table = sqlalchemy.Table('role', self.metadata, autoload=True)
project_table = sqlalchemy.Table(
'project', self.metadata, autoload=True)
base_data = {}
# Create a Domain
base_data['domain'] = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'enabled': True}
session.execute(domain_table.insert().values(base_data['domain']))
# Create another Domain
base_data['domain2'] = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'enabled': True}
session.execute(domain_table.insert().values(base_data['domain2']))
# Create a Project
base_data['project'] = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'domain_id': base_data['domain']['id'],
'extra': "{}"}
session.execute(
project_table.insert().values(base_data['project']))
# Create another Project
base_data['project2'] = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'domain_id': base_data['domain']['id'],
'extra': "{}"}
session.execute(
project_table.insert().values(base_data['project2']))
# Create a User
base_data['user'] = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'domain_id': base_data['domain']['id'],
'password': uuid.uuid4().hex,
'enabled': True,
'extra': "{}"}
session.execute(user_table.insert().values(base_data['user']))
# Create a Group
base_data['group'] = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'domain_id': base_data['domain']['id'],
'extra': "{}"}
session.execute(group_table.insert().values(base_data['group']))
# Create roles
base_data['roles'] = []
for _ in range(9):
role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
session.execute(role_table.insert().values(role))
base_data['roles'].append(role)
return base_data
def populate_grants(session, base_data):
user_project_table = sqlalchemy.Table(
'user_project_metadata', self.metadata, autoload=True)
user_domain_table = sqlalchemy.Table(
'user_domain_metadata', self.metadata, autoload=True)
group_project_table = sqlalchemy.Table(
'group_project_metadata', self.metadata, autoload=True)
group_domain_table = sqlalchemy.Table(
'group_domain_metadata', self.metadata, autoload=True)
# Grant a role to user on project
grant = {'user_id': base_data['user']['id'],
'project_id': base_data['project']['id'],
'data': json.dumps(
{'roles': [{'id': base_data['roles'][0]['id']}]})}
session.execute(user_project_table.insert().values(grant))
# Grant two roles to user on project2
grant = {'user_id': base_data['user']['id'],
'project_id': base_data['project2']['id'],
'data': json.dumps(
{'roles': [{'id': base_data['roles'][1]['id']},
{'id': base_data['roles'][2]['id']}]})}
session.execute(user_project_table.insert().values(grant))
# Grant role to group on project
grant = {'group_id': base_data['group']['id'],
'project_id': base_data['project']['id'],
'data': json.dumps(
{'roles': [{'id': base_data['roles'][3]['id']}]})}
session.execute(group_project_table.insert().values(grant))
# Grant two roles to group on project2
grant = {'group_id': base_data['group']['id'],
'project_id': base_data['project2']['id'],
'data': json.dumps(
{'roles': [{'id': base_data['roles'][4]['id']},
{'id': base_data['roles'][5]['id']}]})}
session.execute(group_project_table.insert().values(grant))
# Grant two roles to group on domain, one inherited, one not
grant = {'group_id': base_data['group']['id'],
'domain_id': base_data['domain']['id'],
'data': json.dumps(
{'roles': [{'id': base_data['roles'][6]['id']},
{'id': base_data['roles'][7]['id'],
'inherited_to': 'projects'}]})}
session.execute(group_domain_table.insert().values(grant))
# Grant inherited role to user on domain
grant = {'user_id': base_data['user']['id'],
'domain_id': base_data['domain']['id'],
'data': json.dumps(
{'roles': [{'id': base_data['roles'][8]['id'],
'inherited_to': 'projects'}]})}
session.execute(user_domain_table.insert().values(grant))
# Grant two non-inherited roles to user on domain2, using roles
# that are also assigned to other actors/targets
grant = {'user_id': base_data['user']['id'],
'domain_id': base_data['domain2']['id'],
'data': json.dumps(
{'roles': [{'id': base_data['roles'][6]['id']},
{'id': base_data['roles'][7]['id']}]})}
session.execute(user_domain_table.insert().values(grant))
session.commit()
def check_grants(session, base_data):
user_project_table = sqlalchemy.Table(
'user_project_metadata', self.metadata, autoload=True)
user_domain_table = sqlalchemy.Table(
'user_domain_metadata', self.metadata, autoload=True)
group_project_table = sqlalchemy.Table(
'group_project_metadata', self.metadata, autoload=True)
group_domain_table = sqlalchemy.Table(
'group_domain_metadata', self.metadata, autoload=True)
s = sqlalchemy.select([user_project_table.c.data]).where(
(user_project_table.c.user_id == base_data['user']['id']) &
(user_project_table.c.project_id ==
base_data['project']['id']))
r = session.execute(s)
data = json.loads(r.fetchone()['data'])
self.assertEqual(1, len(data['roles']))
self.assertIn({'id': base_data['roles'][0]['id']}, data['roles'])
s = sqlalchemy.select([user_project_table.c.data]).where(
(user_project_table.c.user_id == base_data['user']['id']) &
(user_project_table.c.project_id ==
base_data['project2']['id']))
r = session.execute(s)
data = json.loads(r.fetchone()['data'])
self.assertEqual(2, len(data['roles']))
self.assertIn({'id': base_data['roles'][1]['id']}, data['roles'])
self.assertIn({'id': base_data['roles'][2]['id']}, data['roles'])
s = sqlalchemy.select([group_project_table.c.data]).where(
(group_project_table.c.group_id == base_data['group']['id']) &
(group_project_table.c.project_id ==
base_data['project']['id']))
r = session.execute(s)
data = json.loads(r.fetchone()['data'])
self.assertEqual(1, len(data['roles']))
self.assertIn({'id': base_data['roles'][3]['id']}, data['roles'])
s = sqlalchemy.select([group_project_table.c.data]).where(
(group_project_table.c.group_id == base_data['group']['id']) &
(group_project_table.c.project_id ==
base_data['project2']['id']))
r = session.execute(s)
data = json.loads(r.fetchone()['data'])
self.assertEqual(2, len(data['roles']))
self.assertIn({'id': base_data['roles'][4]['id']}, data['roles'])
self.assertIn({'id': base_data['roles'][5]['id']}, data['roles'])
s = sqlalchemy.select([group_domain_table.c.data]).where(
(group_domain_table.c.group_id == base_data['group']['id']) &
(group_domain_table.c.domain_id == base_data['domain']['id']))
r = session.execute(s)
data = json.loads(r.fetchone()['data'])
self.assertEqual(2, len(data['roles']))
self.assertIn({'id': base_data['roles'][6]['id']}, data['roles'])
self.assertIn({'id': base_data['roles'][7]['id'],
'inherited_to': 'projects'}, data['roles'])
s = sqlalchemy.select([user_domain_table.c.data]).where(
(user_domain_table.c.user_id == base_data['user']['id']) &
(user_domain_table.c.domain_id == base_data['domain']['id']))
r = session.execute(s)
data = json.loads(r.fetchone()['data'])
self.assertEqual(1, len(data['roles']))
self.assertIn({'id': base_data['roles'][8]['id'],
'inherited_to': 'projects'}, data['roles'])
s = sqlalchemy.select([user_domain_table.c.data]).where(
(user_domain_table.c.user_id == base_data['user']['id']) &
(user_domain_table.c.domain_id == base_data['domain2']['id']))
r = session.execute(s)
data = json.loads(r.fetchone()['data'])
self.assertEqual(2, len(data['roles']))
self.assertIn({'id': base_data['roles'][6]['id']}, data['roles'])
self.assertIn({'id': base_data['roles'][7]['id']}, data['roles'])
def check_assignments(session, base_data):
def check_assignment_type(refs, type):
for ref in refs:
self.assertEqual(type, ref.type)
assignment_table = sqlalchemy.Table(
'assignment', self.metadata, autoload=True)
refs = session.query(assignment_table).all()
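# 11 assignment rows, one per role granted in populate_grants():
# user->project (1), user->project2 (2), group->project (1),
# group->project2 (2), group->domain (2), user->domain (1), user->domain2 (2).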
self.assertEqual(11, len(refs))
q = session.query(assignment_table)
q = q.filter_by(actor_id=base_data['user']['id'])
q = q.filter_by(target_id=base_data['project']['id'])
refs = q.all()
self.assertEqual(1, len(refs))
self.assertEqual(base_data['roles'][0]['id'], refs[0].role_id)
self.assertFalse(refs[0].inherited)
check_assignment_type(refs,
assignment_sql.AssignmentType.USER_PROJECT)
q = session.query(assignment_table)
q = q.filter_by(actor_id=base_data['user']['id'])
q = q.filter_by(target_id=base_data['project2']['id'])
refs = q.all()
self.assertEqual(2, len(refs))
role_ids = [base_data['roles'][1]['id'],
base_data['roles'][2]['id']]
self.assertIn(refs[0].role_id, role_ids)
self.assertIn(refs[1].role_id, role_ids)
self.assertFalse(refs[0].inherited)
self.assertFalse(refs[1].inherited)
check_assignment_type(refs,
assignment_sql.AssignmentType.USER_PROJECT)
q = session.query(assignment_table)
q = q.filter_by(actor_id=base_data['group']['id'])
q = q.filter_by(target_id=base_data['project']['id'])
refs = q.all()
self.assertEqual(1, len(refs))
self.assertEqual(base_data['roles'][3]['id'], refs[0].role_id)
self.assertFalse(refs[0].inherited)
check_assignment_type(refs,
assignment_sql.AssignmentType.GROUP_PROJECT)
q = session.query(assignment_table)
q = q.filter_by(actor_id=base_data['group']['id'])
q = q.filter_by(target_id=base_data['project2']['id'])
refs = q.all()
self.assertEqual(2, len(refs))
role_ids = [base_data['roles'][4]['id'],
base_data['roles'][5]['id']]
self.assertIn(refs[0].role_id, role_ids)
self.assertIn(refs[1].role_id, role_ids)
self.assertFalse(refs[0].inherited)
self.assertFalse(refs[1].inherited)
check_assignment_type(refs,
assignment_sql.AssignmentType.GROUP_PROJECT)
q = session.query(assignment_table)
q = q.filter_by(actor_id=base_data['group']['id'])
q = q.filter_by(target_id=base_data['domain']['id'])
refs = q.all()
self.assertEqual(2, len(refs))
role_ids = [base_data['roles'][6]['id'],
base_data['roles'][7]['id']]
self.assertIn(refs[0].role_id, role_ids)
self.assertIn(refs[1].role_id, role_ids)
if refs[0].role_id == base_data['roles'][7]['id']:
self.assertTrue(refs[0].inherited)
self.assertFalse(refs[1].inherited)
else:
self.assertTrue(refs[1].inherited)
self.assertFalse(refs[0].inherited)
check_assignment_type(refs,
assignment_sql.AssignmentType.GROUP_DOMAIN)
q = session.query(assignment_table)
q = q.filter_by(actor_id=base_data['user']['id'])
q = q.filter_by(target_id=base_data['domain']['id'])
refs = q.all()
self.assertEqual(1, len(refs))
self.assertEqual(base_data['roles'][8]['id'], refs[0].role_id)
self.assertTrue(refs[0].inherited)
check_assignment_type(refs,
assignment_sql.AssignmentType.USER_DOMAIN)
q = session.query(assignment_table)
q = q.filter_by(actor_id=base_data['user']['id'])
q = q.filter_by(target_id=base_data['domain2']['id'])
refs = q.all()
self.assertEqual(2, len(refs))
role_ids = [base_data['roles'][6]['id'],
base_data['roles'][7]['id']]
self.assertIn(refs[0].role_id, role_ids)
self.assertIn(refs[1].role_id, role_ids)
self.assertFalse(refs[0].inherited)
self.assertFalse(refs[1].inherited)
check_assignment_type(refs,
assignment_sql.AssignmentType.USER_DOMAIN)
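# Drive the migration under test: build data at version 37 (separate
# *_metadata tables), upgrade to 40 (single assignment table) and verify,
# then downgrade back to 37 and verify the original grant layout survives.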
self.upgrade(37)
session = self.Session()
self.assertTableDoesNotExist('assignment')
base_data = create_base_data(session)
populate_grants(session, base_data)
check_grants(session, base_data)
session.commit()
session.close()
self.upgrade(40)
session = self.Session()
self.assertTableExists('assignment')
self.assertTableDoesNotExist('user_project_metadata')
self.assertTableDoesNotExist('group_project_metadata')
self.assertTableDoesNotExist('user_domain_metadata')
self.assertTableDoesNotExist('group_domain_metadata')
check_assignments(session, base_data)
session.close()
self.downgrade(37)
session = self.Session()
self.assertTableDoesNotExist('assignment')
check_grants(session, base_data)
session.close()
def test_limited_trusts_upgrade(self):
# make sure that the remaining_uses column is created
self.upgrade(41)
self.assertTableColumns('trust',
['id', 'trustor_user_id',
'trustee_user_id',
'project_id', 'impersonation',
'deleted_at',
'expires_at', 'extra',
'remaining_uses'])
def test_limited_trusts_downgrade(self):
# make sure that the remaining_uses column is removed
self.upgrade(41)
self.downgrade(40)
self.assertTableColumns('trust',
['id', 'trustor_user_id',
'trustee_user_id',
'project_id', 'impersonation',
'deleted_at',
'expires_at', 'extra'])
def test_limited_trusts_downgrade_trusts_cleanup(self):
# make sure that only trusts with unlimited uses are kept in the
# downgrade
self.upgrade(41)
session = self.Session()
limited_trust = {
'id': uuid.uuid4().hex,
'trustor_user_id': uuid.uuid4().hex,
'trustee_user_id': uuid.uuid4().hex,
'project_id': uuid.uuid4().hex,
'impersonation': True,
'remaining_uses': 5
}
consumed_trust = {
'id': uuid.uuid4().hex,
'trustor_user_id': uuid.uuid4().hex,
'trustee_user_id': uuid.uuid4().hex,
'project_id': uuid.uuid4().hex,
'impersonation': True,
'remaining_uses': 0
}
unlimited_trust = {
'id': uuid.uuid4().hex,
'trustor_user_id': uuid.uuid4().hex,
'trustee_user_id': uuid.uuid4().hex,
'project_id': uuid.uuid4().hex,
'impersonation': True,
'remaining_uses': None
}
self.insert_dict(session, 'trust', limited_trust)
self.insert_dict(session, 'trust', consumed_trust)
self.insert_dict(session, 'trust', unlimited_trust)
trust_table = sqlalchemy.Table(
'trust', self.metadata, autoload=True)
# we should have 3 trusts in the database
self.assertEqual(3, session.query(trust_table).count())
session.close()
self.downgrade(40)
session = self.Session()
trust_table = sqlalchemy.Table(
'trust', self.metadata, autoload=True)
# Now only one trust remains ...
self.assertEqual(1, session.query(trust_table.columns.id).count())
# ... and this trust is the one that was not limited in uses
self.assertEqual(
unlimited_trust['id'],
session.query(trust_table.columns.id).one()[0])
def test_upgrade_service_enabled_cols(self):
"""Migration 44 added `enabled` column to `service` table."""
self.upgrade(44)
# Verify that there's an 'enabled' field.
exp_cols = ['id', 'type', 'extra', 'enabled']
self.assertTableColumns('service', exp_cols)
def test_downgrade_service_enabled_cols(self):
"""Check columns when downgrade to migration 43.
The downgrade from migration 44 removes the `enabled` column from the
`service` table.
"""
self.upgrade(44)
self.downgrade(43)
exp_cols = ['id', 'type', 'extra']
self.assertTableColumns('service', exp_cols)
def test_upgrade_service_enabled_data(self):
"""Migration 44 has to migrate data from `extra` to `enabled`."""
def add_service(**extra_data):
service_id = uuid.uuid4().hex
service = {
'id': service_id,
'type': uuid.uuid4().hex,
'extra': json.dumps(extra_data),
}
self.insert_dict(session, 'service', service)
return service_id
self.upgrade(43)
session = self.Session()
# Different services with expected enabled and extra values, and a
# description.
random_attr_name = uuid.uuid4().hex
random_attr_value = uuid.uuid4().hex
random_attr = {random_attr_name: random_attr_value}
random_attr_str = "%s='%s'" % (random_attr_name, random_attr_value)
random_attr_enabled_false = {random_attr_name: random_attr_value,
'enabled': False}
random_attr_enabled_false_str = 'enabled=False,%s' % random_attr_str
services = [
# Some values for True.
(add_service(), (True, {}), 'no enabled'),
(add_service(enabled=True), (True, {}), 'enabled=True'),
(add_service(enabled='true'), (True, {}), "enabled='true'"),
(add_service(**random_attr),
(True, random_attr), random_attr_str),
(add_service(enabled=None), (True, {}), 'enabled=None'),
# Some values for False.
(add_service(enabled=False), (False, {}), 'enabled=False'),
(add_service(enabled='false'), (False, {}), "enabled='false'"),
(add_service(enabled='0'), (False, {}), "enabled='0'"),
(add_service(**random_attr_enabled_false),
(False, random_attr), random_attr_enabled_false_str),
]
session.close()
self.upgrade(44)
session = self.Session()
# Verify that the services have the expected values.
self.metadata.clear()
service_table = sqlalchemy.Table('service', self.metadata,
autoload=True)
def fetch_service(service_id):
cols = [service_table.c.enabled, service_table.c.extra]
f = service_table.c.id == service_id
s = sqlalchemy.select(cols).where(f)
ep = session.execute(s).fetchone()
return ep.enabled, json.loads(ep.extra)
for service_id, exp, msg in services:
exp_enabled, exp_extra = exp
enabled, extra = fetch_service(service_id)
self.assertEqual(exp_enabled, enabled, msg)
self.assertEqual(exp_extra, extra, msg)
def test_downgrade_service_enabled_data(self):
"""Downgrade from migration 44 migrates data.
Downgrade from migration 44 migrates data from `enabled` to
`extra`. Any disabled services have 'enabled': False put into 'extra'.
"""
def add_service(enabled=True, **extra_data):
service_id = uuid.uuid4().hex
service = {
'id': service_id,
'type': uuid.uuid4().hex,
'extra': json.dumps(extra_data),
'enabled': enabled
}
self.insert_dict(session, 'service', service)
return service_id
self.upgrade(44)
session = self.Session()
# Insert a seed service entry using the new format; the rows checked
# below are created via new_service().
service_id = add_service(True)
new_service = (lambda enabled, **extra_data:
add_service(enabled, **extra_data))
# Different services with expected extra values, and a
# description.
services = [
# True tests
(new_service(True), {}, 'enabled'),
(new_service(True, something='whatever'),
{'something': 'whatever'},
"something='whatever'"),
# False tests
(new_service(False), {'enabled': False}, 'enabled=False'),
(new_service(False, something='whatever'),
{'enabled': False, 'something': 'whatever'},
"enabled=False, something='whatever'"),
]
session.close()
self.downgrade(43)
session = self.Session()
# Verify that the services have the expected values.
self.metadata.clear()
service_table = sqlalchemy.Table('service', self.metadata,
autoload=True)
def fetch_service(service_id):
cols = [service_table.c.extra]
f = service_table.c.id == service_id
s = sqlalchemy.select(cols).where(f)
ep = session.execute(s).fetchone()
return json.loads(ep.extra)
for service_id, exp_extra, msg in services:
extra = fetch_service(service_id)
self.assertEqual(exp_extra, extra, msg)
def test_upgrade_endpoint_enabled_cols(self):
"""Migration 42 added `enabled` column to `endpoint` table."""
self.upgrade(42)
# Verify that there's an 'enabled' field.
exp_cols = ['id', 'legacy_endpoint_id', 'interface', 'region',
'service_id', 'url', 'extra', 'enabled']
self.assertTableColumns('endpoint', exp_cols)
def test_downgrade_endpoint_enabled_cols(self):
"""Check columns when downgrade from migration 41.
The downgrade from migration 42 removes the `enabled` column from the
`endpoint` table.
"""
self.upgrade(42)
self.downgrade(41)
exp_cols = ['id', 'legacy_endpoint_id', 'interface', 'region',
'service_id', 'url', 'extra']
self.assertTableColumns('endpoint', exp_cols)
def test_upgrade_endpoint_enabled_data(self):
"""Migration 42 has to migrate data from `extra` to `enabled`."""
def add_service():
service_id = uuid.uuid4().hex
service = {
'id': service_id,
'type': uuid.uuid4().hex
}
self.insert_dict(session, 'service', service)
return service_id
def add_endpoint(service_id, **extra_data):
endpoint_id = uuid.uuid4().hex
endpoint = {
'id': endpoint_id,
'interface': uuid.uuid4().hex[:8],
'service_id': service_id,
'url': uuid.uuid4().hex,
'extra': json.dumps(extra_data)
}
self.insert_dict(session, 'endpoint', endpoint)
return endpoint_id
self.upgrade(41)
session = self.Session()
# Insert some endpoints using the old format where `enabled` is in
# `extra` JSON.
# We'll need a service entry since it's the foreign key for endpoints.
service_id = add_service()
new_ep = lambda **extra_data: add_endpoint(service_id, **extra_data)
# Different endpoints with expected enabled and extra values, and a
# description.
random_attr_name = uuid.uuid4().hex
random_attr_value = uuid.uuid4().hex
random_attr = {random_attr_name: random_attr_value}
random_attr_str = "%s='%s'" % (random_attr_name, random_attr_value)
random_attr_enabled_false = {random_attr_name: random_attr_value,
'enabled': False}
random_attr_enabled_false_str = 'enabled=False,%s' % random_attr_str
endpoints = [
# Some values for True.
(new_ep(), (True, {}), 'no enabled'),
(new_ep(enabled=True), (True, {}), 'enabled=True'),
(new_ep(enabled='true'), (True, {}), "enabled='true'"),
(new_ep(**random_attr),
(True, random_attr), random_attr_str),
(new_ep(enabled=None), (True, {}), 'enabled=None'),
# Some values for False.
(new_ep(enabled=False), (False, {}), 'enabled=False'),
(new_ep(enabled='false'), (False, {}), "enabled='false'"),
(new_ep(enabled='0'), (False, {}), "enabled='0'"),
(new_ep(**random_attr_enabled_false),
(False, random_attr), random_attr_enabled_false_str),
]
session.close()
self.upgrade(42)
session = self.Session()
# Verify that the endpoints have the expected values.
self.metadata.clear()
endpoint_table = sqlalchemy.Table('endpoint', self.metadata,
autoload=True)
def fetch_endpoint(endpoint_id):
cols = [endpoint_table.c.enabled, endpoint_table.c.extra]
f = endpoint_table.c.id == endpoint_id
s = sqlalchemy.select(cols).where(f)
ep = session.execute(s).fetchone()
return ep.enabled, json.loads(ep.extra)
for endpoint_id, exp, msg in endpoints:
exp_enabled, exp_extra = exp
enabled, extra = fetch_endpoint(endpoint_id)
# NOTE(henry-nash): Different databases may return enabled as a
# real boolean of 0/1 - so we use assertEqual not assertIs here.
self.assertEqual(exp_enabled, enabled, msg)
self.assertEqual(exp_extra, extra, msg)
def test_downgrade_endpoint_enabled_data(self):
"""Downgrade from migration 42 migrates data.
Downgrade from migration 42 migrates data from `enabled` to
`extra`. Any disabled endpoints have 'enabled': False put into 'extra'.
"""
def add_service():
service_id = uuid.uuid4().hex
service = {
'id': service_id,
'type': uuid.uuid4().hex
}
self.insert_dict(session, 'service', service)
return service_id
def add_endpoint(service_id, enabled, **extra_data):
endpoint_id = uuid.uuid4().hex
endpoint = {
'id': endpoint_id,
'interface': uuid.uuid4().hex[:8],
'service_id': service_id,
'url': uuid.uuid4().hex,
'extra': json.dumps(extra_data),
'enabled': enabled
}
self.insert_dict(session, 'endpoint', endpoint)
return endpoint_id
self.upgrade(42)
session = self.Session()
# Insert some endpoints using the new format.
# We'll need a service entry since it's the foreign key for endpoints.
service_id = add_service()
new_ep = (lambda enabled, **extra_data:
add_endpoint(service_id, enabled, **extra_data))
# Different endpoints with expected extra values, and a
# description.
endpoints = [
# True tests
(new_ep(True), {}, 'enabled'),
(new_ep(True, something='whatever'), {'something': 'whatever'},
"something='whatever'"),
# False tests
(new_ep(False), {'enabled': False}, 'enabled=False'),
(new_ep(False, something='whatever'),
{'enabled': False, 'something': 'whatever'},
"enabled=False, something='whatever'"),
]
session.close()
self.downgrade(41)
session = self.Session()
# Verify that the endpoints have the expected values.
self.metadata.clear()
endpoint_table = sqlalchemy.Table('endpoint', self.metadata,
autoload=True)
def fetch_endpoint(endpoint_id):
cols = [endpoint_table.c.extra]
f = endpoint_table.c.id == endpoint_id
s = sqlalchemy.select(cols).where(f)
ep = session.execute(s).fetchone()
return json.loads(ep.extra)
for endpoint_id, exp_extra, msg in endpoints:
extra = fetch_endpoint(endpoint_id)
self.assertEqual(exp_extra, extra, msg)
def test_upgrade_region_non_unique_description(self):
"""Test upgrade to migration 43.
This migration should occur with no unique constraint on the region
description column.
Create two regions with the same description.
"""
def add_region():
region_uuid = uuid.uuid4().hex
region = {
'id': region_uuid,
'description': ''
}
self.insert_dict(session, 'region', region)
return region_uuid
self.upgrade(43)
session = self.Session()
# Write one region to the database
add_region()
# Write another region to the database with the same description
add_region()
def test_upgrade_region_unique_description(self):
"""Test upgrade to migration 43.
This test models a migration where there is a unique constraint on the
description column.
Create two regions with the same description.
"""
def add_region(table):
region_uuid = uuid.uuid4().hex
region = {
'id': region_uuid,
'description': ''
}
self.insert_dict(session, 'region', region, table=table)
return region_uuid
def get_metadata():
meta = sqlalchemy.MetaData()
meta.bind = self.engine
return meta
# Migrate to version 42
self.upgrade(42)
session = self.Session()
region_table = sqlalchemy.Table('region',
get_metadata(),
autoload=True)
# create the unique constraint and load the new version of the
# reflection cache
idx = sqlalchemy.Index('description', region_table.c.description,
unique=True)
idx.create(self.engine)
region_unique_table = sqlalchemy.Table('region',
get_metadata(),
autoload=True)
add_region(region_unique_table)
self.assertEqual(1, session.query(region_unique_table).count())
# verify the unique constraint is enforced
self.assertRaises(
# FIXME (I159): Since oslo_db wraps all the database exceptions
# into more specific exception objects, we should catch both of
# sqlalchemy and oslo_db exceptions. If an old oslo_db version
# is installed, IntegrityError is raised. If >=0.4.0 version of
# oslo_db is installed, DBError is raised.
# Once global requirements is updated to an oslo_db version that fixes
# the exception wrapping, IntegrityError can be removed from the tuple.
# NOTE(henry-nash): The above re-creation of the (now erased from
# history) unique constraint doesn't appear to work well with the
# Postgresql SQA driver, leading to it throwing a ValueError, so
# we also catch that here.
(sqlalchemy.exc.IntegrityError, db_exception.DBError, ValueError),
add_region,
table=region_unique_table)
# migrate to 43, unique constraint should be dropped
session.close()
self.upgrade(43)
session = self.Session()
# reload the region table from the schema
region_nonunique = sqlalchemy.Table('region',
get_metadata(),
autoload=True)
self.assertEqual(1, session.query(region_nonunique).count())
# Write a second region to the database with the same description
add_region(region_nonunique)
self.assertEqual(2, session.query(region_nonunique).count())
def test_id_mapping(self):
self.upgrade(50)
self.assertTableDoesNotExist('id_mapping')
self.upgrade(51)
self.assertTableExists('id_mapping')
self.downgrade(50)
self.assertTableDoesNotExist('id_mapping')
def test_region_url_upgrade(self):
self.upgrade(52)
self.assertTableColumns('region',
['id', 'description', 'parent_region_id',
'extra', 'url'])
def test_region_url_downgrade(self):
self.upgrade(52)
self.downgrade(51)
self.assertTableColumns('region',
['id', 'description', 'parent_region_id',
'extra'])
def test_region_url_cleanup(self):
# make sure that the url field is dropped in the downgrade
self.upgrade(52)
session = self.Session()
beta = {
'id': uuid.uuid4().hex,
'description': uuid.uuid4().hex,
'parent_region_id': uuid.uuid4().hex,
'url': uuid.uuid4().hex
}
acme = {
'id': uuid.uuid4().hex,
'description': uuid.uuid4().hex,
'parent_region_id': uuid.uuid4().hex,
'url': None
}
self.insert_dict(session, 'region', beta)
self.insert_dict(session, 'region', acme)
region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
self.assertEqual(2, session.query(region_table).count())
session.close()
self.downgrade(51)
session = self.Session()
self.metadata.clear()
region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
self.assertEqual(2, session.query(region_table).count())
region = session.query(region_table)[0]
self.assertRaises(AttributeError, getattr, region, 'url')
def test_endpoint_region_upgrade_columns(self):
self.upgrade(53)
self.assertTableColumns('endpoint',
['id', 'legacy_endpoint_id', 'interface',
'service_id', 'url', 'extra', 'enabled',
'region_id'])
region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
self.assertEqual(region_table.c.id.type.length, 255)
self.assertEqual(region_table.c.parent_region_id.type.length, 255)
endpoint_table = sqlalchemy.Table('endpoint',
self.metadata,
autoload=True)
self.assertEqual(endpoint_table.c.region_id.type.length, 255)
def test_endpoint_region_downgrade_columns(self):
self.upgrade(53)
self.downgrade(52)
self.assertTableColumns('endpoint',
['id', 'legacy_endpoint_id', 'interface',
'service_id', 'url', 'extra', 'enabled',
'region'])
region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
self.assertEqual(region_table.c.id.type.length, 64)
self.assertEqual(region_table.c.parent_region_id.type.length, 64)
endpoint_table = sqlalchemy.Table('endpoint',
self.metadata,
autoload=True)
self.assertEqual(endpoint_table.c.region.type.length, 255)
def test_endpoint_region_migration(self):
self.upgrade(52)
session = self.Session()
_small_region_name = '0' * 30
_long_region_name = '0' * 255
_clashing_region_name = '0' * 70
def add_service():
service_id = uuid.uuid4().hex
service = {
'id': service_id,
'type': uuid.uuid4().hex
}
self.insert_dict(session, 'service', service)
return service_id
def add_endpoint(service_id, region):
endpoint_id = uuid.uuid4().hex
endpoint = {
'id': endpoint_id,
'interface': uuid.uuid4().hex[:8],
'service_id': service_id,
'url': uuid.uuid4().hex,
'region': region
}
self.insert_dict(session, 'endpoint', endpoint)
return endpoint_id
_service_id_ = add_service()
add_endpoint(_service_id_, region=_long_region_name)
add_endpoint(_service_id_, region=_long_region_name)
add_endpoint(_service_id_, region=_clashing_region_name)
add_endpoint(_service_id_, region=_small_region_name)
add_endpoint(_service_id_, region=None)
# upgrade to 53
session.close()
self.upgrade(53)
session = self.Session()
self.metadata.clear()
region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
self.assertEqual(1, session.query(region_table).
filter_by(id=_long_region_name).count())
self.assertEqual(1, session.query(region_table).
filter_by(id=_clashing_region_name).count())
self.assertEqual(1, session.query(region_table).
filter_by(id=_small_region_name).count())
endpoint_table = sqlalchemy.Table('endpoint',
self.metadata,
autoload=True)
self.assertEqual(5, session.query(endpoint_table).count())
self.assertEqual(2, session.query(endpoint_table).
filter_by(region_id=_long_region_name).count())
self.assertEqual(1, session.query(endpoint_table).
filter_by(region_id=_clashing_region_name).count())
self.assertEqual(1, session.query(endpoint_table).
filter_by(region_id=_small_region_name).count())
# downgrade to 52
session.close()
self.downgrade(52)
session = self.Session()
self.metadata.clear()
region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
self.assertEqual(1, session.query(region_table).count())
self.assertEqual(1, session.query(region_table).
filter_by(id=_small_region_name).count())
endpoint_table = sqlalchemy.Table('endpoint',
self.metadata,
autoload=True)
self.assertEqual(5, session.query(endpoint_table).count())
self.assertEqual(2, session.query(endpoint_table).
filter_by(region=_long_region_name).count())
self.assertEqual(1, session.query(endpoint_table).
filter_by(region=_clashing_region_name).count())
self.assertEqual(1, session.query(endpoint_table).
filter_by(region=_small_region_name).count())
def test_add_actor_id_index(self):
self.upgrade(53)
self.upgrade(54)
table = sqlalchemy.Table('assignment', self.metadata, autoload=True)
index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
self.assertIn(('ix_actor_id', ['actor_id']), index_data)
def test_token_user_id_and_trust_id_index_upgrade(self):
self.upgrade(54)
self.upgrade(55)
table = sqlalchemy.Table('token', self.metadata, autoload=True)
index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
self.assertIn(('ix_token_user_id', ['user_id']), index_data)
self.assertIn(('ix_token_trust_id', ['trust_id']), index_data)
def test_token_user_id_and_trust_id_index_downgrade(self):
self.upgrade(55)
self.downgrade(54)
table = sqlalchemy.Table('token', self.metadata, autoload=True)
index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
self.assertNotIn(('ix_token_user_id', ['user_id']), index_data)
self.assertNotIn(('ix_token_trust_id', ['trust_id']), index_data)
def test_remove_actor_id_index(self):
self.upgrade(54)
self.downgrade(53)
table = sqlalchemy.Table('assignment', self.metadata, autoload=True)
index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
self.assertNotIn(('ix_actor_id', ['actor_id']), index_data)
def test_project_parent_id_upgrade(self):
self.upgrade(61)
self.assertTableColumns('project',
['id', 'name', 'extra', 'description',
'enabled', 'domain_id', 'parent_id'])
def test_project_parent_id_downgrade(self):
self.upgrade(61)
self.downgrade(60)
self.assertTableColumns('project',
['id', 'name', 'extra', 'description',
'enabled', 'domain_id'])
def test_project_parent_id_cleanup(self):
# make sure that the parent_id field is dropped in the downgrade
self.upgrade(61)
session = self.Session()
domain = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'enabled': True}
acme = {
'id': uuid.uuid4().hex,
'description': uuid.uuid4().hex,
'domain_id': domain['id'],
'name': uuid.uuid4().hex,
'parent_id': None
}
beta = {
'id': uuid.uuid4().hex,
'description': uuid.uuid4().hex,
'domain_id': domain['id'],
'name': uuid.uuid4().hex,
'parent_id': acme['id']
}
self.insert_dict(session, 'domain', domain)
self.insert_dict(session, 'project', acme)
self.insert_dict(session, 'project', beta)
proj_table = sqlalchemy.Table('project', self.metadata, autoload=True)
self.assertEqual(2, session.query(proj_table).count())
session.close()
self.downgrade(60)
session = self.Session()
self.metadata.clear()
proj_table = sqlalchemy.Table('project', self.metadata, autoload=True)
self.assertEqual(2, session.query(proj_table).count())
project = session.query(proj_table)[0]
self.assertRaises(AttributeError, getattr, project, 'parent_id')
def test_drop_assignment_role_fk(self):
self.upgrade(61)
self.assertTrue(self.does_fk_exist('assignment', 'role_id'))
self.upgrade(62)
if self.engine.name != 'sqlite':
# sqlite does not support FK deletions (or enforcement)
self.assertFalse(self.does_fk_exist('assignment', 'role_id'))
self.downgrade(61)
self.assertTrue(self.does_fk_exist('assignment', 'role_id'))
def does_fk_exist(self, table, fk_column):
inspector = reflection.Inspector.from_engine(self.engine)
for fk in inspector.get_foreign_keys(table):
if fk_column in fk['constrained_columns']:
return True
return False
def test_drop_region_url_upgrade(self):
self.upgrade(63)
self.assertTableColumns('region',
['id', 'description', 'parent_region_id',
'extra'])
def test_drop_region_url_downgrade(self):
self.upgrade(63)
self.downgrade(62)
self.assertTableColumns('region',
['id', 'description', 'parent_region_id',
'extra', 'url'])
def test_drop_domain_fk(self):
self.upgrade(63)
self.assertTrue(self.does_fk_exist('group', 'domain_id'))
self.assertTrue(self.does_fk_exist('user', 'domain_id'))
self.upgrade(64)
if self.engine.name != 'sqlite':
# sqlite does not support FK deletions (or enforcement)
self.assertFalse(self.does_fk_exist('group', 'domain_id'))
self.assertFalse(self.does_fk_exist('user', 'domain_id'))
self.downgrade(63)
self.assertTrue(self.does_fk_exist('group', 'domain_id'))
self.assertTrue(self.does_fk_exist('user', 'domain_id'))
def populate_user_table(self, with_pass_enab=False,
with_pass_enab_domain=False):
# Populate the appropriate fields in the user
# table, depending on the parameters:
#
# Default: id, name, extra
# pass_enab: Add password, enabled as well
# pass_enab_domain: Add password, enabled and domain as well
#
this_table = sqlalchemy.Table("user",
self.metadata,
autoload=True)
for user in default_fixtures.USERS:
extra = copy.deepcopy(user)
extra.pop('id')
extra.pop('name')
if with_pass_enab:
password = extra.pop('password', None)
enabled = extra.pop('enabled', True)
ins = this_table.insert().values(
{'id': user['id'],
'name': user['name'],
'password': password,
'enabled': bool(enabled),
'extra': json.dumps(extra)})
else:
if with_pass_enab_domain:
password = extra.pop('password', None)
enabled = extra.pop('enabled', True)
extra.pop('domain_id')
ins = this_table.insert().values(
{'id': user['id'],
'name': user['name'],
'domain_id': user['domain_id'],
'password': password,
'enabled': bool(enabled),
'extra': json.dumps(extra)})
else:
ins = this_table.insert().values(
{'id': user['id'],
'name': user['name'],
'extra': json.dumps(extra)})
self.engine.execute(ins)
def populate_tenant_table(self, with_desc_enab=False,
with_desc_enab_domain=False):
# Populate the appropriate fields in the tenant or
# project table, depending on the parameters
#
# Default: id, name, extra
# desc_enab: Add description, enabled as well
# desc_enab_domain: Add description, enabled and domain as well,
# plus use project instead of tenant
#
if with_desc_enab_domain:
# By this time tenants are now projects
this_table = sqlalchemy.Table("project",
self.metadata,
autoload=True)
else:
this_table = sqlalchemy.Table("tenant",
self.metadata,
autoload=True)
for tenant in default_fixtures.TENANTS:
extra = copy.deepcopy(tenant)
extra.pop('id')
extra.pop('name')
if with_desc_enab:
desc = extra.pop('description', None)
enabled = extra.pop('enabled', True)
ins = this_table.insert().values(
{'id': tenant['id'],
'name': tenant['name'],
'description': desc,
'enabled': bool(enabled),
'extra': json.dumps(extra)})
else:
if with_desc_enab_domain:
desc = extra.pop('description', None)
enabled = extra.pop('enabled', True)
extra.pop('domain_id')
ins = this_table.insert().values(
{'id': tenant['id'],
'name': tenant['name'],
'domain_id': tenant['domain_id'],
'description': desc,
'enabled': bool(enabled),
'extra': json.dumps(extra)})
else:
ins = this_table.insert().values(
{'id': tenant['id'],
'name': tenant['name'],
'extra': json.dumps(extra)})
self.engine.execute(ins)
def _mysql_check_all_tables_innodb(self):
database = self.engine.url.database
connection = self.engine.connect()
# sanity check
total = connection.execute("SELECT count(*) "
"from information_schema.TABLES "
"where TABLE_SCHEMA='%(database)s'" %
dict(database=database))
self.assertTrue(total.scalar() > 0, "No tables found. Wrong schema?")
noninnodb = connection.execute("SELECT table_name "
"from information_schema.TABLES "
"where TABLE_SCHEMA='%(database)s' "
"and ENGINE!='InnoDB' "
"and TABLE_NAME!='migrate_version'" %
dict(database=database))
names = [x[0] for x in noninnodb]
self.assertEqual([], names,
"Non-InnoDB tables exist")
connection.close()
class VersionTests(SqlMigrateBase):
_initial_db_version = migrate_repo.DB_INIT_VERSION
def test_core_initial(self):
"""Get the version before migrated, it's the initial DB version."""
version = migration_helpers.get_db_version()
self.assertEqual(migrate_repo.DB_INIT_VERSION, version)
def test_core_max(self):
"""When get the version after upgrading, it's the new version."""
self.upgrade(self.max_version)
version = migration_helpers.get_db_version()
self.assertEqual(self.max_version, version)
def test_extension_not_controlled(self):
"""When get the version before controlling, raises DbMigrationError."""
self.assertRaises(db_exception.DbMigrationError,
migration_helpers.get_db_version,
extension='federation')
def test_extension_initial(self):
"""When get the initial version of an extension, it's 0."""
for name, extension in six.iteritems(EXTENSIONS):
abs_path = migration_helpers.find_migrate_repo(extension)
migration.db_version_control(sql.get_engine(), abs_path)
version = migration_helpers.get_db_version(extension=name)
self.assertEqual(0, version,
'Migrate version for %s is not 0' % name)
def test_extension_migrated(self):
"""When get the version after migrating an extension, it's not 0."""
for name, extension in six.iteritems(EXTENSIONS):
abs_path = migration_helpers.find_migrate_repo(extension)
migration.db_version_control(sql.get_engine(), abs_path)
migration.db_sync(sql.get_engine(), abs_path)
version = migration_helpers.get_db_version(extension=name)
self.assertTrue(
version > 0,
"Version for %s didn't change after migrated?" % name)
def test_extension_downgraded(self):
"""When get the version after downgrading an extension, it is 0."""
for name, extension in six.iteritems(EXTENSIONS):
abs_path = migration_helpers.find_migrate_repo(extension)
migration.db_version_control(sql.get_engine(), abs_path)
migration.db_sync(sql.get_engine(), abs_path)
version = migration_helpers.get_db_version(extension=name)
self.assertTrue(
version > 0,
"Version for %s didn't change after migrated?" % name)
migration.db_sync(sql.get_engine(), abs_path, version=0)
version = migration_helpers.get_db_version(extension=name)
self.assertEqual(0, version,
'Migrate version for %s is not 0' % name)
def test_unexpected_extension(self):
"""The version for an extension that doesn't exist raises ImportError.
"""
extension_name = uuid.uuid4().hex
self.assertRaises(ImportError,
migration_helpers.get_db_version,
extension=extension_name)
def test_unversioned_extension(self):
"""The version for extensions without migrations raise an exception.
"""
self.assertRaises(exception.MigrationNotProvided,
migration_helpers.get_db_version,
extension='admin_crud')
def test_initial_with_extension_version_None(self):
"""When performing a default migration, also migrate extensions."""
migration_helpers.sync_database_to_version(extension=None,
version=None)
for table in INITIAL_EXTENSION_TABLE_STRUCTURE:
self.assertTableColumns(table,
INITIAL_EXTENSION_TABLE_STRUCTURE[table])
def test_initial_with_extension_version_max(self):
"""When migrating to max version, do not migrate extensions."""
migration_helpers.sync_database_to_version(extension=None,
version=self.max_version)
for table in INITIAL_EXTENSION_TABLE_STRUCTURE:
self.assertTableDoesNotExist(table)
| apache-2.0 | -1,600,424,370,265,515,800 | 39.589468 | 79 | 0.541134 | false |
michal-ruzicka/archivematica | src/MCPClient/lib/clientScripts/moveDspaceMetsFilesToDSpaceMETS.py | 1 | 2718 | #!/usr/bin/python -OO
# This file is part of Archivematica.
#
# Copyright 2010-2013 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry <[email protected]>
import os
import sys
import lxml.etree as etree
# archivematicaCommon
from custom_handlers import get_script_logger
from fileOperations import updateFileLocation
from fileOperations import renameAsSudo
def verifyMetsFileSecChecksums(metsFile, date, taskUUID, transferDirectory, transferUUID, relativeDirectory="./"):
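# NOTE: despite its name this function does not verify checksums; it moves
# the DSpace METS file into metadata/submissionDocumentation/DSpaceMets and
# records the move through updateFileLocation().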
print metsFile
DSpaceMets = "metadata/submissionDocumentation/DSpaceMets"
try:
path = os.path.join(transferDirectory, DSpaceMets)
if not os.path.isdir(path):
os.mkdir(path)
except:
print "error creating DSpaceMets directory."
exitCode = 0
metsDirectory = os.path.basename(os.path.dirname(metsFile))
if metsDirectory == "DSpace_export":
outputDirectory = path
else:
outputDirectory = os.path.join(path, metsDirectory)
if not os.path.isdir(outputDirectory):
os.mkdir(outputDirectory)
dest = os.path.join(outputDirectory, "mets.xml")
renameAsSudo(metsFile, dest)
src = metsFile.replace(transferDirectory, "%transferDirectory%")
dst = dest.replace(transferDirectory, "%transferDirectory%")
eventDetail = ""
eventOutcomeDetailNote = "moved from=\"" + src + "\"; moved to=\"" + dst + "\""
updateFileLocation(src, dst, "movement", date, eventDetail, transferUUID=transferUUID, eventOutcomeDetailNote = eventOutcomeDetailNote)
return exitCode
if __name__ == '__main__':
logger = get_script_logger("archivematica.mcp.client.moveDspaceMetsFilesToDSpaceMETS")
metsFile = sys.argv[1]
date = sys.argv[2]
taskUUID = sys.argv[3]
transferDirectory = sys.argv[4]
transferUUID = sys.argv[5]
ret = verifyMetsFileSecChecksums(metsFile, date, taskUUID, transferDirectory, transferUUID, relativeDirectory=os.path.dirname(metsFile) + "/")
quit(ret)
| agpl-3.0 | -4,781,895,504,976,434,000 | 35.24 | 146 | 0.729581 | false |
stevearc/pyramid_duh | pyramid_duh/view.py | 1 | 5396 | """ Utilities for view configuration """
import fnmatch
import re
import functools
import inspect
import six
from pyramid.httpexceptions import HTTPFound
from .params import is_request
def match(pattern, path, flags):
"""
Check if a pattern matches a path
Parameters
----------
pattern : str
Glob or PCRE
path : str or None
The path to check, or None if no path
flags : {'r', 'i', 'a', '?'}
Special match flags. These may be combined (e.g. 'ri?'). See the notes
for an explanation of the different values.
Returns
-------
match : bool or SRE_Match
A boolean indicating the match status, or the regex match object if
there was a successful PCRE match.
Notes
-----
==== ==============================================
Flag Description
==== ==============================================
r Match using PCRE (default glob)
i Case-insensitive match (must be used with 'r')
a ASCII-only match (must be used with 'r', python 3 only)
? Path is optional (return True if path is None)
==== ==============================================
"""
if path is None:
if '?' in flags:
return True
else:
return False
if 'r' in flags:
re_flags = 0
for char in flags:
if char == 'i':
re_flags |= re.I
elif char == 'a' and hasattr(re, 'A'): # pragma: no cover
re_flags |= re.A # pylint: disable=E1101
return re.match('^%s$' % pattern, path, re_flags)
else:
return fnmatch.fnmatchcase(path, pattern)
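# Illustrative calls (assumed usage, not part of the original module):
#   match('*.txt', 'notes.txt', '')   -> True (glob)
#   match(r'v\d+', 'v12', 'r')        -> SRE match object (PCRE)
#   match(r'v\d+', 'V12', 'ri')       -> SRE match object (case-insensitive)
#   match('anything', None, '?')      -> True (path is optional)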
class SubpathPredicate(object):
"""
Generate a custom predicate that matches subpaths
Parameters
----------
*paths : list
List of match specs.
Notes
-----
A match spec may take one of three forms:
.. code-block:: python
'glob'
'name/glob'
'name/glob/flags'
The name is optional, but if you wish to specify flags then you have to
include the leading slash:
.. code-block:: python
# A match spec with flags and no name
'/foo.*/r'
The names will be accessible from the ``request.named_subpaths`` attribute.
.. code-block:: python
@view_config(context=Root, name='simple', subpath=('package/*', 'version/*/?'))
def simple(request):
pkg = request.named_subpaths['package']
version = request.named_subpaths.get('version')
request.response.body = '<h1>%s</h1>' % pkg
if version is not None:
request.response.body += '<h4>version: %s</h4>' % version
return request.response
See :meth:`.match` for more information on match flags`
"""
def __init__(self, paths, config):
if isinstance(paths, six.string_types):
paths = (paths,)
self.paths = paths
self.config = config
def text(self):
""" Display name """
return 'subpath = %s' % (self.paths,)
phash = text
def __call__(self, context, request):
named_subpaths = {}
if len(request.subpath) > len(self.paths):
return False
for i in range(len(self.paths)):
spec = self.paths[i]
pieces = spec.split('/', 2)
if len(pieces) == 1:
name, pattern, flags = None, pieces[0], ''
elif len(pieces) == 2:
name, pattern, flags = pieces[0], pieces[1], ''
else:
name, pattern, flags = pieces
if i < len(request.subpath):
path = request.subpath[i]
else:
path = None
result = match(pattern, path, flags)
if not result:
return False
if name and path is not None:
named_subpaths[name] = path
if hasattr(result, 'groupdict'):
named_subpaths.update(result.groupdict())
request.named_subpaths = named_subpaths
return True
def addslash(fxn):
"""
View decorator that adds a trailing slash
Notes
-----
Usage:
.. code-block:: python
@view_config(context=MyCtxt, renderer='json')
@addslash
def do_view(request):
return 'cool data'
"""
argspec = inspect.getargspec(fxn)
@functools.wraps(fxn)
def slash_redirect(*args, **kwargs):
""" Perform the redirect or pass though to view """
# pyramid always calls with (context, request) arguments
if len(args) == 2 and is_request(args[1]):
request = args[1]
if not request.path_url.endswith('/'):
new_url = request.path_url + '/'
if request.query_string:
new_url += '?' + request.query_string
return HTTPFound(location=new_url)
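# Views may be declared as view(request) or view(context, request);
# dispatch on the wrapped function's signature accordingly.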
if len(argspec.args) == 1 and argspec.varargs is None:
return fxn(request)
else:
return fxn(*args)
else:
# Otherwise, it's likely a unit test. Don't change anything.
return fxn(*args, **kwargs)
return slash_redirect
def includeme(config):
""" Add the custom view predicates """
config.add_view_predicate('subpath', SubpathPredicate)
| mit | 1,096,755,703,211,089,400 | 27.251309 | 87 | 0.532061 | false |
diegojromerolopez/djanban | src/djanban/apps/repositories/migrations/0003_auto_20160921_1748.py | 1 | 1075 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-21 15:48
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('repositories', '0002_auto_20160911_1601'),
]
operations = [
migrations.AddField(
model_name='gitlabrepository',
name='ci_token',
field=models.CharField(default='', help_text='CI token used to clone and checkout the repository', max_length=128, verbose_name='CI token for the repository'),
preserve_default=False,
),
migrations.AddField(
model_name='gitlabrepository',
name='project_userspace',
field=models.CharField(default='', max_length=128, verbose_name='Project userspace'),
preserve_default=False,
),
migrations.AlterField(
model_name='gitlabrepository',
name='project_name',
field=models.CharField(max_length=128, verbose_name='Project name'),
),
]
| mit | -2,720,929,637,803,019,000 | 32.59375 | 171 | 0.610233 | false |
FrodeSolheim/fs-uae-launcher | launcher/ui/imports/ImportTask.py | 1 | 3835 | import os
import shutil
import threading
import traceback
import fsui
from fsbc.path import is_same_file
from fsgamesys.amiga.rommanager import ROMManager
from fsgamesys.filedatabase import FileDatabase
from fsgamesys.FSGSDirectories import FSGSDirectories
from ...i18n import gettext
from ...launcher_signal import LauncherSignal
class ImportTask(threading.Thread):
AMIGA_FOREVER = 1
def __init__(self, path, import_type):
threading.Thread.__init__(self, name="ImportTaskThread")
self.path = path
self.type = import_type
self.done = False
self.log_lines = []
self.log_lock = threading.Lock()
def run(self):
if self.type:
self.log(gettext("Amiga Forever import task started"))
else:
self.log(gettext("Kickstart import task started"))
self.log("")
print("ImportTask.run")
try:
count = self.run_task()
except Exception as e:
self.log("")
self.log(repr(e))
traceback.print_exc()
else:
if count == 0:
self.log(gettext("Warning: No ROM files were found!"))
else:
self.log("")
self.log(gettext("{} ROM files were imported").format(count))
self.done = True
print("ImportTask.run is done")
self.log("")
self.log(gettext("Import task is done"))
def run_in_main():
LauncherSignal.broadcast("scan_done")
fsui.call_after(run_in_main)
def get_new_log_lines(self, count):
with self.log_lock:
if len(self.log_lines) <= count:
return []
return self.log_lines[count:]
def log(self, line):
with self.log_lock:
self.log_lines.append(line)
def run_task(self):
if self.type == 0:
return self.import_roms()
elif self.type == 1:
return self.import_amiga_forever()
def import_roms(self):
return self.copy_roms(self.path, FSGSDirectories.get_kickstarts_dir())
def import_amiga_forever(self):
return self.copy_roms(
os.path.join(self.path, "Amiga Files", "Shared", "rom"),
FSGSDirectories.get_kickstarts_dir(),
)
def copy_file(self, src, dst):
with self.log_lock:
self.log_lines.append(gettext("Copy {0}\nto {1}").format(src, dst))
if is_same_file(src, dst):
self.log_lines.append(
"- source and destination are the same, skipping..."
)
return
if not os.path.exists(os.path.dirname(dst)):
os.makedirs(os.path.dirname(dst))
if os.path.exists(dst):
# try to remove the file first, in case the file has read-only
# permissions
try:
os.remove(dst)
except Exception:
pass
shutil.copy(src, dst)
def copy_roms(self, src, dst):
count = 0
if not os.path.isdir(src):
self.log("{0} is not a directory".format(src))
return count
src_file = os.path.join(src, "rom.key")
if os.path.exists(src_file):
dst_file = os.path.join(dst, "rom.key")
self.copy_file(src_file, dst_file)
for file_name in os.listdir(src):
name, ext = os.path.splitext(file_name)
if ext not in [".rom"]:
continue
src_file = os.path.join(src, file_name)
dst_file = os.path.join(dst, file_name)
self.copy_file(src_file, dst_file)
database = FileDatabase.get_instance()
ROMManager.add_rom_to_database(dst_file, database, self.log)
database.commit()
count += 1
return count
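# Illustrative usage (assumed; the launcher normally starts this task from
# its import dialog and polls get_new_log_lines() for progress):
#   task = ImportTask("/path/to/AmigaForever", ImportTask.AMIGA_FOREVER)
#   task.start()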
| gpl-2.0 | 5,107,383,758,132,274,000 | 30.694215 | 79 | 0.557497 | false |
smallyear/linuxLearn | salt/salt/modules/groupadd.py | 1 | 6379 | # -*- coding: utf-8 -*-
'''
Manage groups on Linux, OpenBSD and NetBSD
'''
# Import python libs
from __future__ import absolute_import
import logging
try:
import grp
except ImportError:
pass
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'group'
def __virtual__():
'''
Set the group module if the kernel is Linux, OpenBSD or NetBSD
'''
if __grains__['kernel'] in ('Linux', 'OpenBSD', 'NetBSD'):
return __virtualname__
return False
def add(name, gid=None, system=False):
'''
Add the specified group
CLI Example:
.. code-block:: bash
salt '*' group.add foo 3456
'''
cmd = 'groupadd '
if gid:
cmd += '-g {0} '.format(gid)
if system and __grains__['kernel'] != 'OpenBSD':
cmd += '-r '
cmd += name
ret = __salt__['cmd.run_all'](cmd, python_shell=False)
return not ret['retcode']
def delete(name):
'''
Remove the named group
CLI Example:
.. code-block:: bash
salt '*' group.delete foo
'''
ret = __salt__['cmd.run_all']('groupdel {0}'.format(name), python_shell=False)
return not ret['retcode']
def info(name):
'''
Return information about a group
CLI Example:
.. code-block:: bash
salt '*' group.info foo
'''
try:
grinfo = grp.getgrnam(name)
except KeyError:
return {}
else:
return _format_info(grinfo)
def _format_info(data):
'''
Return formatted information in a pretty way.
'''
return {'name': data.gr_name,
'passwd': data.gr_passwd,
'gid': data.gr_gid,
'members': data.gr_mem}
def getent(refresh=False):
'''
Return info on all groups
CLI Example:
.. code-block:: bash
salt '*' group.getent
'''
if 'group.getent' in __context__ and not refresh:
return __context__['group.getent']
ret = []
for grinfo in grp.getgrall():
ret.append(_format_info(grinfo))
__context__['group.getent'] = ret
return ret
def chgid(name, gid):
'''
Change the gid for a named group
CLI Example:
.. code-block:: bash
salt '*' group.chgid foo 4376
'''
pre_gid = __salt__['file.group_to_gid'](name)
if gid == pre_gid:
return True
cmd = 'groupmod -g {0} {1}'.format(gid, name)
__salt__['cmd.run'](cmd, python_shell=False)
post_gid = __salt__['file.group_to_gid'](name)
if post_gid != pre_gid:
return post_gid == gid
return False
def adduser(name, username):
'''
Add a user in the group.
CLI Example:
.. code-block:: bash
salt '*' group.adduser foo bar
Verifies whether the user 'bar' is a member of the existing group 'foo'
and adds the user if not.
'''
on_redhat_5 = __grains__.get('os_family') == 'RedHat' and __grains__.get('osmajorrelease') == '5'
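# RHEL/CentOS 5 ships an older gpasswd that only understands the short
# -a/-d/-M flags, hence the branching below (assumption inferred from the code).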
if __grains__['kernel'] == 'Linux':
if on_redhat_5:
cmd = 'gpasswd -a {0} {1}'.format(username, name)
else:
cmd = 'gpasswd --add {0} {1}'.format(username, name)
else:
cmd = 'usermod -G {0} {1}'.format(name, username)
retcode = __salt__['cmd.retcode'](cmd, python_shell=False)
return not retcode
def deluser(name, username):
'''
Remove a user from the group.
CLI Example:
.. code-block:: bash
salt '*' group.deluser foo bar
Removes a member user 'bar' from a group 'foo'. If group is not present
then returns True.
'''
on_redhat_5 = __grains__.get('os_family') == 'RedHat' and __grains__.get('osmajorrelease') == '5'
grp_info = __salt__['group.info'](name)
try:
if username in grp_info['members']:
if __grains__['kernel'] == 'Linux':
if on_redhat_5:
cmd = 'gpasswd -d {0} {1}'.format(username, name)
else:
cmd = 'gpasswd --del {0} {1}'.format(username, name)
retcode = __salt__['cmd.retcode'](cmd, python_shell=False)
elif __grains__['kernel'] == 'OpenBSD':
out = __salt__['cmd.run_stdout']('id -Gn {0}'.format(username),
python_shell=False)
cmd = 'usermod -S '
cmd += ','.join([g for g in out.split() if g != str(name)])
cmd += ' {0}'.format(username)
retcode = __salt__['cmd.retcode'](cmd, python_shell=False)
else:
log.error('group.deluser is not yet supported on this platform')
return False
return not retcode
else:
return True
except Exception:
return True
def members(name, members_list):
'''
Replaces members of the group with a provided list.
CLI Example:
.. code-block:: bash
salt '*' group.members foo 'user1,user2,user3,...'
Replaces a membership list for a local group 'foo'.
foo:x:1234:user1,user2,user3,...
'''
on_redhat_5 = __grains__.get('os_family') == 'RedHat' and __grains__.get('osmajorrelease') == '5'
if __grains__['kernel'] == 'Linux':
if on_redhat_5:
cmd = 'gpasswd -M {0} {1}'.format(members_list, name)
else:
cmd = 'gpasswd --members {0} {1}'.format(members_list, name)
retcode = __salt__['cmd.retcode'](cmd, python_shell=False)
elif __grains__['kernel'] == 'OpenBSD':
retcode = 1
grp_info = __salt__['group.info'](name)
if grp_info and name in grp_info['name']:
__salt__['cmd.run']('groupdel {0}'.format(name),
python_shell=False)
__salt__['cmd.run']('groupadd -g {0} {1}'.format(
grp_info['gid'], name), python_shell=False)
for user in members_list.split(","):
if user:
retcode = __salt__['cmd.retcode'](
'usermod -G {0} {1}'.format(name, user),
python_shell=False)
if not retcode == 0:
break
# provided list is '': users previously deleted from group
else:
retcode = 0
else:
log.error('group.members is not yet supported on this platform')
return False
return not retcode
| apache-2.0 | -5,202,034,855,072,395,000 | 24.930894 | 101 | 0.525474 | false |
phtagn/sickbeard_mp4_automator | readSettings.py | 1 | 33044 | import os
import sys
import locale
try:
import configparser
except ImportError:
import ConfigParser as configparser
import logging
from extensions import *
from babelfish import Language
import languagecode
class ReadSettings:
def __init__(self, directory, filename, logger=None):
# Setup logging
if logger:
log = logger
else:
log = logging.getLogger(__name__)
# Setup encoding to avoid UTF-8 errors
if sys.version[0] == '2':
SYS_ENCODING = None
try:
locale.setlocale(locale.LC_ALL, "")
SYS_ENCODING = locale.getpreferredencoding()
except (locale.Error, IOError):
pass
# For OSes that are poorly configured just force UTF-8
if not SYS_ENCODING or SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
SYS_ENCODING = 'UTF-8'
if not hasattr(sys, "setdefaultencoding"):
reload(sys)
try:
# pylint: disable=E1101
# On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
sys.setdefaultencoding(SYS_ENCODING)
except:
log.exception("Sorry, your environment is not setup correctly for utf-8 support. Please fix your setup and try again")
sys.exit("Sorry, your environment is not setup correctly for utf-8 support. Please fix your setup and try again")
log.info(sys.executable)
# Default settings for SickBeard
sb_defaults = {'host': 'localhost',
'port': '8081',
'ssl': "False",
'api_key': '',
'web_root': '',
'username': '',
'password': ''}
# Default MP4 conversion settings
mp4_defaults = {'ffmpeg': 'ffmpeg.exe',
'ffprobe': 'ffprobe.exe',
'threads': 'auto',
'output_directory': '',
'copy_to': '',
'move_to': '',
'output_extension': 'mp4',
'output_format': 'mp4',
'delete_original': 'True',
'relocate_moov': 'True',
'ios-audio': 'True',
'ios-first-track-only': 'False',
'ios-move-last': 'False',
'ios-audio-filter': '',
'max-audio-channels': '',
'audio-language': '',
'audio-default-language': '',
'audio-codec': 'ac3',
'audio-filter': '',
'audio-channel-bitrate': '256',
'audio-copy-original': 'False',
'video-codec': 'h264, x264',
'video-bitrate': '',
'video-crf': '',
'video-max-width': '',
'video-profile': '',
'h264-max-level': '',
'aac_adtstoasc': 'False',
'use-qsv-decoder-with-encoder': 'True',
'use-hevc-qsv-decoder': 'False',
'enable_dxva2_gpu_decode': 'False',
'subtitle-codec': 'mov_text',
'subtitle-language': '',
'subtitle-default-language': '',
'subtitle-encoding': '',
'convert-mp4': 'False',
'fullpathguess': 'True',
'tagfile': 'True',
'tag-language': 'en',
'download-artwork': 'poster',
'download-subs': 'False',
'embed-subs': 'True',
'embed-only-internal-subs': 'False',
'sub-providers': 'addic7ed, podnapisi, thesubdb, opensubtitles',
'permissions': '777',
'post-process': 'False',
'pix-fmt': '',
'preopts': '',
'postopts': ''}
# Default settings for CouchPotato
cp_defaults = {'host': 'localhost',
'port': '5050',
'username': '',
'password': '',
'apikey': '',
'delay': '65',
'method': 'renamer',
'delete_failed': 'False',
'ssl': 'False',
'web_root': ''}
# Default settings for Sonarr
sonarr_defaults = {'host': 'localhost',
'port': '8989',
'apikey': '',
'ssl': 'False',
'web_root': ''}
# Default settings for Radarr
radarr_defaults = {'host': 'localhost',
'port': '7878',
'apikey': '',
'ssl': 'False',
'web_root': ''}
# Default uTorrent settings
utorrent_defaults = {'couchpotato-label': 'couchpotato',
'sickbeard-label': 'sickbeard',
'sickrage-label': 'sickrage',
'sonarr-label': 'sonarr',
'radarr-label': 'radarr',
'bypass-label': 'bypass',
'convert': 'True',
'webui': 'False',
'action_before': 'stop',
'action_after': 'removedata',
'host': 'http://localhost:8080/',
'username': '',
'password': '',
'output_directory': ''}
# Default SAB settings
sab_defaults = {'convert': 'True',
'Sickbeard-category': 'sickbeard',
'Sickrage-category': 'sickrage',
'Couchpotato-category': 'couchpotato',
'Sonarr-category': 'sonarr',
'Radarr-category': 'radarr',
'Bypass-category': 'bypass',
'output_directory': ''}
# Default Sickrage Settings
sr_defaults = {'host': 'localhost',
'port': '8081',
'ssl': "False",
'api_key': '',
'web_root': '',
'username': '',
'password': ''}
# Default deluge settings
deluge_defaults = {'couchpotato-label': 'couchpotato',
'sickbeard-label': 'sickbeard',
'sickrage-label': 'sickrage',
'sonarr-label': 'sonarr',
'radarr-label': 'radarr',
'bypass-label': 'bypass',
'convert': 'True',
'host': 'localhost',
'port': '58846',
'username': '',
'password': '',
'output_directory': '',
'remove': 'false'}
# Default Plex Settings
plex_defaults = {'host': 'localhost',
'port': '32400',
'refresh': 'true',
'token': ''}
defaults = {'SickBeard': sb_defaults, 'CouchPotato': cp_defaults, 'Sonarr': sonarr_defaults, 'Radarr': radarr_defaults, 'MP4': mp4_defaults, 'uTorrent': utorrent_defaults, 'SABNZBD': sab_defaults, 'Sickrage': sr_defaults, 'Deluge': deluge_defaults, 'Plex': plex_defaults}
write = False # Will be changed to true if a value is missing from the config file and needs to be written
config = configparser.SafeConfigParser()
configFile = os.path.join(directory, filename)
if os.path.isfile(configFile):
config.read(configFile)
else:
log.error("Config file not found, creating %s." % configFile)
# config.filename = filename
write = True
# Make sure all sections and all keys for each section are present
for s in defaults:
if not config.has_section(s):
config.add_section(s)
write = True
for k in defaults[s]:
if not config.has_option(s, k):
config.set(s, k, defaults[s][k])
write = True
# If any keys are missing from the config file, write them
if write:
self.writeConfig(config, configFile)
# Read relevant MP4 section information
section = "MP4"
self.ffmpeg = os.path.normpath(self.raw(config.get(section, "ffmpeg"))) # Location of FFMPEG.exe
self.ffprobe = os.path.normpath(self.raw(config.get(section, "ffprobe"))) # Location of FFPROBE.exe
self.threads = config.get(section, "threads") # Number of FFMPEG threads
try:
if int(self.threads) < 1:
self.threads = "auto"
except:
self.threads = "auto"
self.output_dir = config.get(section, "output_directory")
if self.output_dir == '':
self.output_dir = None
else:
self.output_dir = os.path.normpath(self.raw(self.output_dir)) # Output directory
self.copyto = config.get(section, "copy_to") # Directories to make copies of the final product
if self.copyto == '':
self.copyto = None
else:
self.copyto = self.copyto.split('|')
for i in range(len(self.copyto)):
self.copyto[i] = os.path.normpath(self.copyto[i])
if not os.path.isdir(self.copyto[i]):
try:
os.makedirs(self.copyto[i])
except:
log.exception("Error making directory %s." % (self.copyto[i]))
self.moveto = config.get(section, "move_to") # Directory to move final product to
if self.moveto == '':
self.moveto = None
else:
self.moveto = os.path.normpath(self.moveto)
if not os.path.isdir(self.moveto):
try:
os.makedirs(self.moveto)
except:
log.exception("Error making directory %s." % (self.moveto))
self.moveto = None
self.output_extension = config.get(section, "output_extension") # Output extension
self.output_format = config.get(section, "output_format") # Output format
if self.output_format not in valid_formats:
self.output_format = 'mov'
self.delete = config.getboolean(section, "delete_original") # Delete original file
self.relocate_moov = config.getboolean(section, "relocate_moov") # Relocate MOOV atom to start of file
if self.relocate_moov:
try:
import qtfaststart
except:
log.error("Please install QTFastStart via PIP, relocate_moov will be disabled without this module.")
self.relocate_moov = False
self.acodec = config.get(section, "audio-codec").lower() # Gets the desired audio codec, if no valid codec selected, default to AC3
if self.acodec == '':
            self.acodec = ['ac3']
else:
self.acodec = self.acodec.lower().replace(' ', '').split(',')
self.abitrate = config.get(section, "audio-channel-bitrate")
try:
self.abitrate = int(self.abitrate)
except:
self.abitrate = 256
log.warning("Audio bitrate was invalid, defaulting to 256 per channel.")
if self.abitrate > 256:
log.warning("Audio bitrate >256 may create errors with common codecs.")
self.audio_copyoriginal = config.getboolean(section, "audio-copy-original") # Copies the original audio track regardless of format if a converted track is being generated
self.afilter = config.get(section, "audio-filter").lower().strip() # Audio filter
if self.afilter == '':
self.afilter = None
        self.iOS = config.get(section, "ios-audio") # Creates a second audio channel if the standard output methods are different from this for iOS compatibility
if self.iOS == "" or self.iOS.lower() in ['false', 'no', 'f', '0']:
self.iOS = False
else:
if self.iOS.lower() in ['true', 'yes', 't', '1']:
self.iOS = ['aac']
else:
self.iOS = self.iOS.lower().replace(' ', '').split(',')
self.iOSFirst = config.getboolean(section, "ios-first-track-only") # Enables the iOS audio option only for the first track
self.iOSLast = config.getboolean(section, "ios-move-last") # Moves the iOS audio track to the last in the series of tracks
self.iOSfilter = config.get(section, "ios-audio-filter").lower().strip() # iOS audio filter
if self.iOSfilter == '':
self.iOSfilter = None
self.downloadsubs = config.getboolean(section, "download-subs") # Enables downloading of subtitles from the internet sources using subliminal
if self.downloadsubs:
try:
import subliminal
except Exception as e:
self.downloadsubs = False
log.exception("Subliminal is not installed, automatically downloading of subs has been disabled.")
self.subproviders = config.get(section, 'sub-providers').lower()
if self.subproviders == '':
self.downloadsubs = False
log.warning("You must specifiy at least one subtitle provider to downlaod subs automatically, subtitle downloading disabled.")
else:
self.subproviders = self.subproviders.lower().replace(' ', '').split(',')
self.embedsubs = config.getboolean(section, 'embed-subs')
self.embedonlyinternalsubs = config.getboolean(section, 'embed-only-internal-subs')
self.permissions = config.get(section, 'permissions')
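        # Note: the permissions string is parsed as octal below, so the default
        # "777" becomes mode 0o777 (rwxrwxrwx), i.e. int("777", 8) == 511.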
try:
self.permissions = int(self.permissions, 8)
except:
log.exception("Invalid permissions, defaulting to 777.")
self.permissions = int("0777", 8)
try:
self.postprocess = config.getboolean(section, 'post-process')
except:
self.postprocess = False
self.aac_adtstoasc = config.getboolean(section, 'aac_adtstoasc')
# Setup variable for maximum audio channels
self.maxchannels = config.get(section, 'max-audio-channels')
if self.maxchannels == "":
self.maxchannels = None
else:
try:
self.maxchannels = int(self.maxchannels)
except:
log.exception("Invalid number of audio channels specified.")
self.maxchannels = None
if self.maxchannels is not None and self.maxchannels < 1:
log.warning("Must have at least 1 audio channel.")
self.maxchannels = None
self.vcodec = config.get(section, "video-codec")
if self.vcodec == '':
            self.vcodec = ['h264', 'x264']
else:
self.vcodec = self.vcodec.lower().replace(' ', '').split(',')
self.vbitrate = config.get(section, "video-bitrate")
if self.vbitrate == '':
self.vbitrate = None
else:
try:
self.vbitrate = int(self.vbitrate)
if not (self.vbitrate > 0):
self.vbitrate = None
log.warning("Video bitrate must be greater than 0, defaulting to no video bitrate cap.")
except:
log.exception("Invalid video bitrate, defaulting to no video bitrate cap.")
self.vbitrate = None
self.vcrf = config.get(section, "video-crf")
if self.vcrf == '':
self.vcrf = None
else:
try:
self.vcrf = int(self.vcrf)
except:
log.exception("Invalid CRF setting, defaulting to none.")
self.vcrf = None
self.vwidth = config.get(section, "video-max-width")
if self.vwidth == '':
self.vwidth = None
else:
try:
self.vwidth = int(self.vwidth)
except:
log.exception("Invalid video width, defaulting to none.")
self.vwidth = None
self.h264_level = config.get(section, "h264-max-level")
if self.h264_level == '':
self.h264_level = None
else:
try:
self.h264_level = float(self.h264_level)
except:
log.exception("Invalid h264 level, defaulting to none.")
self.h264_level = None
self.vprofile = config.get(section, "video-profile")
if self.vprofile == '':
self.vprofile = None
else:
self.vprofile = self.vprofile.lower().strip().replace(' ', '').split(',')
self.qsv_decoder = config.getboolean(section, "use-qsv-decoder-with-encoder") # Use Intel QuickSync Decoder when using QuickSync Encoder
        self.hevc_qsv_decoder = config.getboolean(section, "use-hevc-qsv-decoder")  # Only supported on 6th gen Intel and up
        self.dxva2_decoder = config.getboolean(section, "enable_dxva2_gpu_decode")
self.pix_fmt = config.get(section, "pix-fmt").strip().lower()
if self.pix_fmt == '':
self.pix_fmt = None
else:
self.pix_fmt = self.pix_fmt.lower().replace(' ', '').split(',')
self.awl = config.get(section, 'audio-language').strip().lower() # List of acceptable languages for audio streams to be carried over from the original file, separated by a comma. Blank for all
self.awl = languagecode.validateLangCode(self.awl.replace(' ', '').split(','))
self.scodec = config.get(section, 'subtitle-codec').strip().lower()
if not self.scodec or self.scodec == "":
if self.embedsubs:
self.scodec = ['mov_text']
else:
self.scodec = ['srt']
log.warning("Invalid subtitle codec, defaulting to '%s'." % self.scodec)
else:
self.scodec = self.scodec.replace(' ', '').split(',')
if self.embedsubs:
if len(self.scodec) > 1:
log.warning("Can only embed one subtitle type, defaulting to 'mov_text'.")
self.scodec = ['mov_text']
if self.scodec[0] not in valid_internal_subcodecs:
log.warning("Invalid interal subtitle codec %s, defaulting to 'mov_text'." % self.scodec[0])
self.scodec = ['mov_text']
else:
            for codec in list(self.scodec):  # iterate over a copy so removals don't skip items
if codec not in valid_external_subcodecs:
log.warning("Invalid external subtitle codec %s, ignoring." % codec)
self.scodec.remove(codec)
if len(self.scodec) == 0:
log.warning("No valid subtitle formats found, defaulting to 'srt'.")
self.scodec = ['srt']
self.swl = config.get(section, 'subtitle-language').strip().lower() # List of acceptable languages for subtitle streams to be carried over from the original file, separated by a comma. Blank for all
self.swl = languagecode.validateLangCode(self.swl.replace(' ', '').split(','))
self.subencoding = config.get(section, 'subtitle-encoding').strip().lower()
if self.subencoding == '':
self.subencoding = None
self.adl = languagecode.validateLangCode(config.get(section, 'audio-default-language')) # What language to default an undefined audio language tag to. If blank, it will remain undefined. This is useful for single language releases which tend to leave things tagged as und
self.sdl = languagecode.validateLangCode(config.get(section, 'subtitle-default-language')) # What language to default an undefined subtitle language tag to. If blank, it will remain undefined. This is useful for single language releases which tend to leave things tagged as und
# Prevent incompatible combination of settings
if self.output_dir == "" and self.delete is False:
log.error("You must specify an alternate output directory if you aren't going to delete the original file.")
sys.exit()
# Create output directory if it does not exist
if self.output_dir is not None:
if not os.path.isdir(self.output_dir):
os.makedirs(self.output_dir)
self.processMP4 = config.getboolean(section, "convert-mp4") # Determine whether or not to reprocess mp4 files or just tag them
self.fullpathguess = config.getboolean(section, "fullpathguess") # Guess using the full path or not
self.tagfile = config.getboolean(section, "tagfile") # Tag files with metadata
self.taglanguage = config.get(section, "tag-language").strip().lower() # Language to tag files
if len(self.taglanguage) > 2:
try:
babel = Language(self.taglanguage)
self.taglanguage = babel.alpha2
except:
log.exception("Unable to set tag language, defaulting to English.")
self.taglanguage = 'en'
elif len(self.taglanguage) < 2:
log.exception("Unable to set tag language, defaulting to English.")
self.taglanguage = 'en'
self.artwork = config.get(section, "download-artwork").lower() # Download and embed artwork
if self.artwork == "poster":
self.artwork = True
self.thumbnail = False
elif self.artwork == "thumb" or self.artwork == "thumbnail":
self.artwork = True
self.thumbnail = True
else:
self.thumbnail = False
try:
self.artwork = config.getboolean(section, "download-artwork")
except:
self.artwork = True
log.error("Invalid download-artwork value, defaulting to 'poster'.")
self.preopts = config.get(section, "preopts").lower()
if self.preopts == '':
self.preopts = None
else:
self.preopts = self.preopts.split(',')
            self.preopts = [o.strip() for o in self.preopts]
self.postopts = config.get(section, "postopts").lower()
if self.postopts == '':
self.postopts = None
else:
self.postopts = self.postopts.split(',')
            self.postopts = [o.strip() for o in self.postopts]
# Read relevant CouchPotato section information
section = "CouchPotato"
self.CP = {}
self.CP['host'] = config.get(section, "host")
self.CP['port'] = config.get(section, "port")
self.CP['username'] = config.get(section, "username")
self.CP['password'] = config.get(section, "password")
self.CP['apikey'] = config.get(section, "apikey")
self.CP['delay'] = config.get(section, "delay")
self.CP['method'] = config.get(section, "method")
self.CP['web_root'] = config.get(section, "web_root")
try:
self.CP['delay'] = float(self.CP['delay'])
except ValueError:
self.CP['delay'] = 60
try:
self.CP['delete_failed'] = config.getboolean(section, "delete_failed")
except (configparser.NoOptionError, ValueError):
self.CP['delete_failed'] = False
try:
if config.getboolean(section, 'ssl'):
self.CP['protocol'] = "https://"
else:
self.CP['protocol'] = "http://"
except (configparser.NoOptionError, ValueError):
self.CP['protocol'] = "http://"
# Read relevant uTorrent section information
section = "uTorrent"
self.uTorrent = {}
self.uTorrent['cp'] = config.get(section, "couchpotato-label").lower()
self.uTorrent['sb'] = config.get(section, "sickbeard-label").lower()
self.uTorrent['sr'] = config.get(section, "sickrage-label").lower()
self.uTorrent['sonarr'] = config.get(section, "sonarr-label").lower()
self.uTorrent['radarr'] = config.get(section, "radarr-label").lower()
self.uTorrent['bypass'] = config.get(section, "bypass-label").lower()
try:
self.uTorrent['convert'] = config.getboolean(section, "convert")
except:
self.uTorrent['convert'] = False
self.uTorrent['output_dir'] = config.get(section, "output_directory")
if self.uTorrent['output_dir'] == '':
self.uTorrent['output_dir'] = None
else:
self.uTorrent['output_dir'] = os.path.normpath(self.raw(self.uTorrent['output_dir'])) # Output directory
self.uTorrentWebUI = config.getboolean(section, "webui")
self.uTorrentActionBefore = config.get(section, "action_before").lower()
self.uTorrentActionAfter = config.get(section, "action_after").lower()
self.uTorrentHost = config.get(section, "host").lower()
self.uTorrentUsername = config.get(section, "username")
self.uTorrentPassword = config.get(section, "password")
# Read relevant Deluge section information
section = "Deluge"
self.deluge = {}
self.deluge['cp'] = config.get(section, "couchpotato-label").lower()
self.deluge['sb'] = config.get(section, "sickbeard-label").lower()
self.deluge['sr'] = config.get(section, "sickrage-label").lower()
self.deluge['sonarr'] = config.get(section, "sonarr-label").lower()
self.deluge['radarr'] = config.get(section, "radarr-label").lower()
self.deluge['bypass'] = config.get(section, "bypass-label").lower()
try:
self.deluge['convert'] = config.getboolean(section, "convert")
except:
self.deluge['convert'] = False
self.deluge['host'] = config.get(section, "host").lower()
self.deluge['port'] = config.get(section, "port")
self.deluge['user'] = config.get(section, "username")
self.deluge['pass'] = config.get(section, "password")
self.deluge['output_dir'] = config.get(section, "output_directory")
self.deluge['remove'] = config.getboolean(section, "remove")
if self.deluge['output_dir'] == '':
self.deluge['output_dir'] = None
else:
self.deluge['output_dir'] = os.path.normpath(self.raw(self.deluge['output_dir'])) # Output directory
# Read relevant Sonarr section information
section = "Sonarr"
self.Sonarr = {}
self.Sonarr['host'] = config.get(section, "host")
self.Sonarr['port'] = config.get(section, "port")
self.Sonarr['apikey'] = config.get(section, "apikey")
self.Sonarr['ssl'] = config.get(section, "ssl")
self.Sonarr['web_root'] = config.get(section, "web_root")
if not self.Sonarr['web_root'].startswith("/"):
self.Sonarr['web_root'] = "/" + self.Sonarr['web_root']
if self.Sonarr['web_root'].endswith("/"):
self.Sonarr['web_root'] = self.Sonarr['web_root'][:-1]
# Read relevant Radarr section information
section = "Radarr"
self.Radarr = {}
self.Radarr['host'] = config.get(section, "host")
self.Radarr['port'] = config.get(section, "port")
self.Radarr['apikey'] = config.get(section, "apikey")
self.Radarr['ssl'] = config.get(section, "ssl")
self.Radarr['web_root'] = config.get(section, "web_root")
if not self.Radarr['web_root'].startswith("/"):
self.Radarr['web_root'] = "/" + self.Radarr['web_root']
if self.Radarr['web_root'].endswith("/"):
self.Radarr['web_root'] = self.Radarr['web_root'][:-1]
# Read Sickbeard section information
section = "SickBeard"
self.Sickbeard = {}
self.Sickbeard['host'] = config.get(section, "host") # Server Address
self.Sickbeard['port'] = config.get(section, "port") # Server Port
self.Sickbeard['api_key'] = config.get(section, "api_key") # Sickbeard API key
self.Sickbeard['web_root'] = config.get(section, "web_root") # Sickbeard webroot
self.Sickbeard['ssl'] = config.getboolean(section, "ssl") # SSL
self.Sickbeard['user'] = config.get(section, "username")
self.Sickbeard['pass'] = config.get(section, "password")
# Read Sickrage section information
section = "Sickrage"
self.Sickrage = {}
self.Sickrage['host'] = config.get(section, "host") # Server Address
self.Sickrage['port'] = config.get(section, "port") # Server Port
self.Sickrage['api_key'] = config.get(section, "api_key") # Sickbeard API key
self.Sickrage['web_root'] = config.get(section, "web_root") # Sickbeard webroot
self.Sickrage['ssl'] = config.getboolean(section, "ssl") # SSL
self.Sickrage['user'] = config.get(section, "username")
self.Sickrage['pass'] = config.get(section, "password")
# Read SAB section information
section = "SABNZBD"
self.SAB = {}
try:
self.SAB['convert'] = config.getboolean(section, "convert") # Convert
except:
self.SAB['convert'] = False
self.SAB['cp'] = config.get(section, "Couchpotato-category").lower()
self.SAB['sb'] = config.get(section, "Sickbeard-category").lower()
self.SAB['sr'] = config.get(section, "Sickrage-category").lower()
self.SAB['sonarr'] = config.get(section, "Sonarr-category").lower()
self.SAB['radarr'] = config.get(section, "Radarr-category").lower()
self.SAB['bypass'] = config.get(section, "Bypass-category").lower()
self.SAB['output_dir'] = config.get(section, "output_directory")
if self.SAB['output_dir'] == '':
self.SAB['output_dir'] = None
else:
self.SAB['output_dir'] = os.path.normpath(self.raw(self.SAB['output_dir'])) # Output directory
# Read Plex section information
section = "Plex"
self.Plex = {}
self.Plex['host'] = config.get(section, "host")
self.Plex['port'] = config.get(section, "port")
try:
self.Plex['refresh'] = config.getboolean(section, "refresh")
except:
self.Plex['refresh'] = False
self.Plex['token'] = config.get(section, "token")
if self.Plex['token'] == '':
self.Plex['token'] = None
# Pass the values on
self.config = config
self.configFile = configFile
def getRefreshURL(self, tvdb_id):
config = self.config
section = "SickBeard"
protocol = "http://" # SSL
try:
if config.getboolean(section, "ssl"):
protocol = "https://"
except (configparser.NoOptionError, ValueError):
pass
host = config.get(section, "host") # Server Address
port = config.get(section, "port") # Server Port
api_key = config.get(section, "api_key") # Sickbeard API key
web_root = config.get(section, "web_root") # Sickbeard webroot
sickbeard_url = protocol + host + ":" + port + web_root + "/api/" + api_key + "/?cmd=show.refresh&tvdbid=" + str(tvdb_id)
return sickbeard_url
    def writeConfig(self, config, cfgfile):
        try:
            with open(cfgfile, "w") as fp:
                config.write(fp)
        except IOError:
            pass
def raw(self, text):
escape_dict = {'\a': r'\a',
'\b': r'\b',
'\c': r'\c',
'\f': r'\f',
'\n': r'\n',
'\r': r'\r',
'\t': r'\t',
'\v': r'\v',
'\'': r'\'',
'\"': r'\"',
'\0': r'\0',
'\1': r'\1',
'\2': r'\2',
'\3': r'\3',
'\4': r'\4',
'\5': r'\5',
'\6': r'\6',
'\7': r'\7',
'\8': r'\8',
'\9': r'\9'}
output = ''
for char in text:
try:
output += escape_dict[char]
except KeyError:
output += char
return output
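# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of driving ReadSettings directly; the directory and
# ini filename below are assumptions for demonstration only, not values the
# class requires. Note that constructing ReadSettings creates the ini file with
# default values if it does not already exist.
if __name__ == '__main__':
    example_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
    settings = ReadSettings(example_dir, "autoProcess.ini")
    print("ffmpeg binary: %s" % settings.ffmpeg)
    print("output format: %s (extension .%s)" % (settings.output_format, settings.output_extension))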
| mit | 4,854,968,743,351,780,000 | 45.022284 | 286 | 0.526419 | false |
likeucode/PythonLearning | basicLearning/basicClass.py | 1 | 1090 | #! /usr/bin/env python
#this code is a demo of OOP in Python
class Filter:
def init(self):
self.blocked=[]
def filter(self,sequence):
return [x for x in sequence if x not in self.blocked]
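# Note: Filter deliberately uses a plain init() method rather than __init__(),
# so callers must invoke init() explicitly (as basicClass() does below) before
# calling filter().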
class SPAMFilter(Filter):
num=0
def init(self):
SPAMFilter.num+=1
print SPAMFilter.num
self.blocked=['SPAM']
class Calcu:
def calculate(self,expression):
self.value=eval(expression)
class Talker:
def talk(self):
print 'Hi, my value is ',self.value
class TalkingCalcu(Calcu,Talker):
pass
def basicClass():
f=Filter()
f.init()
f.filter([1,2,3])
s=SPAMFilter()
s.init()
s.filter(['SPAM','SPAM','eggs','bacon','SPAM'])
print issubclass(SPAMFilter,Filter)
print isinstance(s,SPAMFilter)
def multiSuper():
tc=TalkingCalcu()
tc.calculate('1+2*3')
tc.talk()
#interface
print hasattr(tc,'talk')
print hasattr(tc,'fonrd')
    # callable() was removed in Python 3.0 and re-added in Python 3.2
print callable(getattr(tc,'talk',None))
print callable(getattr(tc,'fonrd',None))
setattr(tc,'name','Mr.Gumby')
print tc.name
def main():
basicClass()
multiSuper()
if __name__ == '__main__':
main()
| bsd-3-clause | -1,111,955,918,422,303,900 | 16.868852 | 55 | 0.685321 | false |
Suninus/NewsBlur | apps/reader/views.py | 1 | 95311 | import datetime
import time
import boto
import redis
import requests
import random
import zlib
from django.shortcuts import get_object_or_404
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.template.loader import render_to_string
from django.db import IntegrityError
from django.db.models import Q
from django.views.decorators.cache import never_cache
from django.core.urlresolvers import reverse
from django.contrib.auth import login as login_user
from django.contrib.auth import logout as logout_user
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden, Http404
from django.conf import settings
from django.core.mail import mail_admins
from django.core.validators import email_re
from django.core.mail import EmailMultiAlternatives
from django.contrib.sites.models import Site
from django.utils import feedgenerator
from mongoengine.queryset import OperationError
from mongoengine.queryset import NotUniqueError
from apps.recommendations.models import RecommendedFeed
from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag
from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds
from apps.analyzer.models import apply_classifier_authors, apply_classifier_tags
from apps.analyzer.models import get_classifiers_for_user, sort_classifiers_by_feed
from apps.profile.models import Profile
from apps.reader.models import UserSubscription, UserSubscriptionFolders, RUserStory, Feature
from apps.reader.forms import SignupForm, LoginForm, FeatureForm
from apps.rss_feeds.models import MFeedIcon, MStarredStoryCounts
from apps.search.models import MUserSearch
from apps.statistics.models import MStatistics
# from apps.search.models import SearchStarredStory
try:
from apps.rss_feeds.models import Feed, MFeedPage, DuplicateFeed, MStory, MStarredStory
except:
pass
from apps.social.models import MSharedStory, MSocialProfile, MSocialServices
from apps.social.models import MSocialSubscription, MActivity, MInteraction
from apps.categories.models import MCategory
from apps.social.views import load_social_page
from apps.rss_feeds.tasks import ScheduleImmediateFetches
from utils import json_functions as json
from utils.user_functions import get_user, ajax_login_required
from utils.feed_functions import relative_timesince
from utils.story_functions import format_story_link_date__short
from utils.story_functions import format_story_link_date__long
from utils.story_functions import strip_tags
from utils import log as logging
from utils.view_functions import get_argument_or_404, render_to, is_true
from utils.view_functions import required_params
from utils.ratelimit import ratelimit
from vendor.timezones.utilities import localtime_for_timezone
BANNED_URLS = [
"brentozar.com",
]
@never_cache
@render_to('reader/dashboard.xhtml')
def index(request, **kwargs):
if request.method == "GET" and request.subdomain and request.subdomain not in ['dev', 'www', 'debug']:
username = request.subdomain
if '.' in username:
username = username.split('.')[0]
user = User.objects.filter(username=username)
if not user:
user = User.objects.filter(username__iexact=username)
if user:
user = user[0]
if not user:
return HttpResponseRedirect('http://%s%s' % (
Site.objects.get_current().domain,
reverse('index')))
return load_social_page(request, user_id=user.pk, username=request.subdomain, **kwargs)
if request.user.is_anonymous():
return welcome(request, **kwargs)
else:
return dashboard(request, **kwargs)
def dashboard(request, **kwargs):
user = request.user
feed_count = UserSubscription.objects.filter(user=request.user).count()
recommended_feeds = RecommendedFeed.objects.filter(is_public=True,
approved_date__lte=datetime.datetime.now()
).select_related('feed')[:2]
unmoderated_feeds = []
if user.is_staff:
unmoderated_feeds = RecommendedFeed.objects.filter(is_public=False,
declined_date__isnull=True
).select_related('feed')[:2]
statistics = MStatistics.all()
social_profile = MSocialProfile.get_user(user.pk)
start_import_from_google_reader = request.session.get('import_from_google_reader', False)
if start_import_from_google_reader:
del request.session['import_from_google_reader']
if not user.is_active:
url = "https://%s%s" % (Site.objects.get_current().domain,
reverse('stripe-form'))
return HttpResponseRedirect(url)
logging.user(request, "~FBLoading dashboard")
return {
'user_profile' : user.profile,
'feed_count' : feed_count,
'account_images' : range(1, 4),
'recommended_feeds' : recommended_feeds,
'unmoderated_feeds' : unmoderated_feeds,
'statistics' : statistics,
'social_profile' : social_profile,
'start_import_from_google_reader': start_import_from_google_reader,
'debug' : settings.DEBUG,
}, "reader/dashboard.xhtml"
def welcome(request, **kwargs):
user = get_user(request)
statistics = MStatistics.all()
social_profile = MSocialProfile.get_user(user.pk)
if request.method == "POST":
if request.POST.get('submit', '').startswith('log'):
login_form = LoginForm(request.POST, prefix='login')
signup_form = SignupForm(prefix='signup')
else:
login_form = LoginForm(prefix='login')
signup_form = SignupForm(request.POST, prefix='signup')
else:
login_form = LoginForm(prefix='login')
signup_form = SignupForm(prefix='signup')
logging.user(request, "~FBLoading welcome")
return {
'user_profile' : hasattr(user, 'profile') and user.profile,
'login_form' : login_form,
'signup_form' : signup_form,
'statistics' : statistics,
'social_profile' : social_profile,
'post_request' : request.method == 'POST',
}, "reader/welcome.xhtml"
@never_cache
def login(request):
code = -1
message = ""
if request.method == "POST":
form = LoginForm(request.POST, prefix='login')
if form.is_valid():
login_user(request, form.get_user())
if request.POST.get('api'):
logging.user(form.get_user(), "~FG~BB~SKiPhone Login~FW")
code = 1
else:
logging.user(form.get_user(), "~FG~BBLogin~FW")
return HttpResponseRedirect(reverse('index'))
else:
message = form.errors.items()[0][1][0]
if request.POST.get('api'):
return HttpResponse(json.encode(dict(code=code, message=message)), mimetype='application/json')
else:
return index(request)
@never_cache
def signup(request):
if request.method == "POST":
form = SignupForm(prefix='signup', data=request.POST)
if form.is_valid():
new_user = form.save()
login_user(request, new_user)
logging.user(new_user, "~FG~SB~BBNEW SIGNUP: ~FW%s" % new_user.email)
if not new_user.is_active:
url = "https://%s%s" % (Site.objects.get_current().domain,
reverse('stripe-form'))
return HttpResponseRedirect(url)
return index(request)
@never_cache
def logout(request):
logging.user(request, "~FG~BBLogout~FW")
logout_user(request)
if request.GET.get('api'):
return HttpResponse(json.encode(dict(code=1)), mimetype='application/json')
else:
return HttpResponseRedirect(reverse('index'))
def autologin(request, username, secret):
next = request.GET.get('next', '')
if not username or not secret:
return HttpResponseForbidden()
profile = Profile.objects.filter(user__username=username, secret_token=secret)
if not profile:
return HttpResponseForbidden()
user = profile[0].user
user.backend = settings.AUTHENTICATION_BACKENDS[0]
login_user(request, user)
logging.user(user, "~FG~BB~SKAuto-Login. Next stop: %s~FW" % (next if next else 'Homepage',))
if next and not next.startswith('/'):
next = '?next=' + next
return HttpResponseRedirect(reverse('index') + next)
elif next:
return HttpResponseRedirect(next)
else:
return HttpResponseRedirect(reverse('index'))
@ratelimit(minutes=1, requests=24)
@never_cache
@json.json_view
def load_feeds(request):
user = get_user(request)
feeds = {}
include_favicons = request.REQUEST.get('include_favicons', False)
flat = request.REQUEST.get('flat', False)
update_counts = request.REQUEST.get('update_counts', False)
version = int(request.REQUEST.get('v', 1))
if include_favicons == 'false': include_favicons = False
if update_counts == 'false': update_counts = False
if flat == 'false': flat = False
if flat: return load_feeds_flat(request)
try:
folders = UserSubscriptionFolders.objects.get(user=user)
except UserSubscriptionFolders.DoesNotExist:
data = dict(feeds=[], folders=[])
return data
except UserSubscriptionFolders.MultipleObjectsReturned:
UserSubscriptionFolders.objects.filter(user=user)[1:].delete()
folders = UserSubscriptionFolders.objects.get(user=user)
user_subs = UserSubscription.objects.select_related('feed').filter(user=user)
day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
scheduled_feeds = []
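    # Collect feeds that look stale (inactive without a feed exception, no active
    # subscribers, or a next scheduled update more than a day overdue) so they can
    # be queued for an immediate fetch below.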
for sub in user_subs:
pk = sub.feed_id
if update_counts and sub.needs_unread_recalc:
sub.calculate_feed_scores(silent=True)
feeds[pk] = sub.canonical(include_favicon=include_favicons)
if not sub.active: continue
if not sub.feed.active and not sub.feed.has_feed_exception:
scheduled_feeds.append(sub.feed.pk)
elif sub.feed.active_subscribers <= 0:
scheduled_feeds.append(sub.feed.pk)
elif sub.feed.next_scheduled_update < day_ago:
scheduled_feeds.append(sub.feed.pk)
if len(scheduled_feeds) > 0 and request.user.is_authenticated():
logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." %
len(scheduled_feeds))
ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk))
starred_counts, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True)
if not starred_count and len(starred_counts):
starred_count = MStarredStory.objects(user_id=user.pk).count()
social_params = {
'user_id': user.pk,
'include_favicon': include_favicons,
'update_counts': update_counts,
}
social_feeds = MSocialSubscription.feeds(**social_params)
social_profile = MSocialProfile.profile(user.pk)
social_services = MSocialServices.profile(user.pk)
categories = None
if not user_subs:
categories = MCategory.serialize()
logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials%s" % (
len(feeds.keys()), len(social_feeds), '. ~FCUpdating counts.' if update_counts else ''))
data = {
'feeds': feeds.values() if version == 2 else feeds,
'social_feeds': social_feeds,
'social_profile': social_profile,
'social_services': social_services,
'user_profile': user.profile,
"is_staff": user.is_staff,
'folders': json.decode(folders.folders),
'starred_count': starred_count,
'starred_counts': starred_counts,
'categories': categories
}
return data
@json.json_view
def load_feed_favicons(request):
user = get_user(request)
feed_ids = request.REQUEST.getlist('feed_ids')
if not feed_ids:
user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
feed_ids = [sub['feed__pk'] for sub in user_subs.values('feed__pk')]
feed_icons = dict([(i.feed_id, i.data) for i in MFeedIcon.objects(feed_id__in=feed_ids)])
return feed_icons
def load_feeds_flat(request):
user = request.user
include_favicons = is_true(request.REQUEST.get('include_favicons', False))
update_counts = is_true(request.REQUEST.get('update_counts', True))
feeds = {}
day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
scheduled_feeds = []
iphone_version = "2.1"
if include_favicons == 'false': include_favicons = False
if update_counts == 'false': update_counts = False
if not user.is_authenticated():
return HttpResponseForbidden()
try:
folders = UserSubscriptionFolders.objects.get(user=user)
except UserSubscriptionFolders.DoesNotExist:
folders = []
user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
if not user_subs and folders:
folders.auto_activate()
user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
for sub in user_subs:
if update_counts and sub.needs_unread_recalc:
sub.calculate_feed_scores(silent=True)
feeds[sub.feed_id] = sub.canonical(include_favicon=include_favicons)
if not sub.feed.active and not sub.feed.has_feed_exception:
scheduled_feeds.append(sub.feed.pk)
elif sub.feed.active_subscribers <= 0:
scheduled_feeds.append(sub.feed.pk)
elif sub.feed.next_scheduled_update < day_ago:
scheduled_feeds.append(sub.feed.pk)
if len(scheduled_feeds) > 0 and request.user.is_authenticated():
logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." %
len(scheduled_feeds))
ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk))
flat_folders = []
if folders:
flat_folders = folders.flatten_folders(feeds=feeds)
social_params = {
'user_id': user.pk,
'include_favicon': include_favicons,
'update_counts': update_counts,
}
social_feeds = MSocialSubscription.feeds(**social_params)
social_profile = MSocialProfile.profile(user.pk)
social_services = MSocialServices.profile(user.pk)
starred_counts, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True)
if not starred_count and len(starred_counts):
starred_count = MStarredStory.objects(user_id=user.pk).count()
categories = None
if not user_subs:
categories = MCategory.serialize()
logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials ~FMflat~FB%s" % (
len(feeds.keys()), len(social_feeds), '. ~FCUpdating counts.' if update_counts else ''))
data = {
"flat_folders": flat_folders,
"feeds": feeds,
"social_feeds": social_feeds,
"social_profile": social_profile,
"social_services": social_services,
"user": user.username,
"is_staff": user.is_staff,
"user_profile": user.profile,
"iphone_version": iphone_version,
"categories": categories,
'starred_count': starred_count,
'starred_counts': starred_counts,
}
return data
@ratelimit(minutes=1, requests=10)
@never_cache
@json.json_view
def refresh_feeds(request):
user = get_user(request)
feed_ids = request.REQUEST.getlist('feed_id')
check_fetch_status = request.REQUEST.get('check_fetch_status')
favicons_fetching = request.REQUEST.getlist('favicons_fetching')
social_feed_ids = [feed_id for feed_id in feed_ids if 'social:' in feed_id]
feed_ids = list(set(feed_ids) - set(social_feed_ids))
feeds = {}
if feed_ids or (not social_feed_ids and not feed_ids):
feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids,
check_fetch_status=check_fetch_status)
social_feeds = {}
if social_feed_ids or (not social_feed_ids and not feed_ids):
social_feeds = MSocialSubscription.feeds_with_updated_counts(user, social_feed_ids=social_feed_ids)
favicons_fetching = [int(f) for f in favicons_fetching if f]
feed_icons = {}
if favicons_fetching:
feed_icons = dict([(i.feed_id, i) for i in MFeedIcon.objects(feed_id__in=favicons_fetching)])
for feed_id, feed in feeds.items():
if feed_id in favicons_fetching and feed_id in feed_icons:
feeds[feed_id]['favicon'] = feed_icons[feed_id].data
feeds[feed_id]['favicon_color'] = feed_icons[feed_id].color
feeds[feed_id]['favicon_fetching'] = feed.get('favicon_fetching')
user_subs = UserSubscription.objects.filter(user=user, active=True).only('feed')
sub_feed_ids = [s.feed_id for s in user_subs]
if favicons_fetching:
moved_feed_ids = [f for f in favicons_fetching if f not in sub_feed_ids]
for moved_feed_id in moved_feed_ids:
duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id=moved_feed_id)
if duplicate_feeds and duplicate_feeds[0].feed.pk in feeds:
feeds[moved_feed_id] = feeds[duplicate_feeds[0].feed_id]
feeds[moved_feed_id]['dupe_feed_id'] = duplicate_feeds[0].feed_id
if check_fetch_status:
missing_feed_ids = list(set(feed_ids) - set(sub_feed_ids))
if missing_feed_ids:
duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id__in=missing_feed_ids)
for duplicate_feed in duplicate_feeds:
feeds[duplicate_feed.duplicate_feed_id] = {'id': duplicate_feed.feed_id}
interactions_count = MInteraction.user_unread_count(user.pk)
if True or settings.DEBUG or check_fetch_status:
logging.user(request, "~FBRefreshing %s feeds (%s/%s)" % (
len(feeds.keys()), check_fetch_status, len(favicons_fetching)))
return {
'feeds': feeds,
'social_feeds': social_feeds,
'interactions_count': interactions_count,
}
@json.json_view
def interactions_count(request):
user = get_user(request)
interactions_count = MInteraction.user_unread_count(user.pk)
return {
'interactions_count': interactions_count,
}
@never_cache
@ajax_login_required
@json.json_view
def feed_unread_count(request):
user = request.user
feed_ids = request.REQUEST.getlist('feed_id')
force = request.REQUEST.get('force', False)
social_feed_ids = [feed_id for feed_id in feed_ids if 'social:' in feed_id]
feed_ids = list(set(feed_ids) - set(social_feed_ids))
feeds = {}
if feed_ids:
feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids, force=force)
social_feeds = {}
if social_feed_ids:
social_feeds = MSocialSubscription.feeds_with_updated_counts(user, social_feed_ids=social_feed_ids)
if len(feed_ids) == 1:
if settings.DEBUG:
feed_title = Feed.get_by_id(feed_ids[0]).feed_title
else:
feed_title = feed_ids[0]
elif len(social_feed_ids) == 1:
feed_title = MSocialProfile.objects.get(user_id=social_feed_ids[0].replace('social:', '')).username
else:
feed_title = "%s feeds" % (len(feeds) + len(social_feeds))
logging.user(request, "~FBUpdating unread count on: %s" % feed_title)
return {'feeds': feeds, 'social_feeds': social_feeds}
def refresh_feed(request, feed_id):
user = get_user(request)
feed = get_object_or_404(Feed, pk=feed_id)
feed = feed.update(force=True, compute_scores=False)
usersub = UserSubscription.objects.get(user=user, feed=feed)
usersub.calculate_feed_scores(silent=False)
logging.user(request, "~FBRefreshing feed: %s" % feed)
return load_single_feed(request, feed_id)
@never_cache
@json.json_view
def load_single_feed(request, feed_id):
start = time.time()
user = get_user(request)
# offset = int(request.REQUEST.get('offset', 0))
# limit = int(request.REQUEST.get('limit', 6))
limit = 6
page = int(request.REQUEST.get('page', 1))
offset = limit * (page-1)
order = request.REQUEST.get('order', 'newest')
read_filter = request.REQUEST.get('read_filter', 'all')
query = request.REQUEST.get('query')
include_story_content = is_true(request.REQUEST.get('include_story_content', True))
include_hidden = is_true(request.REQUEST.get('include_hidden', False))
message = None
user_search = None
dupe_feed_id = None
user_profiles = []
now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
if not feed_id: raise Http404
feed_address = request.REQUEST.get('feed_address')
feed = Feed.get_by_id(feed_id, feed_address=feed_address)
if not feed:
raise Http404
try:
usersub = UserSubscription.objects.get(user=user, feed=feed)
except UserSubscription.DoesNotExist:
usersub = None
if query:
if user.profile.is_premium:
user_search = MUserSearch.get_user(user.pk)
user_search.touch_search_date()
stories = feed.find_stories(query, order=order, offset=offset, limit=limit)
else:
stories = []
message = "You must be a premium subscriber to search."
elif read_filter == 'starred':
mstories = MStarredStory.objects(
user_id=user.pk,
story_feed_id=feed_id
).order_by('%sstarred_date' % ('-' if order == 'newest' else ''))[offset:offset+limit]
stories = Feed.format_stories(mstories)
elif usersub and (read_filter == 'unread' or order == 'oldest'):
stories = usersub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit,
default_cutoff_date=user.profile.unread_cutoff)
else:
stories = feed.get_stories(offset, limit)
checkpoint1 = time.time()
try:
stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk)
except redis.ConnectionError:
logging.user(request, "~BR~FK~SBRedis is unavailable for shared stories.")
checkpoint2 = time.time()
# Get intelligence classifier for user
if usersub and usersub.is_trained:
classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id, social_user_id=0))
classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id))
classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id))
classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id=feed_id))
else:
classifier_feeds = []
classifier_authors = []
classifier_titles = []
classifier_tags = []
classifiers = get_classifiers_for_user(user, feed_id=feed_id,
classifier_feeds=classifier_feeds,
classifier_authors=classifier_authors,
classifier_titles=classifier_titles,
classifier_tags=classifier_tags)
checkpoint3 = time.time()
unread_story_hashes = []
if stories:
if (read_filter == 'all' or query) and usersub:
unread_story_hashes = UserSubscription.story_hashes(user.pk, read_filter='unread',
feed_ids=[usersub.feed_id],
usersubs=[usersub],
group_by_feed=False,
cutoff_date=user.profile.unread_cutoff)
story_hashes = [story['story_hash'] for story in stories if story['story_hash']]
starred_stories = MStarredStory.objects(user_id=user.pk,
story_feed_id=feed.pk,
story_hash__in=story_hashes)\
.only('story_hash', 'starred_date', 'user_tags')
shared_story_hashes = MSharedStory.check_shared_story_hashes(user.pk, story_hashes)
shared_stories = []
if shared_story_hashes:
shared_stories = MSharedStory.objects(user_id=user.pk,
story_hash__in=shared_story_hashes)\
.only('story_hash', 'shared_date', 'comments')
starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date,
user_tags=story.user_tags))
for story in starred_stories])
shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
comments=story.comments))
for story in shared_stories])
checkpoint4 = time.time()
for story in stories:
if not include_story_content:
del story['story_content']
story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
nowtz = localtime_for_timezone(now, user.profile.timezone)
story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
if usersub:
story['read_status'] = 1
if (read_filter == 'all' or query) and usersub:
story['read_status'] = 1 if story['story_hash'] not in unread_story_hashes else 0
elif read_filter == 'unread' and usersub:
story['read_status'] = 0
if story['story_hash'] in starred_stories:
story['starred'] = True
starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'],
user.profile.timezone)
story['starred_date'] = format_story_link_date__long(starred_date, now)
story['starred_timestamp'] = starred_date.strftime('%s')
story['user_tags'] = starred_stories[story['story_hash']]['user_tags']
if story['story_hash'] in shared_stories:
story['shared'] = True
shared_date = localtime_for_timezone(shared_stories[story['story_hash']]['shared_date'],
user.profile.timezone)
story['shared_date'] = format_story_link_date__long(shared_date, now)
story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])
else:
story['read_status'] = 1
story['intelligence'] = {
'feed': apply_classifier_feeds(classifier_feeds, feed),
'author': apply_classifier_authors(classifier_authors, story),
'tags': apply_classifier_tags(classifier_tags, story),
'title': apply_classifier_titles(classifier_titles, story),
}
story['score'] = UserSubscription.score_story(story['intelligence'])
# Intelligence
feed_tags = json.decode(feed.data.popular_tags) if feed.data.popular_tags else []
feed_authors = json.decode(feed.data.popular_authors) if feed.data.popular_authors else []
if usersub:
usersub.feed_opens += 1
usersub.needs_unread_recalc = True
usersub.save(update_fields=['feed_opens', 'needs_unread_recalc'])
diff1 = checkpoint1-start
diff2 = checkpoint2-start
diff3 = checkpoint3-start
diff4 = checkpoint4-start
timediff = time.time()-start
last_update = relative_timesince(feed.last_update)
time_breakdown = ""
if timediff > 1 or settings.DEBUG:
time_breakdown = "~SN~FR(~SB%.4s/%.4s/%.4s/%.4s~SN)" % (
diff1, diff2, diff3, diff4)
search_log = "~SN~FG(~SB%s~SN) " % query if query else ""
logging.user(request, "~FYLoading feed: ~SB%s%s (%s/%s) %s%s" % (
feed.feed_title[:22], ('~SN/p%s' % page) if page > 1 else '', order, read_filter, search_log, time_breakdown))
if not include_hidden:
hidden_stories_removed = 0
new_stories = []
for story in stories:
if story['score'] >= 0:
new_stories.append(story)
else:
hidden_stories_removed += 1
stories = new_stories
data = dict(stories=stories,
user_profiles=user_profiles,
feed_tags=feed_tags,
feed_authors=feed_authors,
classifiers=classifiers,
updated=last_update,
user_search=user_search,
feed_id=feed.pk,
elapsed_time=round(float(timediff), 2),
message=message)
if not include_hidden: data['hidden_stories_removed'] = hidden_stories_removed
if dupe_feed_id: data['dupe_feed_id'] = dupe_feed_id
if not usersub:
data.update(feed.canonical())
# if not usersub and feed.num_subscribers <= 1:
# data = dict(code=-1, message="You must be subscribed to this feed.")
# if page <= 3:
# import random
# time.sleep(random.randint(2, 4))
# if page == 2:
# assert False
return data
def load_feed_page(request, feed_id):
if not feed_id:
raise Http404
feed = Feed.get_by_id(feed_id)
if feed and feed.has_page and not feed.has_page_exception:
if settings.BACKED_BY_AWS.get('pages_on_node'):
url = "http://%s/original_page/%s" % (
settings.ORIGINAL_PAGE_SERVER,
feed.pk,
)
page_response = requests.get(url)
if page_response.status_code == 200:
response = HttpResponse(page_response.content, mimetype="text/html; charset=utf-8")
response['Content-Encoding'] = 'gzip'
response['Last-Modified'] = page_response.headers.get('Last-modified')
response['Etag'] = page_response.headers.get('Etag')
response['Content-Length'] = str(len(page_response.content))
logging.user(request, "~FYLoading original page, proxied from node: ~SB%s bytes" %
(len(page_response.content)))
return response
if settings.BACKED_BY_AWS['pages_on_s3'] and feed.s3_page:
if settings.PROXY_S3_PAGES:
key = settings.S3_PAGES_BUCKET.get_key(feed.s3_pages_key)
if key:
compressed_data = key.get_contents_as_string()
response = HttpResponse(compressed_data, mimetype="text/html; charset=utf-8")
response['Content-Encoding'] = 'gzip'
logging.user(request, "~FYLoading original page, proxied: ~SB%s bytes" %
(len(compressed_data)))
return response
else:
logging.user(request, "~FYLoading original page, non-proxied")
return HttpResponseRedirect('//%s/%s' % (settings.S3_PAGES_BUCKET_NAME,
feed.s3_pages_key))
data = MFeedPage.get_data(feed_id=feed_id)
if not data or not feed or not feed.has_page or feed.has_page_exception:
logging.user(request, "~FYLoading original page, ~FRmissing")
return render(request, 'static/404_original_page.xhtml', {},
content_type='text/html',
status=404)
logging.user(request, "~FYLoading original page, from the db")
return HttpResponse(data, mimetype="text/html; charset=utf-8")
@json.json_view
def load_starred_stories(request):
user = get_user(request)
offset = int(request.REQUEST.get('offset', 0))
limit = int(request.REQUEST.get('limit', 10))
page = int(request.REQUEST.get('page', 0))
query = request.REQUEST.get('query')
order = request.REQUEST.get('order', 'newest')
tag = request.REQUEST.get('tag')
story_hashes = request.REQUEST.getlist('h')[:100]
version = int(request.REQUEST.get('v', 1))
now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
message = None
order_by = '-' if order == "newest" else ""
if page: offset = limit * (page - 1)
if query:
# results = SearchStarredStory.query(user.pk, query)
# story_ids = [result.db_id for result in results]
if user.profile.is_premium:
stories = MStarredStory.find_stories(query, user.pk, tag=tag, offset=offset, limit=limit,
order=order)
else:
stories = []
message = "You must be a premium subscriber to search."
elif tag:
if user.profile.is_premium:
mstories = MStarredStory.objects(
user_id=user.pk,
user_tags__contains=tag
).order_by('%sstarred_date' % order_by)[offset:offset+limit]
stories = Feed.format_stories(mstories)
else:
stories = []
message = "You must be a premium subscriber to read saved stories by tag."
elif story_hashes:
mstories = MStarredStory.objects(
user_id=user.pk,
story_hash__in=story_hashes
).order_by('%sstarred_date' % order_by)[offset:offset+limit]
stories = Feed.format_stories(mstories)
else:
mstories = MStarredStory.objects(
user_id=user.pk
).order_by('%sstarred_date' % order_by)[offset:offset+limit]
stories = Feed.format_stories(mstories)
stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True)
story_hashes = [story['story_hash'] for story in stories]
story_feed_ids = list(set(s['story_feed_id'] for s in stories))
usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values('feed__pk')
usersub_ids = [us['feed__pk'] for us in usersub_ids]
unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids)))
unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids)
unsub_feeds = dict((feed.pk, feed.canonical(include_favicon=False)) for feed in unsub_feeds)
shared_story_hashes = MSharedStory.check_shared_story_hashes(user.pk, story_hashes)
shared_stories = []
if shared_story_hashes:
shared_stories = MSharedStory.objects(user_id=user.pk,
story_hash__in=shared_story_hashes)\
.only('story_hash', 'shared_date', 'comments')
shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
comments=story.comments))
for story in shared_stories])
nowtz = localtime_for_timezone(now, user.profile.timezone)
for story in stories:
story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
starred_date = localtime_for_timezone(story['starred_date'], user.profile.timezone)
story['starred_date'] = format_story_link_date__long(starred_date, nowtz)
story['starred_timestamp'] = starred_date.strftime('%s')
story['read_status'] = 1
story['starred'] = True
story['intelligence'] = {
'feed': 1,
'author': 0,
'tags': 0,
'title': 0,
}
if story['story_hash'] in shared_stories:
story['shared'] = True
story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])
search_log = "~SN~FG(~SB%s~SN)" % query if query else ""
logging.user(request, "~FCLoading starred stories: ~SB%s stories %s" % (len(stories), search_log))
return {
"stories": stories,
"user_profiles": user_profiles,
'feeds': unsub_feeds.values() if version == 2 else unsub_feeds,
"message": message,
}
@json.json_view
def starred_story_hashes(request):
user = get_user(request)
include_timestamps = is_true(request.REQUEST.get('include_timestamps', False))
mstories = MStarredStory.objects(
user_id=user.pk
).only('story_hash', 'starred_date').order_by('-starred_date')
if include_timestamps:
story_hashes = [(s.story_hash, s.starred_date.strftime("%s")) for s in mstories]
else:
story_hashes = [s.story_hash for s in mstories]
logging.user(request, "~FYLoading ~FCstarred story hashes~FY: %s story hashes" %
(len(story_hashes)))
return dict(starred_story_hashes=story_hashes)
def starred_stories_rss_feed(request, user_id, secret_token, tag_slug):
try:
user = User.objects.get(pk=user_id)
except User.DoesNotExist:
raise Http404
try:
tag_counts = MStarredStoryCounts.objects.get(user_id=user_id, slug=tag_slug)
except MStarredStoryCounts.MultipleObjectsReturned:
tag_counts = MStarredStoryCounts.objects(user_id=user_id, slug=tag_slug).first()
except MStarredStoryCounts.DoesNotExist:
raise Http404
data = {}
data['title'] = "Saved Stories - %s" % tag_counts.tag
data['link'] = "%s%s" % (
settings.NEWSBLUR_URL,
reverse('saved-stories-tag', kwargs=dict(tag_name=tag_slug)))
data['description'] = "Stories saved by %s on NewsBlur with the tag \"%s\"." % (user.username,
tag_counts.tag)
data['lastBuildDate'] = datetime.datetime.utcnow()
data['generator'] = 'NewsBlur - %s' % settings.NEWSBLUR_URL
data['docs'] = None
data['author_name'] = user.username
data['feed_url'] = "%s%s" % (
settings.NEWSBLUR_URL,
reverse('starred-stories-rss-feed',
kwargs=dict(user_id=user_id, secret_token=secret_token, tag_slug=tag_slug)),
)
rss = feedgenerator.Atom1Feed(**data)
if not tag_counts.tag:
starred_stories = MStarredStory.objects(
user_id=user.pk
).order_by('-starred_date').limit(25)
else:
starred_stories = MStarredStory.objects(
user_id=user.pk,
user_tags__contains=tag_counts.tag
).order_by('-starred_date').limit(25)
for starred_story in starred_stories:
story_data = {
'title': starred_story.story_title,
'link': starred_story.story_permalink,
'description': (starred_story.story_content_z and
zlib.decompress(starred_story.story_content_z)),
'author_name': starred_story.story_author_name,
'categories': starred_story.story_tags,
'unique_id': starred_story.story_guid,
'pubdate': starred_story.starred_date,
}
rss.add_item(**story_data)
logging.user(request, "~FBGenerating ~SB%s~SN's saved story RSS feed (%s, %s stories): ~FM%s" % (
user.username,
tag_counts.tag,
tag_counts.count,
request.META.get('HTTP_USER_AGENT', "")[:24]
))
return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml')
@json.json_view
def load_read_stories(request):
user = get_user(request)
offset = int(request.REQUEST.get('offset', 0))
limit = int(request.REQUEST.get('limit', 10))
page = int(request.REQUEST.get('page', 0))
order = request.REQUEST.get('order', 'newest')
query = request.REQUEST.get('query')
now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
message = None
if page: offset = limit * (page - 1)
if query:
stories = []
message = "Not implemented yet."
# if user.profile.is_premium:
# stories = MStarredStory.find_stories(query, user.pk, offset=offset, limit=limit)
# else:
# stories = []
# message = "You must be a premium subscriber to search."
else:
story_hashes = RUserStory.get_read_stories(user.pk, offset=offset, limit=limit, order=order)
mstories = MStory.objects(story_hash__in=story_hashes)
stories = Feed.format_stories(mstories)
stories = sorted(stories, key=lambda story: story_hashes.index(story['story_hash']),
reverse=bool(order=="oldest"))
stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True)
story_hashes = [story['story_hash'] for story in stories]
story_feed_ids = list(set(s['story_feed_id'] for s in stories))
usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values('feed__pk')
usersub_ids = [us['feed__pk'] for us in usersub_ids]
unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids)))
unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids)
unsub_feeds = [feed.canonical(include_favicon=False) for feed in unsub_feeds]
shared_stories = MSharedStory.objects(user_id=user.pk,
story_hash__in=story_hashes)\
.only('story_hash', 'shared_date', 'comments')
shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
comments=story.comments))
for story in shared_stories])
starred_stories = MStarredStory.objects(user_id=user.pk,
story_hash__in=story_hashes)\
.only('story_hash', 'starred_date')
starred_stories = dict([(story.story_hash, story.starred_date)
for story in starred_stories])
nowtz = localtime_for_timezone(now, user.profile.timezone)
for story in stories:
story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
story['read_status'] = 1
story['intelligence'] = {
'feed': 1,
'author': 0,
'tags': 0,
'title': 0,
}
if story['story_hash'] in starred_stories:
story['starred'] = True
starred_date = localtime_for_timezone(starred_stories[story['story_hash']],
user.profile.timezone)
story['starred_date'] = format_story_link_date__long(starred_date, now)
story['starred_timestamp'] = starred_date.strftime('%s')
if story['story_hash'] in shared_stories:
story['shared'] = True
story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])
search_log = "~SN~FG(~SB%s~SN)" % query if query else ""
logging.user(request, "~FCLoading read stories: ~SB%s stories %s" % (len(stories), search_log))
return {
"stories": stories,
"user_profiles": user_profiles,
"feeds": unsub_feeds,
"message": message,
}
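# "River of News": merges stories across many feeds. Rough pipeline: resolve
# subscriptions -> collect story hashes (explicit hashes, search, starred or
# unread) -> hydrate from MStory -> attach shares, stars and intelligence
# classifier scores -> drop negatively scored stories unless include_hidden.
# Hypothetical call shape (the exact URL route is not shown in this file):
#   GET /reader/river_stories?feeds=12&feeds=34&page=1&order=newest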
@json.json_view
def load_river_stories__redis(request):
limit = 12
start = time.time()
user = get_user(request)
message = None
feed_ids = [int(feed_id) for feed_id in request.REQUEST.getlist('feeds') if feed_id]
if not feed_ids:
feed_ids = [int(feed_id) for feed_id in request.REQUEST.getlist('f') if feed_id]
story_hashes = request.REQUEST.getlist('h')[:100]
original_feed_ids = list(feed_ids)
page = int(request.REQUEST.get('page', 1))
order = request.REQUEST.get('order', 'newest')
read_filter = request.REQUEST.get('read_filter', 'unread')
query = request.REQUEST.get('query')
include_hidden = is_true(request.REQUEST.get('include_hidden', False))
now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
usersubs = []
code = 1
user_search = None
offset = (page-1) * limit
limit = page * limit
story_date_order = "%sstory_date" % ('' if order == 'oldest' else '-')
if story_hashes:
unread_feed_story_hashes = None
read_filter = 'unread'
mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order)
stories = Feed.format_stories(mstories)
elif query:
if user.profile.is_premium:
user_search = MUserSearch.get_user(user.pk)
user_search.touch_search_date()
usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
read_filter='all')
feed_ids = [sub.feed_id for sub in usersubs]
stories = Feed.find_feed_stories(feed_ids, query, order=order, offset=offset, limit=limit)
mstories = stories
unread_feed_story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids,
read_filter="unread", order=order,
group_by_feed=False,
cutoff_date=user.profile.unread_cutoff)
else:
stories = []
mstories = []
message = "You must be a premium subscriber to search."
elif read_filter == 'starred':
mstories = MStarredStory.objects(
user_id=user.pk,
story_feed_id__in=feed_ids
).order_by('%sstarred_date' % ('-' if order == 'newest' else ''))[offset:offset+limit]
stories = Feed.format_stories(mstories)
else:
usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
read_filter=read_filter)
all_feed_ids = [f for f in feed_ids]
feed_ids = [sub.feed_id for sub in usersubs]
if feed_ids:
params = {
"user_id": user.pk,
"feed_ids": feed_ids,
"all_feed_ids": all_feed_ids,
"offset": offset,
"limit": limit,
"order": order,
"read_filter": read_filter,
"usersubs": usersubs,
"cutoff_date": user.profile.unread_cutoff,
}
story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params)
else:
story_hashes = []
unread_feed_story_hashes = []
mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order)
stories = Feed.format_stories(mstories)
found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk)
if not usersubs:
usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=found_feed_ids,
read_filter=read_filter)
trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained]
found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids))
# Find starred stories
if found_feed_ids:
if read_filter == 'starred':
starred_stories = mstories
else:
starred_stories = MStarredStory.objects(
user_id=user.pk,
story_feed_id__in=found_feed_ids
).only('story_hash', 'starred_date')
starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date,
user_tags=story.user_tags))
for story in starred_stories])
else:
starred_stories = {}
# Intelligence classifiers for all feeds involved
if found_trained_feed_ids:
classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk,
feed_id__in=found_trained_feed_ids,
social_user_id=0))
classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk,
feed_id__in=found_trained_feed_ids))
classifier_titles = list(MClassifierTitle.objects(user_id=user.pk,
feed_id__in=found_trained_feed_ids))
classifier_tags = list(MClassifierTag.objects(user_id=user.pk,
feed_id__in=found_trained_feed_ids))
else:
classifier_feeds = []
classifier_authors = []
classifier_titles = []
classifier_tags = []
classifiers = sort_classifiers_by_feed(user=user, feed_ids=found_feed_ids,
classifier_feeds=classifier_feeds,
classifier_authors=classifier_authors,
classifier_titles=classifier_titles,
classifier_tags=classifier_tags)
# Just need to format stories
nowtz = localtime_for_timezone(now, user.profile.timezone)
for story in stories:
if read_filter == 'starred':
story['read_status'] = 1
else:
story['read_status'] = 0
if read_filter == 'all' or query:
if (unread_feed_story_hashes is not None and
story['story_hash'] not in unread_feed_story_hashes):
story['read_status'] = 1
story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
if story['story_hash'] in starred_stories:
story['starred'] = True
starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'],
user.profile.timezone)
story['starred_date'] = format_story_link_date__long(starred_date, now)
story['starred_timestamp'] = starred_date.strftime('%s')
story['user_tags'] = starred_stories[story['story_hash']]['user_tags']
story['intelligence'] = {
'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id']),
'author': apply_classifier_authors(classifier_authors, story),
'tags': apply_classifier_tags(classifier_tags, story),
'title': apply_classifier_titles(classifier_titles, story),
}
story['score'] = UserSubscription.score_story(story['intelligence'])
if not user.profile.is_premium:
message = "The full River of News is a premium feature."
code = 0
# if page > 1:
# stories = []
# else:
# stories = stories[:5]
diff = time.time() - start
timediff = round(float(diff), 2)
logging.user(request, "~FYLoading ~FCriver stories~FY: ~SBp%s~SN (%s/%s "
"stories, ~SN%s/%s/%s feeds, %s/%s)" %
(page, len(stories), len(mstories), len(found_feed_ids),
len(feed_ids), len(original_feed_ids), order, read_filter))
if not include_hidden:
hidden_stories_removed = 0
new_stories = []
for story in stories:
if story['score'] >= 0:
new_stories.append(story)
else:
hidden_stories_removed += 1
stories = new_stories
# if page <= 1:
# import random
# time.sleep(random.randint(0, 6))
data = dict(code=code,
message=message,
stories=stories,
classifiers=classifiers,
elapsed_time=timediff,
user_search=user_search,
user_profiles=user_profiles)
if not include_hidden: data['hidden_stories_removed'] = hidden_stories_removed
return data
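# Older per-subscription implementation (note the __old suffix): asks each
# subscription in turn for up to 500 unread hashes. The bulk version below
# delegates straight to UserSubscription.story_hashes().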
@json.json_view
def unread_story_hashes__old(request):
user = get_user(request)
feed_ids = [int(feed_id) for feed_id in request.REQUEST.getlist('feed_id') if feed_id]
include_timestamps = is_true(request.REQUEST.get('include_timestamps', False))
usersubs = {}
if not feed_ids:
usersubs = UserSubscription.objects.filter(Q(unread_count_neutral__gt=0) |
Q(unread_count_positive__gt=0),
user=user, active=True)
feed_ids = [sub.feed_id for sub in usersubs]
else:
usersubs = UserSubscription.objects.filter(Q(unread_count_neutral__gt=0) |
Q(unread_count_positive__gt=0),
user=user, active=True, feed__in=feed_ids)
unread_feed_story_hashes = {}
story_hash_count = 0
usersubs = dict((sub.feed_id, sub) for sub in usersubs)
for feed_id in feed_ids:
if feed_id in usersubs:
us = usersubs[feed_id]
else:
continue
if not us.unread_count_neutral and not us.unread_count_positive:
continue
unread_feed_story_hashes[feed_id] = us.get_stories(read_filter='unread', limit=500,
withscores=include_timestamps,
hashes_only=True,
default_cutoff_date=user.profile.unread_cutoff)
story_hash_count += len(unread_feed_story_hashes[feed_id])
logging.user(request, "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" %
(len(feed_ids), len(story_hash_count)))
return dict(unread_feed_story_hashes=unread_feed_story_hashes)
@json.json_view
def unread_story_hashes(request):
user = get_user(request)
feed_ids = [int(feed_id) for feed_id in request.REQUEST.getlist('feed_id') if feed_id]
include_timestamps = is_true(request.REQUEST.get('include_timestamps', False))
order = request.REQUEST.get('order', 'newest')
read_filter = request.REQUEST.get('read_filter', 'unread')
story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids,
order=order, read_filter=read_filter,
include_timestamps=include_timestamps,
cutoff_date=user.profile.unread_cutoff)
logging.user(request, "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" %
(len(feed_ids), len(story_hashes)))
return dict(unread_feed_story_hashes=story_hashes)
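# Marks every feed and social subscription as read. days=0 clears everything
# immediately; otherwise each subscription whose mark_read_date is older than
# the cutoff has it moved up and is flagged for an unread recount.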
@ajax_login_required
@json.json_view
def mark_all_as_read(request):
code = 1
try:
days = int(request.REQUEST.get('days', 0))
except ValueError:
return dict(code=-1, message="Days parameter must be an integer, not: %s" %
request.REQUEST.get('days'))
read_date = datetime.datetime.utcnow() - datetime.timedelta(days=days)
feeds = UserSubscription.objects.filter(user=request.user)
socialsubs = MSocialSubscription.objects.filter(user_id=request.user.pk)
for subtype in [feeds, socialsubs]:
for sub in subtype:
if days == 0:
sub.mark_feed_read()
else:
if sub.mark_read_date < read_date:
sub.needs_unread_recalc = True
sub.mark_read_date = read_date
sub.save()
logging.user(request, "~FMMarking all as read: ~SB%s days" % (days,))
return dict(code=code)
@ajax_login_required
@json.json_view
def mark_story_as_read(request):
story_ids = request.REQUEST.getlist('story_id')
try:
feed_id = int(get_argument_or_404(request, 'feed_id'))
except ValueError:
return dict(code=-1, errors=["You must pass a valid feed_id: %s" %
request.REQUEST.get('feed_id')])
try:
usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
except Feed.DoesNotExist:
duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
if duplicate_feed:
feed_id = duplicate_feed[0].feed_id
try:
usersub = UserSubscription.objects.get(user=request.user,
feed=duplicate_feed[0].feed)
except (Feed.DoesNotExist):
return dict(code=-1, errors=["No feed exists for feed_id %d." % feed_id])
else:
return dict(code=-1, errors=["No feed exists for feed_id %d." % feed_id])
except UserSubscription.DoesNotExist:
usersub = None
if usersub:
data = usersub.mark_story_ids_as_read(story_ids, request=request)
else:
data = dict(code=-1, errors=["User is not subscribed to this feed."])
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'feed:%s' % feed_id)
return data
@ajax_login_required
@json.json_view
def mark_story_hashes_as_read(request):
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
story_hashes = request.REQUEST.getlist('story_hash')
feed_ids, friend_ids = RUserStory.mark_story_hashes_read(request.user.pk, story_hashes)
if friend_ids:
socialsubs = MSocialSubscription.objects.filter(
user_id=request.user.pk,
subscription_user_id__in=friend_ids)
for socialsub in socialsubs:
if not socialsub.needs_unread_recalc:
socialsub.needs_unread_recalc = True
socialsub.save()
r.publish(request.user.username, 'social:%s' % socialsub.subscription_user_id)
# Also count on original subscription
for feed_id in feed_ids:
usersubs = UserSubscription.objects.filter(user=request.user.pk, feed=feed_id)
if usersubs:
usersub = usersubs[0]
if not usersub.needs_unread_recalc:
usersub.needs_unread_recalc = True
usersub.save(update_fields=['needs_unread_recalc'])
r.publish(request.user.username, 'feed:%s' % feed_id)
hash_count = len(story_hashes)
logging.user(request, "~FYRead %s %s in feed/socialsubs: %s/%s" % (
hash_count, 'story' if hash_count == 1 else 'stories', feed_ids, friend_ids))
return dict(code=1, story_hashes=story_hashes,
feed_ids=feed_ids, friend_user_ids=friend_ids)
@ajax_login_required
@json.json_view
def mark_feed_stories_as_read(request):
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
feeds_stories = request.REQUEST.get('feeds_stories', "{}")
feeds_stories = json.decode(feeds_stories)
data = {
'code': -1,
'message': 'Nothing was marked as read'
}
for feed_id, story_ids in feeds_stories.items():
try:
feed_id = int(feed_id)
except ValueError:
continue
try:
usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
data = usersub.mark_story_ids_as_read(story_ids, request=request)
except UserSubscription.DoesNotExist:
return dict(code=-1, error="You are not subscribed to this feed_id: %d" % feed_id)
except Feed.DoesNotExist:
duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
try:
if not duplicate_feed: raise Feed.DoesNotExist
usersub = UserSubscription.objects.get(user=request.user,
feed=duplicate_feed[0].feed)
data = usersub.mark_story_ids_as_read(story_ids, request=request)
except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
return dict(code=-1, error="No feed exists for feed_id: %d" % feed_id)
r.publish(request.user.username, 'feed:%s' % feed_id)
return data
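# Same idea as mark_feed_stories_as_read, but for stories read inside other
# users' blurblogs: the payload maps social_user_id -> {feed_id: [story_ids]}
# and read state is tracked on the corresponding MSocialSubscription.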
@ajax_login_required
@json.json_view
def mark_social_stories_as_read(request):
code = 1
errors = []
data = {}
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
users_feeds_stories = request.REQUEST.get('users_feeds_stories', "{}")
users_feeds_stories = json.decode(users_feeds_stories)
for social_user_id, feeds in users_feeds_stories.items():
for feed_id, story_ids in feeds.items():
feed_id = int(feed_id)
try:
socialsub = MSocialSubscription.objects.get(user_id=request.user.pk,
subscription_user_id=social_user_id)
data = socialsub.mark_story_ids_as_read(story_ids, feed_id, request=request)
except OperationError, e:
code = -1
errors.append("Already read story: %s" % e)
except MSocialSubscription.DoesNotExist:
MSocialSubscription.mark_unsub_story_ids_as_read(request.user.pk, social_user_id,
story_ids, feed_id,
request=request)
except Feed.DoesNotExist:
duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
if duplicate_feed:
try:
socialsub = MSocialSubscription.objects.get(user_id=request.user.pk,
subscription_user_id=social_user_id)
data = socialsub.mark_story_ids_as_read(story_ids, duplicate_feed[0].feed.pk, request=request)
except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
code = -1
errors.append("No feed exists for feed_id %d." % feed_id)
else:
continue
r.publish(request.user.username, 'feed:%s' % feed_id)
r.publish(request.user.username, 'social:%s' % social_user_id)
data.update(code=code, errors=errors)
return data
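# Reverse of marking read, for a single story identified by feed_id/story_id:
# re-opens the story, flags the subscription (and any social subs that shared
# it) for unread recalculation, and clears the hash in Redis via RUserStory.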
@required_params('story_id', feed_id=int)
@ajax_login_required
@json.json_view
def mark_story_as_unread(request):
story_id = request.REQUEST.get('story_id', None)
feed_id = int(request.REQUEST.get('feed_id', 0))
try:
usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
feed = usersub.feed
except UserSubscription.DoesNotExist:
usersub = None
feed = Feed.get_by_id(feed_id)
if usersub and not usersub.needs_unread_recalc:
usersub.needs_unread_recalc = True
usersub.save(update_fields=['needs_unread_recalc'])
data = dict(code=0, payload=dict(story_id=story_id))
story, found_original = MStory.find_story(feed_id, story_id)
if not story:
logging.user(request, "~FY~SBUnread~SN story in feed: %s (NOT FOUND)" % (feed))
return dict(code=-1, message="Story not found.")
if usersub:
data = usersub.invert_read_stories_after_unread_story(story, request)
message = RUserStory.story_can_be_marked_read_by_user(story, request.user)
if message:
data['code'] = -1
data['message'] = message
return data
social_subs = MSocialSubscription.mark_dirty_sharing_story(user_id=request.user.pk,
story_feed_id=feed_id,
story_guid_hash=story.guid_hash)
dirty_count = social_subs and social_subs.count()
dirty_count = ("(%s social_subs)" % dirty_count) if dirty_count else ""
RUserStory.mark_story_hash_unread(user_id=request.user.pk, story_hash=story.story_hash)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'feed:%s' % feed_id)
logging.user(request, "~FY~SBUnread~SN story in feed: %s %s" % (feed, dirty_count))
return data
@ajax_login_required
@json.json_view
@required_params('story_hash')
def mark_story_hash_as_unread(request):
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
story_hash = request.REQUEST.get('story_hash')
feed_id, _ = MStory.split_story_hash(story_hash)
story, _ = MStory.find_story(feed_id, story_hash)
if not story:
data = dict(code=-1, message="That story has been removed from the feed, no need to mark it unread.")
return data
message = RUserStory.story_can_be_marked_read_by_user(story, request.user)
if message:
data = dict(code=-1, message=message)
return data
# Also count on original subscription
usersubs = UserSubscription.objects.filter(user=request.user.pk, feed=feed_id)
if usersubs:
usersub = usersubs[0]
if not usersub.needs_unread_recalc:
usersub.needs_unread_recalc = True
usersub.save(update_fields=['needs_unread_recalc'])
data = usersub.invert_read_stories_after_unread_story(story, request)
r.publish(request.user.username, 'feed:%s' % feed_id)
feed_id, friend_ids = RUserStory.mark_story_hash_unread(request.user.pk, story_hash)
if friend_ids:
socialsubs = MSocialSubscription.objects.filter(
user_id=request.user.pk,
subscription_user_id__in=friend_ids)
for socialsub in socialsubs:
if not socialsub.needs_unread_recalc:
socialsub.needs_unread_recalc = True
socialsub.save()
r.publish(request.user.username, 'social:%s' % socialsub.subscription_user_id)
logging.user(request, "~FYUnread story in feed/socialsubs: %s/%s" % (feed_id, friend_ids))
return dict(code=1, story_hash=story_hash, feed_id=feed_id, friend_user_ids=friend_ids)
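# Marks whole feeds (or "social:<user_id>" blurblogs) as read, either older or
# newer than an optional cutoff_timestamp, and publishes per-feed or bulk
# refresh events over Redis pub/sub so connected clients can update.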
@ajax_login_required
@json.json_view
def mark_feed_as_read(request):
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
feed_ids = request.REQUEST.getlist('feed_id')
cutoff_timestamp = int(request.REQUEST.get('cutoff_timestamp', 0))
direction = request.REQUEST.get('direction', 'older')
multiple = len(feed_ids) > 1
code = 1
errors = []
cutoff_date = datetime.datetime.fromtimestamp(cutoff_timestamp) if cutoff_timestamp else None
for feed_id in feed_ids:
if 'social:' in feed_id:
user_id = int(feed_id.replace('social:', ''))
try:
sub = MSocialSubscription.objects.get(user_id=request.user.pk,
subscription_user_id=user_id)
except MSocialSubscription.DoesNotExist:
logging.user(request, "~FRCouldn't find socialsub: %s" % user_id)
continue
if not multiple:
sub_user = User.objects.get(pk=sub.subscription_user_id)
logging.user(request, "~FMMarking social feed as read: ~SB%s" % (sub_user.username,))
else:
try:
feed = Feed.objects.get(id=feed_id)
sub = UserSubscription.objects.get(feed=feed, user=request.user)
if not multiple:
logging.user(request, "~FMMarking feed as read: ~SB%s" % (feed,))
except (Feed.DoesNotExist, UserSubscription.DoesNotExist), e:
errors.append("User not subscribed: %s" % e)
continue
except (ValueError), e:
errors.append("Invalid feed_id: %s" % e)
continue
if not sub:
errors.append("User not subscribed: %s" % feed_id)
continue
try:
if direction == "older":
marked_read = sub.mark_feed_read(cutoff_date=cutoff_date)
else:
marked_read = sub.mark_newer_stories_read(cutoff_date=cutoff_date)
if marked_read and not multiple:
r.publish(request.user.username, 'feed:%s' % feed_id)
except IntegrityError, e:
errors.append("Could not mark feed as read: %s" % e)
code = -1
if multiple:
logging.user(request, "~FMMarking ~SB%s~SN feeds as read" % len(feed_ids))
r.publish(request.user.username, 'refresh:%s' % ','.join(feed_ids))
if errors:
logging.user(request, "~FMMarking read had errors: ~FR%s" % errors)
return dict(code=code, errors=errors, cutoff_date=cutoff_date, direction=direction)
def _parse_user_info(user):
return {
'user_info': {
'is_anonymous': json.encode(user.is_anonymous()),
'is_authenticated': json.encode(user.is_authenticated()),
'username': json.encode(user.username if user.is_authenticated() else 'Anonymous')
}
}
@ajax_login_required
@json.json_view
def add_url(request):
code = 0
url = request.POST['url']
folder = request.POST.get('folder', '')
new_folder = request.POST.get('new_folder')
auto_active = is_true(request.POST.get('auto_active', 1))
skip_fetch = is_true(request.POST.get('skip_fetch', False))
feed = None
if not url:
code = -1
message = 'Enter in the website address or the feed URL.'
elif any([(banned_url in url) for banned_url in BANNED_URLS]):
code = -1
message = "The publisher of this website has banned NewsBlur."
else:
if new_folder:
usf, _ = UserSubscriptionFolders.objects.get_or_create(user=request.user)
usf.add_folder(folder, new_folder)
folder = new_folder
code, message, us = UserSubscription.add_subscription(user=request.user, feed_address=url,
folder=folder, auto_active=auto_active,
skip_fetch=skip_fetch)
feed = us and us.feed
if feed:
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:%s' % feed.pk)
MUserSearch.schedule_index_feeds_for_search(feed.pk, request.user.pk)
return dict(code=code, message=message, feed=feed)
@ajax_login_required
@json.json_view
def add_folder(request):
folder = request.POST['folder']
parent_folder = request.POST.get('parent_folder', '')
folders = None
logging.user(request, "~FRAdding Folder: ~SB%s (in %s)" % (folder, parent_folder))
if folder:
code = 1
message = ""
user_sub_folders_object, _ = UserSubscriptionFolders.objects.get_or_create(user=request.user)
user_sub_folders_object.add_folder(parent_folder, folder)
folders = json.decode(user_sub_folders_object.folders)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
else:
code = -1
message = "Gotta write in a folder name."
return dict(code=code, message=message, folders=folders)
@ajax_login_required
@json.json_view
def delete_feed(request):
feed_id = int(request.POST['feed_id'])
in_folder = request.POST.get('in_folder', None)
if not in_folder or in_folder == ' ':
in_folder = ""
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders.delete_feed(feed_id, in_folder)
feed = Feed.objects.filter(pk=feed_id)
if feed:
feed[0].count_subscribers()
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, message="Removed %s from '%s'." % (feed, in_folder))
@ajax_login_required
@json.json_view
def delete_feed_by_url(request):
message = ""
code = 0
url = request.POST['url']
in_folder = request.POST.get('in_folder', '')
if in_folder == ' ':
in_folder = ""
feed = Feed.get_feed_from_url(url, create=False)
if feed:
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders.delete_feed(feed.pk, in_folder)
code = 1
feed = Feed.objects.filter(pk=feed.pk)
if feed:
feed[0].count_subscribers()
else:
code = -1
message = "URL not found."
return dict(code=code, message=message)
@ajax_login_required
@json.json_view
def delete_folder(request):
folder_to_delete = request.POST.get('folder_name') or request.POST.get('folder_to_delete')
in_folder = request.POST.get('in_folder', None)
feed_ids_in_folder = [int(f) for f in request.REQUEST.getlist('feed_id') if f]
request.user.profile.send_opml_export_email(reason="You have deleted an entire folder of feeds, so here's a backup just in case.")
# Works piss poor with duplicate folder titles, if they are both in the same folder.
# Deletes all, but only in the same folder parent. But nobody should be doing that, right?
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders.delete_folder(folder_to_delete, in_folder, feed_ids_in_folder)
folders = json.decode(user_sub_folders.folders)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, folders=folders)
@required_params('feeds_by_folder')
@ajax_login_required
@json.json_view
def delete_feeds_by_folder(request):
feeds_by_folder = json.decode(request.POST['feeds_by_folder'])
request.user.profile.send_opml_export_email(reason="You have deleted a number of feeds at once, so here's a backup just in case.")
# Works piss poor with duplicate folder titles, if they are both in the same folder.
# Deletes all, but only in the same folder parent. But nobody should be doing that, right?
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders.delete_feeds_by_folder(feeds_by_folder)
folders = json.decode(user_sub_folders.folders)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, folders=folders)
@ajax_login_required
@json.json_view
def rename_feed(request):
feed = get_object_or_404(Feed, pk=int(request.POST['feed_id']))
user_sub = UserSubscription.objects.get(user=request.user, feed=feed)
feed_title = request.POST['feed_title']
logging.user(request, "~FRRenaming feed '~SB%s~SN' to: ~SB%s" % (
feed.feed_title, feed_title))
user_sub.user_title = feed_title
user_sub.save()
return dict(code=1)
@ajax_login_required
@json.json_view
def rename_folder(request):
folder_to_rename = request.POST.get('folder_name') or request.POST.get('folder_to_rename')
new_folder_name = request.POST['new_folder_name']
in_folder = request.POST.get('in_folder', '')
code = 0
# Works piss poor with duplicate folder titles, if they are both in the same folder.
# renames all, but only in the same folder parent. But nobody should be doing that, right?
if folder_to_rename and new_folder_name:
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders.rename_folder(folder_to_rename, new_folder_name, in_folder)
code = 1
else:
code = -1
return dict(code=code)
@ajax_login_required
@json.json_view
def move_feed_to_folders(request):
feed_id = int(request.POST['feed_id'])
in_folders = request.POST.getlist('in_folders', '')
to_folders = request.POST.getlist('to_folders', '')
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders = user_sub_folders.move_feed_to_folders(feed_id, in_folders=in_folders,
to_folders=to_folders)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, folders=json.decode(user_sub_folders.folders))
@ajax_login_required
@json.json_view
def move_feed_to_folder(request):
feed_id = int(request.POST['feed_id'])
in_folder = request.POST.get('in_folder', '')
to_folder = request.POST.get('to_folder', '')
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders = user_sub_folders.move_feed_to_folder(feed_id, in_folder=in_folder,
to_folder=to_folder)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, folders=json.decode(user_sub_folders.folders))
@ajax_login_required
@json.json_view
def move_folder_to_folder(request):
folder_name = request.POST['folder_name']
in_folder = request.POST.get('in_folder', '')
to_folder = request.POST.get('to_folder', '')
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
user_sub_folders = user_sub_folders.move_folder_to_folder(folder_name, in_folder=in_folder, to_folder=to_folder)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, folders=json.decode(user_sub_folders.folders))
@required_params('feeds_by_folder', 'to_folder')
@ajax_login_required
@json.json_view
def move_feeds_by_folder_to_folder(request):
feeds_by_folder = json.decode(request.POST['feeds_by_folder'])
to_folder = request.POST['to_folder']
new_folder = request.POST.get('new_folder', None)
request.user.profile.send_opml_export_email(reason="You have moved a number of feeds at once, so here's a backup just in case.")
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
if new_folder:
user_sub_folders.add_folder(to_folder, new_folder)
to_folder = new_folder
user_sub_folders = user_sub_folders.move_feeds_by_folder_to_folder(feeds_by_folder, to_folder)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
return dict(code=1, folders=json.decode(user_sub_folders.folders))
@login_required
def add_feature(request):
if not request.user.is_staff:
return HttpResponseForbidden()
code = -1
form = FeatureForm(request.POST)
if form.is_valid():
form.save()
code = 1
return HttpResponseRedirect(reverse('index'))
return dict(code=code)
@json.json_view
def load_features(request):
user = get_user(request)
page = max(int(request.REQUEST.get('page', 0)), 0)
logging.user(request, "~FBBrowse features: ~SBPage #%s" % (page+1))
features = Feature.objects.all()[page*3:(page+1)*3+1].values()
features = [{
'description': f['description'],
'date': localtime_for_timezone(f['date'], user.profile.timezone).strftime("%b %d, %Y")
} for f in features]
return features
@ajax_login_required
@json.json_view
def save_feed_order(request):
folders = request.POST.get('folders')
if folders:
# Test that folders can be JSON decoded
folders_list = json.decode(folders)
assert folders_list is not None
logging.user(request, "~FBFeed re-ordering: ~SB%s folders/feeds" % (len(folders_list)))
user_sub_folders = UserSubscriptionFolders.objects.get(user=request.user)
user_sub_folders.folders = folders
user_sub_folders.save()
return {}
@json.json_view
def feeds_trainer(request):
classifiers = []
feed_id = request.REQUEST.get('feed_id')
user = get_user(request)
usersubs = UserSubscription.objects.filter(user=user, active=True)
if feed_id:
feed = get_object_or_404(Feed, pk=feed_id)
usersubs = usersubs.filter(feed=feed)
usersubs = usersubs.select_related('feed').order_by('-feed__stories_last_month')
for us in usersubs:
if (not us.is_trained and us.feed.stories_last_month > 0) or feed_id:
classifier = dict()
classifier['classifiers'] = get_classifiers_for_user(user, feed_id=us.feed.pk)
classifier['feed_id'] = us.feed_id
classifier['stories_last_month'] = us.feed.stories_last_month
classifier['num_subscribers'] = us.feed.num_subscribers
classifier['feed_tags'] = json.decode(us.feed.data.popular_tags) if us.feed.data.popular_tags else []
classifier['feed_authors'] = json.decode(us.feed.data.popular_authors) if us.feed.data.popular_authors else []
classifiers.append(classifier)
user.profile.has_trained_intelligence = True
user.profile.save()
logging.user(user, "~FGLoading Trainer: ~SB%s feeds" % (len(classifiers)))
return classifiers
@ajax_login_required
@json.json_view
def save_feed_chooser(request):
is_premium = request.user.profile.is_premium
approved_feeds = [int(feed_id) for feed_id in request.POST.getlist('approved_feeds') if feed_id]
if not is_premium:
approved_feeds = approved_feeds[:64]
activated = 0
usersubs = UserSubscription.objects.filter(user=request.user)
for sub in usersubs:
try:
if sub.feed_id in approved_feeds:
activated += 1
if not sub.active:
sub.active = True
sub.save()
if sub.feed.active_subscribers <= 0:
sub.feed.count_subscribers()
elif sub.active:
sub.active = False
sub.save()
except Feed.DoesNotExist:
pass
UserSubscription.queue_new_feeds(request.user)
UserSubscription.refresh_stale_feeds(request.user, exclude_new=True)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')
logging.user(request, "~BB~FW~SBFeed chooser: ~FC%s~SN/~SB%s" % (
activated,
usersubs.count()
))
return {'activated': activated}
@ajax_login_required
def retrain_all_sites(request):
for sub in UserSubscription.objects.filter(user=request.user):
sub.is_trained = False
sub.save()
return feeds_trainer(request)
@login_required
def activate_premium_account(request):
try:
usersubs = UserSubscription.objects.select_related('feed').filter(user=request.user)
for sub in usersubs:
sub.active = True
sub.save()
if sub.feed.premium_subscribers <= 0:
sub.feed.count_subscribers()
sub.feed.schedule_feed_fetch_immediately()
except Exception, e:
subject = "Premium activation failed"
message = "%s -- %s\n\n%s" % (request.user, usersubs, e)
mail_admins(subject, message, fail_silently=True)
request.user.profile.is_premium = True
request.user.profile.save()
return HttpResponseRedirect(reverse('index'))
@login_required
def login_as(request):
if not request.user.is_staff:
logging.user(request, "~SKNON-STAFF LOGGING IN AS ANOTHER USER!")
assert False
return HttpResponseForbidden()
username = request.GET['user']
user = get_object_or_404(User, username__iexact=username)
user.backend = settings.AUTHENTICATION_BACKENDS[0]
login_user(request, user)
return HttpResponseRedirect(reverse('index'))
def iframe_buster(request):
logging.user(request, "~FB~SBiFrame bust!")
return HttpResponse(status=204)
@required_params('story_id', feed_id=int)
@ajax_login_required
@json.json_view
def mark_story_as_starred(request):
return _mark_story_as_starred(request)
@required_params('story_hash')
@ajax_login_required
@json.json_view
def mark_story_hash_as_starred(request):
return _mark_story_as_starred(request)
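# Shared implementation for both starring endpoints above: copies the story
# document into MStarredStory, records the user's tags, and keeps the per-feed
# and per-tag MStarredStoryCounts in sync (with an occasional scheduled full
# recount, roughly 1% of calls).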
def _mark_story_as_starred(request):
code = 1
feed_id = int(request.REQUEST.get('feed_id', 0))
story_id = request.REQUEST.get('story_id', None)
story_hash = request.REQUEST.get('story_hash', None)
user_tags = request.REQUEST.getlist('user_tags')
message = ""
if story_hash:
story, _ = MStory.find_story(story_hash=story_hash)
feed_id = story and story.story_feed_id
else:
story, _ = MStory.find_story(story_feed_id=feed_id, story_id=story_id)
if not story:
return {'code': -1, 'message': "Could not find story to save."}
story_db = dict([(k, v) for k, v in story._data.items()
if k is not None and v is not None])
story_db.pop('user_id', None)
story_db.pop('starred_date', None)
story_db.pop('id', None)
story_db.pop('user_tags', None)
now = datetime.datetime.now()
story_values = dict(starred_date=now, user_tags=user_tags, **story_db)
params = dict(story_guid=story.story_guid, user_id=request.user.pk)
starred_story = MStarredStory.objects(**params).limit(1)
created = False
removed_user_tags = []
if not starred_story:
params.update(story_values)
starred_story = MStarredStory.objects.create(**params)
created = True
MActivity.new_starred_story(user_id=request.user.pk,
story_title=story.story_title,
story_feed_id=feed_id,
story_id=starred_story.story_guid)
new_user_tags = user_tags
MStarredStoryCounts.adjust_count(request.user.pk, feed_id=feed_id, amount=1)
else:
starred_story = starred_story[0]
new_user_tags = list(set(user_tags) - set(starred_story.user_tags or []))
removed_user_tags = list(set(starred_story.user_tags or []) - set(user_tags))
starred_story.user_tags = user_tags
starred_story.save()
for tag in new_user_tags:
MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=1)
for tag in removed_user_tags:
MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=-1)
if random.random() < 0.01:
MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk)
MStarredStoryCounts.count_for_user(request.user.pk, total_only=True)
starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True)
if not starred_count and len(starred_counts):
starred_count = MStarredStory.objects(user_id=request.user.pk).count()
if created:
logging.user(request, "~FCStarring: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], starred_story.user_tags))
else:
logging.user(request, "~FCUpdating starred:~SN~FC ~SB%s~SN (~FM~SB%s~FC~SN)" % (story.story_title[:32], starred_story.user_tags))
return {'code': code, 'message': message, 'starred_count': starred_count, 'starred_counts': starred_counts}
@required_params('story_id')
@ajax_login_required
@json.json_view
def mark_story_as_unstarred(request):
return _mark_story_as_unstarred(request)
@required_params('story_hash')
@ajax_login_required
@json.json_view
def mark_story_hash_as_unstarred(request):
return _mark_story_as_unstarred(request)
def _mark_story_as_unstarred(request):
code = 1
story_id = request.POST.get('story_id', None)
story_hash = request.REQUEST.get('story_hash', None)
starred_counts = None
starred_story = None
if story_id:
starred_story = MStarredStory.objects(user_id=request.user.pk, story_guid=story_id)
if not story_id or not starred_story:
starred_story = MStarredStory.objects(user_id=request.user.pk, story_hash=story_hash or story_id)
if starred_story:
starred_story = starred_story[0]
logging.user(request, "~FCUnstarring: ~SB%s" % (starred_story.story_title[:50]))
user_tags = starred_story.user_tags
feed_id = starred_story.story_feed_id
MActivity.remove_starred_story(user_id=request.user.pk,
story_feed_id=starred_story.story_feed_id,
story_id=starred_story.story_guid)
starred_story.user_id = 0
try:
starred_story.save()
except NotUniqueError:
starred_story.delete()
MStarredStoryCounts.adjust_count(request.user.pk, feed_id=feed_id, amount=-1)
for tag in user_tags:
try:
MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=-1)
except MStarredStoryCounts.DoesNotExist:
pass
# MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk)
MStarredStoryCounts.count_for_user(request.user.pk, total_only=True)
starred_counts = MStarredStoryCounts.user_counts(request.user.pk)
else:
code = -1
return {'code': code, 'starred_counts': starred_counts}
@ajax_login_required
@json.json_view
def send_story_email(request):
code = 1
message = 'OK'
story_id = request.POST['story_id']
feed_id = request.POST['feed_id']
to_addresses = request.POST.get('to', '').replace(',', ' ').replace(' ', ' ').strip().split(' ')
from_name = request.POST['from_name']
from_email = request.POST['from_email']
email_cc = is_true(request.POST.get('email_cc', 'true'))
comments = request.POST['comments']
comments = comments[:2048] # Separated due to PyLint
from_address = '[email protected]'
share_user_profile = MSocialProfile.get_user(request.user.pk)
if not to_addresses:
code = -1
message = 'Please provide at least one email address.'
elif not all(email_re.match(to_address) for to_address in to_addresses if to_addresses):
code = -1
message = 'You need to send the email to a valid email address.'
elif not email_re.match(from_email):
code = -1
message = 'You need to provide your email address.'
elif not from_name:
code = -1
message = 'You need to provide your name.'
else:
story, _ = MStory.find_story(feed_id, story_id)
story = Feed.format_story(story, feed_id, text=True)
feed = Feed.get_by_id(story['story_feed_id'])
params = {
"to_addresses": to_addresses,
"from_name": from_name,
"from_email": from_email,
"email_cc": email_cc,
"comments": comments,
"from_address": from_address,
"story": story,
"feed": feed,
"share_user_profile": share_user_profile,
}
text = render_to_string('mail/email_story.txt', params)
html = render_to_string('mail/email_story.xhtml', params)
subject = '%s' % (story['story_title'])
cc = None
if email_cc:
cc = ['%s <%s>' % (from_name, from_email)]
subject = subject.replace('\n', ' ')
msg = EmailMultiAlternatives(subject, text,
from_email='NewsBlur <%s>' % from_address,
to=to_addresses,
cc=cc,
headers={'Reply-To': '%s <%s>' % (from_name, from_email)})
msg.attach_alternative(html, "text/html")
try:
msg.send()
except boto.ses.connection.ResponseError, e:
code = -1
message = "Email error: %s" % str(e)
logging.user(request, '~BMSharing story by email to %s recipient%s: ~FY~SB%s~SN~BM~FY/~SB%s' %
(len(to_addresses), '' if len(to_addresses) == 1 else 's',
story['story_title'][:50], feed and feed.feed_title[:50]))
return {'code': code, 'message': message}
@json.json_view
def load_tutorial(request):
if request.REQUEST.get('finished'):
logging.user(request, '~BY~FW~SBFinishing Tutorial')
return {}
else:
newsblur_feed = Feed.objects.filter(feed_address__icontains='blog.newsblur.com').order_by('-pk')[0]
logging.user(request, '~BY~FW~SBLoading Tutorial')
return {
'newsblur_feed': newsblur_feed.canonical()
}
| mit | 7,578,381,407,372,846,000 | 42.010379 | 137 | 0.597549 | false |
jvasile/withsqlite | withsqlite.py | 1 | 6379 | #!/usr/bin/env python
"""
withsqlite - uses an sqlite db as a back end for a dict-like object,
kind of like shelve but with json and sqlite3.
Copyright 2011-2013 James Vasile
Released under the GNU General Public License, version 3 or later.
See https://www.gnu.org/licenses/gpl-3.0.html for terms.
Repo is at <http://github.com/jvasile/withsqlite>. Patches welcome!
This file was developed as part of planeteria
<http://github.com/jvasile/planeteria>
"""
from __future__ import print_function, unicode_literals
import os
import sqlite3
import time
from contextlib import contextmanager
try:
import simplejson as json
except ImportError:
import json
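# Fallback serializer handed to json.dumps() below: anything json cannot encode
# natively is wrapped as {'__class__': ..., '__value__': ...}; time.struct_time
# becomes its asctime() text, everything else is stringified.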
def to_json(python_object):
if isinstance(python_object, time.struct_time):
return {'__class__': 'time.asctime',
'__value__': time.asctime(python_object)}
return {'__class__': 'basestring',
'__value__': str(python_object)}
class sqlite_db():
"""
Backends a dict on an sqlite db. This class aims to present like a
dict wherever it can.
USE:
import sqlite_db from withsqlite
with sqlite_db("filename") as db:
db['aaa'] = {'test':'ok'}
print db.items()
Specify a table to have one sqlite db hold multiple dicts:
with sqlite_db("filename", table="fruit") as db:
db['citrus'] = ['orange', 'grapefruit']
print db.items()
If you change the dict in any way, its state will differ from the
state of the sqlite database. Changes are committed to disk when you
close the database connection or (if you've set autocommit to True) after
each assignment.
"""
def __init__(self, fname, dir=None, autocommit=False, table="store"):
if dir:
fname = os.path.join(os.path.abspath(os.path.normpath(dir)), fname)
self.fname = '{}.sqlite3'.format(fname)
self.autocommit = autocommit
self.table = table
self.conn = None
self.crsr = None
def _connect(self):
if not self.conn:
if self.autocommit:
self.conn = sqlite3.connect(self.fname, isolation_level=None)
else:
self.conn = sqlite3.connect(self.fname)
if not self.crsr:
self.crsr = self.conn.cursor()
def make_db(self):
conn = sqlite3.connect(self.fname)
crsr = conn.cursor()
qry = "create table if not exists {} (key text unique, val text)"
crsr.execute(qry.format(self.table))
conn.commit()
crsr.close()
def isstring(self, value):
"Check if the value is a string"
try:
return isinstance(value, basestring)
except NameError:
return isinstance(value, str)
def jsonize(self, val):
"If it's just a string, serialize it ourselves"
if self.isstring(val):
return '"{}"'.format(val)
return json.dumps(val, default=to_json, sort_keys=True, indent=3)
def has_key(self, key):
print("W601 .has_key() is deprecated, use 'in'")
return self.__contains__(key)
def keys(self):
"""a.keys() a copy of a's list of keys"""
self.crsr.execute("select key from {}".format(self.table))
return [f[0] for f in self.crsr.fetchall()]
def values(self):
"""a.values() a copy of a's list of values"""
self.crsr.execute("select val from {}".format(self.table))
return [json.loads(f[0]) for f in self.crsr.fetchall()]
def items(self):
"""a.items() a copy of a's list of (key, value) pairs"""
self.crsr.execute("select * from {}".format(self.table))
return [(f[0], json.loads(f[1])) for f in self.crsr.fetchall()]
def get(self, k, x=None):
"""a.get(k[, x]) a[k] if k in a, else x """
try:
return self.__getitem__(k)
except KeyError:
return x
def clear(self):
"""a.clear() remove all items from a"""
self.crsr.execute("delete from {}".format(self.table))
def begin(self):
"Starts the transaction"
if not os.path.exists(self.fname):
self.make_db()
self._connect()
def save(self):
"Ends the transaction"
self.conn.commit()
self.crsr.close()
@contextmanager
def transaction(self):
"""You can use an instance of sqlite_db as follow:
>>> a = sqlite_db("test")
with a.transaction():
a['key'] = 'value'
"""
self.begin()
yield
self.save()
def __enter__(self):
self.begin()
return self
def __exit__(self, type, value, traceback):
self.save()
def __setitem__(self, key, val):
"""a[k] = v set a[k] to v """
try:
if val == self.__getitem__(key):
return
qry = "update or fail {} set val=? where key==?"
self.crsr.execute(qry.format(self.table), [self.jsonize(val), key])
except KeyError:
qry = "insert into {} values (?, ?)"
self.crsr.execute(qry.format(self.table), [key, self.jsonize(val)])
def __getitem__(self, key):
"""a[k] the item of a with key k (1), (10)"""
qry = 'select val from {} where key=?'
self.crsr.execute(qry.format(self.table), [key])
try:
f = self.crsr.fetchone()[0]
except TypeError:
raise KeyError(key)
return json.loads(f)
def __contains__(self, key):
"""k in a True if a has a key k, else False
k not in a Equivalent to not k in a"""
qry = "select COUNT(*) from {} where key=?"
self.crsr.execute(qry.format(self.table), [key])
return self.crsr.fetchone()[0] != 0
def __len__(self):
"""len(a) the number of items in a"""
self.crsr.execute("select COUNT(*) from {}".format(self.table))
return self.crsr.fetchone()[0]
def __delitem__(self, key):
"""del a[k] remove a[k] from a"""
qry = "delete from {} where key=?"
self.crsr.execute(qry.format(self.table), [key])
def __repr__(self):
r = []
for k, v in self.items():
r.append('{!r}: {!r}'.format(k, v))
return '{{{}}}'.format(", ".join(r))
def __iter__(self):
return iter(self.keys())
| gpl-3.0 | 8,648,903,622,903,924,000 | 30.423645 | 79 | 0.56639 | false |
hrahadiant/mini_py_project | minitodolist.py | 1 | 2576 | import os
# initialize a list
task = []
# check current python file path
currentpath = os.path.dirname(os.path.realpath(__file__))
# set name of the file
filename = "my_todo.txt"
# set directory to save the file
filepath = os.path.join(currentpath, filename)
# Check whether the todo file exists and whether it already has content
def open_list():
if os.path.isfile(filepath) == True:
file = open(filepath, 'r')
if os.stat(filepath).st_size > 0:
print("Here's your current todo in your file.")
with file as f:
for line in f:
print(line)
else:
print("Your current todo is empty.")
else:
file = open(filepath, 'w+')
print("File is successfully created on {}.".format(currentpath))
file.close()
save_message()
def save_message():
print("Save another list..")
# Clear the terminal screen
def clear():
    # os.system is a function object, so the old comparison to "cls" was always
    # False; check the platform name instead.
    if os.name == "nt":
        os.system('cls')
    else:
        os.system('clear')
# function for show current todo
def show_task():
if len(task) == 0:
print("Your current list is empty.")
else:
print("Here's your current list.")
for listing in task:
print("- {}".format(listing))
def close():
print("Thank you for use this apps.")
exit_question = input("Are you sure? (y/n) ")
if exit_question == "y" :
exit()
elif exit_question == "n":
pass
else:
print("Sorry, your typing is wrong.")
def show_help():
print("Here's a command for using the apps.")
print("type show, to show your current list.")
print("type help, to see this message.")
print("type remove, to remove your selected list.")
print("type save, to save current list into file.")
print("type quit or q, to close the apps")
def welcome_msg():
welcome = print("Welcome to to-do list. Just type and save!")
notif = input("Press enter to continue or q to quit, or help to get help. ")
if notif.lower() == "q":
exit()
elif notif.lower() == "help":
show_help()
else:
open_list()
welcome_msg()
while True:
todo = input("> ")
if todo == "show":
clear()
show_task()
elif todo == "q" or todo == "quit":
close()
elif todo == "help":
clear()
show_help()
continue
elif todo == "remove":
clear()
show_task()
item_to_remove = input("Choose one item you want to remove from the list. ")
idx = int(item_to_remove) - 1
try:
int(item_to_remove)
except TypeError:
print("Only type a number, please.")
except IndexError:
print("the list of index is out of range")
else:
print("{} has been removed from current list.".format(task[idx]))
del task[idx]
continue
elif todo == "save":
pass
else:
task.append(todo)
| apache-2.0 | 7,352,208,570,656,500,000 | 21.206897 | 78 | 0.651009 | false |
SkRobo/Eurobot-2017 | NewCommunication/Sucker/driver.py | 1 | 3816 | from multiprocessing import Process, Queue
from multiprocessing.queues import Queue as QueueType
import serial
from serial.tools import list_ports
from cmd_list import CMD_LIST
import time
from packets import encode_packet, decode_packet
import logging
#<<<<<<< HEAD
#PORT_VID = 1155
#PORT_PID = 22336
#PORT_SNR = '3677346C3034'
#DEVICE_NAME = '/dev/ttyACM0'
#=======
PORT_SNR = '336834593435'
DEVICE_NAME = '/dev/ttyACM0'# 'ACMO'
#>>>>>>> 4ead14e6dbd7bdcaae48a8ba2a886a9ec203a0d3
class DriverException(Exception):
pass
# The packet protocol is type-sensitive: if a parameter must be a float, pass 1.0, not 1!
# message format:
# {'source': <'fsm' or 'localization'>,
# 'cmd': 'cmd name',
# 'params': [<parameters>]}
# reply format: {'cmd': 'cmd name', 'data': <reply data>}
# You can test it without spawning process:
# >>> from driver import Driver
# >>> d = Driver()
# >>> d.connect()
# >>> d.process_cmd(cmd)
class Driver(Process):
"""Class for communications with STM32. Only one instance can exist at a time.
!!!warning!!! command parameters are type sensitive, use 0.0 not 0 if
parameter must be float
Examples
-------
>>> d = Driver()
>>> # send command in the blocking mode
>>> d.process_cmd('setCoordinates', [0.0, 0.0, 0.0])
"""
def __init__(self,inp_queue,fsm_queue,loc_queue, baudrate=9600, timeout=0.5, device=DEVICE_NAME, connect=True, **kwargs):
super(Driver, self).__init__(**kwargs)
self.device = device
self.port = None
self.baudrate = baudrate
self.timeout = timeout
self.input_cmd_queue = inp_queue
self.output_queues = {'fsm':fsm_queue,'loc':loc_queue}
if connect:
self.connect()
def connect(self):
"""Connect to STM32 using serial port"""
for port in list_ports.comports():
if (port.serial_number == PORT_SNR):
self.device = port.device
break
self.port = serial.Serial(self.device,
baudrate=self.baudrate, timeout=self.timeout)
def close(self):
'''Close serial port'''
self.port.close()
self.port = None
def process_cmd(self, cmd, params=None):
'''Process command in the blocking mode
Parameters
----------
cmd: string
Command name (see CMD_LIST)
params: list, optional
List of command parameters
'''
cmd_id = CMD_LIST[cmd]
packet = encode_packet(cmd_id, params)
logging.debug('data_to_stm:' + ','.join([str(i) for i in packet]))
self.port.write(packet)
data = self.port.read(size=3)
if len(data) != 3:
logging.critical('Couldn\'t read 3 bytes')
return {'cmd':'cylinder staff','data':'ok'}
data = bytearray(data)
data += self.port.read(size=int(data[2]) - 3)
return decode_packet(data)
def register_output(self, name, queue):
'''Register output queue. Must be called before run()'''
if not isinstance(queue, QueueType):
raise TypeError('Wrong type for queue')
self.output_queues[name] = queue
def run(self):
#{'source':'fsm','cmd':'SetCoordinates','params':[0,0,0]}
try:
while True:
cmd = self.input_cmd_queue.get()
if cmd is None:
break
source = cmd.get('source')
reply = self.process_cmd(cmd.get('cmd'),cmd.get('params'))
output_queue = self.output_queues.get(source)
if output_queue is not None:
output_queue.put(reply)
else:
raise DriverException('Incorrect source')
finally:
self.close()
| mit | 2,843,826,998,180,754,400 | 31.615385 | 125 | 0.57521 | false |
hernan-erasmo/project-euler | soluciones/problema13.py | 1 | 5374 | string_grande = """37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
74324986199524741059474233309513058123726617309629
91942213363574161572522430563301811072406154908250
23067588207539346171171980310421047513778063246676
89261670696623633820136378418383684178734361726757
28112879812849979408065481931592621691275889832738
44274228917432520321923589422876796487670272189318
47451445736001306439091167216856844588711603153276
70386486105843025439939619828917593665686757934951
62176457141856560629502157223196586755079324193331
64906352462741904929101432445813822663347944758178
92575867718337217661963751590579239728245598838407
58203565325359399008402633568948830189458628227828
80181199384826282014278194139940567587151170094390
35398664372827112653829987240784473053190104293586
86515506006295864861532075273371959191420517255829
71693888707715466499115593487603532921714970056938
54370070576826684624621495650076471787294438377604
53282654108756828443191190634694037855217779295145
36123272525000296071075082563815656710885258350721
45876576172410976447339110607218265236877223636045
17423706905851860660448207621209813287860733969412
81142660418086830619328460811191061556940512689692
51934325451728388641918047049293215058642563049483
62467221648435076201727918039944693004732956340691
15732444386908125794514089057706229429197107928209
55037687525678773091862540744969844508330393682126
18336384825330154686196124348767681297534375946515
80386287592878490201521685554828717201219257766954
78182833757993103614740356856449095527097864797581
16726320100436897842553539920931837441497806860984
48403098129077791799088218795327364475675590848030
87086987551392711854517078544161852424320693150332
59959406895756536782107074926966537676326235447210
69793950679652694742597709739166693763042633987085
41052684708299085211399427365734116182760315001271
65378607361501080857009149939512557028198746004375
35829035317434717326932123578154982629742552737307
94953759765105305946966067683156574377167401875275
88902802571733229619176668713819931811048770190271
25267680276078003013678680992525463401061632866526
36270218540497705585629946580636237993140746255962
24074486908231174977792365466257246923322810917141
91430288197103288597806669760892938638285025333403
34413065578016127815921815005561868836468420090470
23053081172816430487623791969842487255036638784583
11487696932154902810424020138335124462181441773470
63783299490636259666498587618221225225512486764533
67720186971698544312419572409913959008952310058822
95548255300263520781532296796249481641953868218774
76085327132285723110424803456124867697064507995236
37774242535411291684276865538926205024910326572967
23701913275725675285653248258265463092207058596522
29798860272258331913126375147341994889534765745501
18495701454879288984856827726077713721403798879715
38298203783031473527721580348144513491373226651381
34829543829199918180278916522431027392251122869539
40957953066405232632538044100059654939159879593635
29746152185502371307642255121183693803580388584903
41698116222072977186158236678424689157993532961922
62467957194401269043877107275048102390895523597457
23189706772547915061505504953922979530901129967519
86188088225875314529584099251203829009407770775672
11306739708304724483816533873502340845647058077308
82959174767140363198008187129011875491310547126581
97623331044818386269515456334926366572897563400500
42846280183517070527831839425882145521227251250327
55121603546981200581762165212827652751691296897789
32238195734329339946437501907836945765883352399886
75506164965184775180738168837861091527357929701337
62177842752192623401942399639168044983993173312731
32924185707147349566916674687634660915035914677504
99518671430235219628894890102423325116913619626622
73267460800591547471830798392868535206946944540724
76841822524674417161514036427982273348055556214818
97142617910342598647204516893989422179826088076852
87783646182799346313767754307809363333018982642090
10848802521674670883215120185883543223812876952786
71329612474782464538636993009049310363619763878039
62184073572399794223406235393808339651327408011116
66627891981488087797941876876144230030984490851411
60661826293682836764744779239180335110989069790714
85786944089552990653640447425576083659976645795096
66024396409905389607120198219976047599490197230297
64913982680032973156037120041377903785566085089252
16730939319872750275468906903707539413042652315011
94809377245048795150954100921645863754710598436791
78639167021187492431995700641917969777599028300699
15368713711936614952811305876380278410754449733078
40789923115535562561142322423255033685442488917353
44889911501440648020369068063960672322193204149535
41503128880339536053299340368006977710650566631954
81234880673210146739058568557934581403627822703280
82616570773948327592232845941706525094512325230608
22918802058777319719839450180888072429661980811197
77158542502016545090413245809786882778948721859617
72107838435069186155435662884062257473692284509516
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690"""
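# Sum the 50-digit numbers held in string_grande and print the total plus its first ten digits.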
def main():
global string_grande
lista_str = string_grande.split('\n')
lista_nros = [int(s) for s in lista_str]
suma = sum(lista_nros)
print "Suma: " + str(suma) + ", primeros 10 digitos: " + str(suma)[:10]
if __name__ == '__main__':
main()
| unlicense | -4,593,026,946,803,485,700 | 46.982143 | 72 | 0.960551 | false |
ChameleonCloud/horizon | openstack_auth/urls.py | 1 | 1556 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls import url
from django.views import generic
from openstack_auth import utils
from openstack_auth import views
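# Core authentication routes; the password-change and WebSSO routes below are appended only when enabled.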
urlpatterns = [
url(r"^login/$", views.login, name='login'),
url(r"^logout/$", views.logout, name='logout'),
url(r'^switch/(?P<tenant_id>[^/]+)/$', views.switch,
name='switch_tenants'),
url(r'^switch_services_region/(?P<region_name>[^/]+)/$',
views.switch_region,
name='switch_services_region'),
url(r'^switch_keystone_provider/(?P<keystone_provider>[^/]+)/$',
views.switch_keystone_provider,
name='switch_keystone_provider'),
]
if utils.allow_expired_passowrd_change():
urlpatterns.append(
url(r'^password/(?P<user_id>[^/]+)/$', views.PasswordView.as_view(),
name='password')
)
if utils.is_websso_enabled():
urlpatterns += [
url(r"^websso/$", views.websso, name='websso'),
url(r"^error/$",
generic.TemplateView.as_view(template_name="403.html"))
]
| apache-2.0 | -2,301,209,320,913,302,500 | 33.577778 | 76 | 0.666452 | false |
ADozois/ML_Challenge | logreg/models/learning/learning_optimization.py | 1 | 2144 | import numpy as np
from logreg.models.feature_computers.prediction_computer import ProbabilityComputerFactory
from logreg.models.cost_computers.cost_computer import CostComputerFactory
class OptimizationType(object):
GRADIENT = "gradient"
class GradientDescent(object):
@classmethod
def compute_gradient(cls, probability_matrix, target_matrix, feature_matrix):
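        # Mean gradient of the negative log-likelihood: X^T (P - Y) / N, with P the predicted probabilities.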
return -(np.dot(feature_matrix.T, (target_matrix - probability_matrix))) / feature_matrix.shape[0]
class UpdateWeights(object):
@staticmethod
def update_weights(weight_matrix, probability_matrix, target_matrix, feature_matrix, learning_rate):
weight_matrix -= learning_rate * GradientDescent.compute_gradient(probability_matrix, target_matrix,
feature_matrix)
return weight_matrix
class Learn(object):
def __init__(self, learning_rate, epoch, cost_threshold, debug):
self.learning_rate = learning_rate
self.epoch = epoch
self.cost_threshold = cost_threshold
self._debug = debug
def learn(self, weight_matrix, target_matrix, feature_matrix):
probability_computer = ProbabilityComputerFactory.create_probability_computer("softmax")
cost_computer = CostComputerFactory.create_cost_computer("neglog")
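        # Gradient-descent loop: update the weights each epoch and stop early once the cost falls below cost_threshold.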
for epoch in range(0, self.epoch):
probability_matrix = probability_computer.compute_probability(np.dot(feature_matrix, weight_matrix))
cost = cost_computer.compute_cost(target_matrix, probability_matrix)
if self._debug:
print cost
weight_matrix = UpdateWeights.update_weights(weight_matrix, probability_matrix, target_matrix,
feature_matrix, self.learning_rate)
if cost < self.cost_threshold:
return weight_matrix
return weight_matrix
class LearningProcessFactory(object):
@staticmethod
def create_learning_process(learning_rate, epoch, cost_threshold, debug):
return Learn(learning_rate, epoch, cost_threshold, debug)
| mit | 949,555,902,816,901,500 | 42.755102 | 112 | 0.672575 | false |
bospetersen/h2o-3 | h2o-py/tests/testdir_algos/deeplearning/pyunit_cv_cars_mediumDeepLearning.py | 1 | 5439 | import sys
sys.path.insert(1, "../../../")
import h2o, tests
import random
def cv_carsDL(ip,port):
# read in the dataset and construct training set (and validation set)
cars = h2o.import_file(path=h2o.locate("smalldata/junit/cars_20mpg.csv"))
    # choose the type of model-building exercise (regression, binomial or multinomial classification).
    # 0:regression, 1:binomial, 2:multinomial
problem = random.sample(range(3),1)[0]
# pick the predictors and the correct response column
predictors = ["displacement","power","weight","acceleration","year"]
if problem == 1 :
response_col = "economy_20mpg"
cars[response_col] = cars[response_col].asfactor()
elif problem == 2 :
response_col = "cylinders"
cars[response_col] = cars[response_col].asfactor()
else :
response_col = "economy"
print "Response column: {0}".format(response_col)
## cross-validation
# 1. basic
dl = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=random.randint(3,10), fold_assignment="Modulo")
# 2. check that cv metrics are different over repeated "Random" runs
nfolds = random.randint(3,10)
dl1 = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=nfolds, fold_assignment="Random")
dl2 = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=nfolds, fold_assignment="Random")
try:
tests.check_models(dl1, dl2, True)
assert False, "Expected models to be different over repeated Random runs"
except AssertionError:
assert True
# 3. folds_column
num_folds = random.randint(2,5)
fold_assignments = h2o.H2OFrame(python_obj=[[random.randint(0,num_folds-1)] for f in range(cars.nrow)])
fold_assignments.setNames(["fold_assignments"])
cars = cars.cbind(fold_assignments)
dl = h2o.deeplearning(y=cars[response_col], x=cars[predictors], training_frame=cars,
fold_column="fold_assignments", keep_cross_validation_predictions=True)
num_cv_models = len(dl._model_json['output']['cross_validation_models'])
assert num_cv_models==num_folds, "Expected {0} cross-validation models, but got " \
"{1}".format(num_folds, num_cv_models)
cv_model1 = h2o.get_model(dl._model_json['output']['cross_validation_models'][0]['name'])
cv_model2 = h2o.get_model(dl._model_json['output']['cross_validation_models'][1]['name'])
assert isinstance(cv_model1, type(dl)), "Expected cross-validation model to be the same model type as the " \
"constructed model, but got {0} and {1}".format(type(cv_model1),type(dl))
assert isinstance(cv_model2, type(dl)), "Expected cross-validation model to be the same model type as the " \
"constructed model, but got {0} and {1}".format(type(cv_model2),type(dl))
# 4. keep_cross_validation_predictions
cv_predictions = dl1._model_json['output']['cross_validation_predictions']
assert cv_predictions is None, "Expected cross-validation predictions to be None, but got {0}".format(cv_predictions)
cv_predictions = dl._model_json['output']['cross_validation_predictions']
assert len(cv_predictions)==num_folds, "Expected the same number of cross-validation predictions " \
"as folds, but got {0}".format(len(cv_predictions))
## boundary cases
# 1. nfolds = number of observations (leave-one-out cross-validation)
dl = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=cars.nrow, fold_assignment="Modulo")
# 2. nfolds = 0
dl = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=0)
# 3. cross-validation and regular validation attempted
dl = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=random.randint(3,10),
validation_y=cars[response_col], validation_x=cars[predictors])
## error cases
# 1. nfolds == 1 or < 0
try:
dl = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=random.sample([-1,1], 1)[0])
assert False, "Expected model-build to fail when nfolds is 1 or < 0"
except EnvironmentError:
assert True
# 2. more folds than observations
try:
dl = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=cars.nrow+1, fold_assignment="Modulo")
assert False, "Expected model-build to fail when nfolds > nobs"
except EnvironmentError:
assert True
# 3. fold_column and nfolds both specified
try:
rf = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=3, fold_column="fold_assignments",
training_frame=cars)
assert False, "Expected model-build to fail when fold_column and nfolds both specified"
except EnvironmentError:
assert True
# # 4. fold_column and fold_assignment both specified
# try:
# rf = h2o.deeplearning(y=cars[response_col], x=cars[predictors], fold_assignment="Random",
# fold_column="fold_assignments", training_frame=cars)
# assert False, "Expected model-build to fail when fold_column and fold_assignment both specified"
# except EnvironmentError:
# assert True
if __name__ == "__main__":
tests.run_test(sys.argv, cv_carsDL)
| apache-2.0 | 6,783,123,054,350,010,000 | 47.5625 | 122 | 0.650303 | false |
psykzz/flask-admin | flask_admin/tests/sqlamodel/test_basic.py | 1 | 24553 | from nose.tools import eq_, ok_, raises
from wtforms import fields
from flask.ext.admin import form
from flask.ext.admin._compat import as_unicode
from flask.ext.admin._compat import iteritems
from flask.ext.admin.contrib.sqla import ModelView
from . import setup
class CustomModelView(ModelView):
def __init__(self, model, session,
name=None, category=None, endpoint=None, url=None,
**kwargs):
for k, v in iteritems(kwargs):
setattr(self, k, v)
super(CustomModelView, self).__init__(model, session, name, category,
endpoint, url)
def create_models(db):
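    # Two small models joined by a one-to-many relation; most tests below build on these tables.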
class Model1(db.Model):
def __init__(self, test1=None, test2=None, test3=None, test4=None, bool_field=False):
self.test1 = test1
self.test2 = test2
self.test3 = test3
self.test4 = test4
self.bool_field = bool_field
id = db.Column(db.Integer, primary_key=True)
test1 = db.Column(db.String(20))
test2 = db.Column(db.Unicode(20))
test3 = db.Column(db.Text)
test4 = db.Column(db.UnicodeText)
bool_field = db.Column(db.Boolean)
        enum_field = db.Column(db.Enum('model1_v1', 'model1_v2'), nullable=True)
def __str__(self):
return self.test1
class Model2(db.Model):
def __init__(self, string_field=None, int_field=None, bool_field=None, model1=None):
self.string_field = string_field
self.int_field = int_field
self.bool_field = bool_field
self.model1 = model1
id = db.Column(db.Integer, primary_key=True)
string_field = db.Column(db.String)
int_field = db.Column(db.Integer)
bool_field = db.Column(db.Boolean)
enum_field = db.Column(db.Enum('model2_v1', 'model2_v2'), nullable=True)
# Relation
model1_id = db.Column(db.Integer, db.ForeignKey(Model1.id))
model1 = db.relationship(Model1, backref='model2')
db.create_all()
return Model1, Model2
def test_model():
app, db, admin = setup()
Model1, Model2 = create_models(db)
db.create_all()
view = CustomModelView(Model1, db.session)
admin.add_view(view)
eq_(view.model, Model1)
eq_(view.name, 'Model1')
eq_(view.endpoint, 'model1')
eq_(view._primary_key, 'id')
ok_('test1' in view._sortable_columns)
ok_('test2' in view._sortable_columns)
ok_('test3' in view._sortable_columns)
ok_('test4' in view._sortable_columns)
ok_(view._create_form_class is not None)
ok_(view._edit_form_class is not None)
eq_(view._search_supported, False)
eq_(view._filters, None)
# Verify form
eq_(view._create_form_class.test1.field_class, fields.TextField)
eq_(view._create_form_class.test2.field_class, fields.TextField)
eq_(view._create_form_class.test3.field_class, fields.TextAreaField)
eq_(view._create_form_class.test4.field_class, fields.TextAreaField)
# Make some test clients
client = app.test_client()
rv = client.get('/admin/model1/')
eq_(rv.status_code, 200)
rv = client.get('/admin/model1/new/')
eq_(rv.status_code, 200)
rv = client.post('/admin/model1/new/',
data=dict(test1='test1large', test2='test2'))
eq_(rv.status_code, 302)
model = db.session.query(Model1).first()
eq_(model.test1, u'test1large')
eq_(model.test2, u'test2')
eq_(model.test3, u'')
eq_(model.test4, u'')
rv = client.get('/admin/model1/')
eq_(rv.status_code, 200)
ok_(u'test1large' in rv.data.decode('utf-8'))
url = '/admin/model1/edit/?id=%s' % model.id
rv = client.get(url)
eq_(rv.status_code, 200)
rv = client.post(url,
data=dict(test1='test1small', test2='test2large'))
eq_(rv.status_code, 302)
model = db.session.query(Model1).first()
eq_(model.test1, 'test1small')
eq_(model.test2, 'test2large')
eq_(model.test3, '')
eq_(model.test4, '')
url = '/admin/model1/delete/?id=%s' % model.id
rv = client.post(url)
eq_(rv.status_code, 302)
eq_(db.session.query(Model1).count(), 0)
@raises(Exception)
def test_no_pk():
app, db, admin = setup()
class Model(db.Model):
test = db.Column(db.Integer)
view = CustomModelView(Model)
admin.add_view(view)
def test_list_columns():
app, db, admin = setup()
Model1, Model2 = create_models(db)
view = CustomModelView(Model1, db.session,
column_list=['test1', 'test3'],
column_labels=dict(test1='Column1'))
admin.add_view(view)
eq_(len(view._list_columns), 2)
eq_(view._list_columns, [('test1', 'Column1'), ('test3', 'Test3')])
client = app.test_client()
rv = client.get('/admin/model1/')
data = rv.data.decode('utf-8')
ok_('Column1' in data)
ok_('Test2' not in data)
def test_exclude_columns():
app, db, admin = setup()
Model1, Model2 = create_models(db)
view = CustomModelView(
Model1, db.session,
column_exclude_list=['test2', 'test4', 'enum_field']
)
admin.add_view(view)
eq_(
view._list_columns,
[('test1', 'Test1'), ('test3', 'Test3'), ('bool_field', 'Bool Field')]
)
client = app.test_client()
rv = client.get('/admin/model1/')
data = rv.data.decode('utf-8')
ok_('Test1' in data)
ok_('Test2' not in data)
def test_column_searchable_list():
app, db, admin = setup()
Model1, Model2 = create_models(db)
view = CustomModelView(Model1, db.session,
column_searchable_list=['test1', 'test2'])
admin.add_view(view)
eq_(view._search_supported, True)
eq_(len(view._search_fields), 2)
ok_(isinstance(view._search_fields[0], db.Column))
ok_(isinstance(view._search_fields[1], db.Column))
eq_(view._search_fields[0].name, 'test1')
eq_(view._search_fields[1].name, 'test2')
db.session.add(Model1('model1'))
db.session.add(Model1('model2'))
db.session.commit()
client = app.test_client()
rv = client.get('/admin/model1/?search=model1')
data = rv.data.decode('utf-8')
ok_('model1' in data)
ok_('model2' not in data)
def test_column_filters():
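    # Covers filter generation for plain columns, related-model columns and dotted "relation.column" paths.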
app, db, admin = setup()
Model1, Model2 = create_models(db)
view = CustomModelView(
Model1, db.session,
column_filters=['test1']
)
admin.add_view(view)
eq_(len(view._filters), 4)
eq_([(f['index'], f['operation']) for f in view._filter_groups[u'Test1']],
[
(0, u'equals'),
(1, u'not equal'),
(2, u'contains'),
(3, u'not contains')
])
# Test filter that references property
view = CustomModelView(Model2, db.session,
column_filters=['model1'])
eq_([(f['index'], f['operation']) for f in view._filter_groups[u'Model1 / Test1']],
[
(0, u'equals'),
(1, u'not equal'),
(2, u'contains'),
(3, u'not contains')
])
eq_([(f['index'], f['operation']) for f in view._filter_groups[u'Model1 / Test2']],
[
(4, 'equals'),
(5, 'not equal'),
(6, 'contains'),
(7, 'not contains')
])
eq_([(f['index'], f['operation']) for f in view._filter_groups[u'Model1 / Test3']],
[
(8, u'equals'),
(9, u'not equal'),
(10, u'contains'),
(11, u'not contains')
])
eq_([(f['index'], f['operation']) for f in view._filter_groups[u'Model1 / Test4']],
[
(12, u'equals'),
(13, u'not equal'),
(14, u'contains'),
(15, u'not contains')
])
eq_([(f['index'], f['operation']) for f in view._filter_groups[u'Model1 / Bool Field']],
[
(16, u'equals'),
(17, u'not equal'),
])
eq_([(f['index'], f['operation']) for f in view._filter_groups[u'Model1 / Enum Field']],
[
(18, u'equals'),
(19, u'not equal'),
])
# Test filter with a dot
view = CustomModelView(Model2, db.session,
column_filters=['model1.bool_field'])
eq_([(f['index'], f['operation']) for f in view._filter_groups[u'Model1 / Bool Field']],
[
(0, 'equals'),
(1, 'not equal'),
])
# Fill DB
model1_obj1 = Model1('model1_obj1', bool_field=True)
model1_obj2 = Model1('model1_obj2')
model1_obj3 = Model1('model1_obj3')
model1_obj4 = Model1('model1_obj4')
model2_obj1 = Model2('model2_obj1', model1=model1_obj1)
model2_obj2 = Model2('model2_obj2', model1=model1_obj1)
model2_obj3 = Model2('model2_obj3')
model2_obj4 = Model2('model2_obj4')
db.session.add_all([
model1_obj1, model1_obj2, model1_obj3, model1_obj4,
model2_obj1, model2_obj2, model2_obj3, model2_obj4,
])
db.session.commit()
client = app.test_client()
rv = client.get('/admin/model1/?flt0_0=model1_obj1')
eq_(rv.status_code, 200)
data = rv.data.decode('utf-8')
ok_('model1_obj1' in data)
ok_('model1_obj2' not in data)
rv = client.get('/admin/model1/?flt0_5=model1_obj1')
eq_(rv.status_code, 200)
data = rv.data.decode('utf-8')
ok_('model1_obj1' in data)
ok_('model1_obj2' in data)
# Test different filter types
view = CustomModelView(Model2, db.session,
column_filters=['int_field'])
admin.add_view(view)
eq_([(f['index'], f['operation']) for f in view._filter_groups[u'Int Field']],
[
(0, 'equals'),
(1, 'not equal'),
(2, 'greater than'),
(3, 'smaller than')
])
# Test filters to joined table field
view = CustomModelView(
Model2, db.session,
endpoint='_model2',
column_filters=['model1.bool_field'],
column_list=[
'string_field',
'model1.id',
'model1.bool_field',
]
)
admin.add_view(view)
rv = client.get('/admin/_model2/?flt1_0=1')
eq_(rv.status_code, 200)
data = rv.data.decode('utf-8')
ok_('model2_obj1' in data)
ok_('model2_obj2' in data)
ok_('model2_obj3' not in data)
ok_('model2_obj4' not in data)
# Test human readable URLs
view = CustomModelView(
Model1, db.session,
column_filters=['test1'],
endpoint='_model3',
named_filter_urls=True
)
admin.add_view(view)
rv = client.get('/admin/_model3/?flt1_test1_equals=model1_obj1')
eq_(rv.status_code, 200)
data = rv.data.decode('utf-8')
ok_('model1_obj1' in data)
ok_('model1_obj2' not in data)
def test_url_args():
app, db, admin = setup()
Model1, Model2 = create_models(db)
view = CustomModelView(Model1, db.session,
page_size=2,
column_searchable_list=['test1'],
column_filters=['test1'])
admin.add_view(view)
db.session.add(Model1('data1'))
db.session.add(Model1('data2'))
db.session.add(Model1('data3'))
db.session.add(Model1('data4'))
db.session.commit()
client = app.test_client()
rv = client.get('/admin/model1/')
data = rv.data.decode('utf-8')
ok_('data1' in data)
ok_('data3' not in data)
# page
rv = client.get('/admin/model1/?page=1')
data = rv.data.decode('utf-8')
ok_('data1' not in data)
ok_('data3' in data)
# sort
rv = client.get('/admin/model1/?sort=0&desc=1')
data = rv.data.decode('utf-8')
ok_('data1' not in data)
ok_('data3' in data)
ok_('data4' in data)
# search
rv = client.get('/admin/model1/?search=data1')
data = rv.data.decode('utf-8')
ok_('data1' in data)
ok_('data2' not in data)
rv = client.get('/admin/model1/?search=^data1')
data = rv.data.decode('utf-8')
ok_('data2' not in data)
# like
rv = client.get('/admin/model1/?flt0=0&flt0v=data1')
data = rv.data.decode('utf-8')
ok_('data1' in data)
# not like
rv = client.get('/admin/model1/?flt0=1&flt0v=data1')
data = rv.data.decode('utf-8')
ok_('data2' in data)
def test_non_int_pk():
app, db, admin = setup()
class Model(db.Model):
id = db.Column(db.String, primary_key=True)
test = db.Column(db.String)
db.create_all()
view = CustomModelView(Model, db.session, form_columns=['id', 'test'])
admin.add_view(view)
client = app.test_client()
rv = client.get('/admin/model/')
eq_(rv.status_code, 200)
rv = client.post('/admin/model/new/',
data=dict(id='test1', test='test2'))
eq_(rv.status_code, 302)
rv = client.get('/admin/model/')
eq_(rv.status_code, 200)
data = rv.data.decode('utf-8')
ok_('test1' in data)
rv = client.get('/admin/model/edit/?id=test1')
eq_(rv.status_code, 200)
data = rv.data.decode('utf-8')
ok_('test2' in data)
def test_multiple_pk():
# Test multiple primary keys - mix int and string together
app, db, admin = setup()
class Model(db.Model):
id = db.Column(db.Integer, primary_key=True)
id2 = db.Column(db.String(20), primary_key=True)
test = db.Column(db.String)
db.create_all()
view = CustomModelView(Model, db.session, form_columns=['id', 'id2', 'test'])
admin.add_view(view)
client = app.test_client()
rv = client.get('/admin/model/')
eq_(rv.status_code, 200)
rv = client.post('/admin/model/new/',
data=dict(id=1, id2='two', test='test3'))
eq_(rv.status_code, 302)
rv = client.get('/admin/model/')
eq_(rv.status_code, 200)
data = rv.data.decode('utf-8')
ok_('test3' in data)
rv = client.get('/admin/model/edit/?id=1&id=two')
eq_(rv.status_code, 200)
data = rv.data.decode('utf-8')
ok_('test3' in data)
# Correct order is mandatory -> fail here
rv = client.get('/admin/model/edit/?id=two&id=1')
eq_(rv.status_code, 302)
def test_form_columns():
app, db, admin = setup()
class Model(db.Model):
id = db.Column(db.String, primary_key=True)
int_field = db.Column(db.Integer)
datetime_field = db.Column(db.DateTime)
text_field = db.Column(db.UnicodeText)
excluded_column = db.Column(db.String)
class ChildModel(db.Model):
id = db.Column(db.String, primary_key=True)
model_id = db.Column(db.Integer, db.ForeignKey(Model.id))
model = db.relationship(Model, backref='backref')
db.create_all()
view1 = CustomModelView(Model, db.session, endpoint='view1',
form_columns=('int_field', 'text_field'))
view2 = CustomModelView(Model, db.session, endpoint='view2',
form_excluded_columns=('excluded_column',))
view3 = CustomModelView(ChildModel, db.session, endpoint='view3')
form1 = view1.create_form()
form2 = view2.create_form()
form3 = view3.create_form()
ok_('int_field' in form1._fields)
ok_('text_field' in form1._fields)
ok_('datetime_field' not in form1._fields)
ok_('excluded_column' not in form2._fields)
ok_(type(form3.model).__name__ == 'QuerySelectField')
# TODO: form_args
def test_form_override():
app, db, admin = setup()
class Model(db.Model):
id = db.Column(db.String, primary_key=True)
test = db.Column(db.String)
db.create_all()
view1 = CustomModelView(Model, db.session, endpoint='view1')
view2 = CustomModelView(Model, db.session, endpoint='view2', form_overrides=dict(test=fields.FileField))
admin.add_view(view1)
admin.add_view(view2)
eq_(view1._create_form_class.test.field_class, fields.TextField)
eq_(view2._create_form_class.test.field_class, fields.FileField)
def test_form_onetoone():
app, db, admin = setup()
class Model1(db.Model):
id = db.Column(db.Integer, primary_key=True)
test = db.Column(db.String)
class Model2(db.Model):
id = db.Column(db.Integer, primary_key=True)
model1_id = db.Column(db.Integer, db.ForeignKey(Model1.id))
model1 = db.relationship(Model1, backref=db.backref('model2', uselist=False))
db.create_all()
view1 = CustomModelView(Model1, db.session, endpoint='view1')
view2 = CustomModelView(Model2, db.session, endpoint='view2')
admin.add_view(view1)
admin.add_view(view2)
model1 = Model1(test='test')
model2 = Model2(model1=model1)
db.session.add(model1)
db.session.add(model2)
db.session.commit()
eq_(model1.model2, model2)
eq_(model2.model1, model1)
eq_(view1._create_form_class.model2.kwargs['widget'].multiple, False)
eq_(view2._create_form_class.model1.kwargs['widget'].multiple, False)
def test_relations():
# TODO: test relations
pass
def test_on_model_change_delete():
app, db, admin = setup()
Model1, _ = create_models(db)
db.create_all()
class ModelView(CustomModelView):
def on_model_change(self, form, model, is_created):
model.test1 = model.test1.upper()
def on_model_delete(self, model):
self.deleted = True
view = ModelView(Model1, db.session)
admin.add_view(view)
client = app.test_client()
client.post('/admin/model1/new/',
data=dict(test1='test1large', test2='test2'))
model = db.session.query(Model1).first()
eq_(model.test1, 'TEST1LARGE')
url = '/admin/model1/edit/?id=%s' % model.id
client.post(url, data=dict(test1='test1small', test2='test2large'))
model = db.session.query(Model1).first()
eq_(model.test1, 'TEST1SMALL')
url = '/admin/model1/delete/?id=%s' % model.id
client.post(url)
ok_(view.deleted)
def test_multiple_delete():
app, db, admin = setup()
M1, _ = create_models(db)
db.session.add_all([M1('a'), M1('b'), M1('c')])
db.session.commit()
eq_(M1.query.count(), 3)
view = ModelView(M1, db.session)
admin.add_view(view)
client = app.test_client()
rv = client.post('/admin/model1/action/', data=dict(action='delete', rowid=[1, 2, 3]))
eq_(rv.status_code, 302)
eq_(M1.query.count(), 0)
def test_default_sort():
app, db, admin = setup()
M1, _ = create_models(db)
db.session.add_all([M1('c'), M1('b'), M1('a')])
db.session.commit()
eq_(M1.query.count(), 3)
view = CustomModelView(M1, db.session, column_default_sort='test1')
admin.add_view(view)
_, data = view.get_list(0, None, None, None, None)
eq_(len(data), 3)
eq_(data[0].test1, 'a')
eq_(data[1].test1, 'b')
eq_(data[2].test1, 'c')
def test_extra_fields():
app, db, admin = setup()
Model1, _ = create_models(db)
view = CustomModelView(
Model1, db.session,
form_extra_fields={
'extra_field': fields.TextField('Extra Field')
}
)
admin.add_view(view)
client = app.test_client()
rv = client.get('/admin/model1/new/')
eq_(rv.status_code, 200)
# Check presence and order
data = rv.data.decode('utf-8')
ok_('Extra Field' in data)
pos1 = data.find('Extra Field')
pos2 = data.find('Test1')
ok_(pos2 < pos1)
def test_extra_field_order():
app, db, admin = setup()
Model1, _ = create_models(db)
view = CustomModelView(
Model1, db.session,
form_columns=('extra_field', 'test1'),
form_extra_fields={
'extra_field': fields.TextField('Extra Field')
}
)
admin.add_view(view)
client = app.test_client()
rv = client.get('/admin/model1/new/')
eq_(rv.status_code, 200)
# Check presence and order
data = rv.data.decode('utf-8')
pos1 = data.find('Extra Field')
pos2 = data.find('Test1')
ok_(pos2 > pos1)
# TODO: Babel tests
def test_custom_form_base():
app, db, admin = setup()
class TestForm(form.BaseForm):
pass
Model1, _ = create_models(db)
view = CustomModelView(
Model1, db.session,
form_base_class=TestForm
)
admin.add_view(view)
ok_(hasattr(view._create_form_class, 'test1'))
create_form = view.create_form()
ok_(isinstance(create_form, TestForm))
def test_ajax_fk():
app, db, admin = setup()
Model1, Model2 = create_models(db)
view = CustomModelView(
Model2, db.session,
url='view',
form_ajax_refs={
'model1': {
'fields': ('test1', 'test2')
}
}
)
admin.add_view(view)
ok_(u'model1' in view._form_ajax_refs)
model = Model1(u'first')
model2 = Model1(u'foo', u'bar')
db.session.add_all([model, model2])
db.session.commit()
# Check loader
loader = view._form_ajax_refs[u'model1']
mdl = loader.get_one(model.id)
eq_(mdl.test1, model.test1)
items = loader.get_list(u'fir')
eq_(len(items), 1)
eq_(items[0].id, model.id)
items = loader.get_list(u'bar')
eq_(len(items), 1)
eq_(items[0].test1, u'foo')
# Check form generation
form = view.create_form()
eq_(form.model1.__class__.__name__, u'AjaxSelectField')
with app.test_request_context('/admin/view/'):
ok_(u'value=""' not in form.model1())
form.model1.data = model
        ok_(u'data-json="[%s, &quot;first&quot;]"' % model.id in form.model1())
ok_(u'value="1"' in form.model1())
# Check querying
client = app.test_client()
req = client.get(u'/admin/view/ajax/lookup/?name=model1&query=foo')
eq_(req.data.decode('utf-8'), u'[[%s, "foo"]]' % model2.id)
# Check submitting
req = client.post('/admin/view/new/', data={u'model1': as_unicode(model.id)})
mdl = db.session.query(Model2).first()
ok_(mdl is not None)
ok_(mdl.model1 is not None)
eq_(mdl.model1.id, model.id)
eq_(mdl.model1.test1, u'first')
def test_ajax_fk_multi():
app, db, admin = setup()
class Model1(db.Model):
__tablename__ = 'model1'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(20))
def __str__(self):
return self.name
table = db.Table('m2m', db.Model.metadata,
db.Column('model1_id', db.Integer, db.ForeignKey('model1.id')),
db.Column('model2_id', db.Integer, db.ForeignKey('model2.id'))
)
class Model2(db.Model):
__tablename__ = 'model2'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(20))
model1_id = db.Column(db.Integer(), db.ForeignKey(Model1.id))
model1 = db.relationship(Model1, backref='models2', secondary=table)
db.create_all()
view = CustomModelView(
Model2, db.session,
url='view',
form_ajax_refs={
'model1': {
'fields': ['name']
}
}
)
admin.add_view(view)
ok_(u'model1' in view._form_ajax_refs)
model = Model1(name=u'first')
db.session.add_all([model, Model1(name=u'foo')])
db.session.commit()
# Check form generation
form = view.create_form()
eq_(form.model1.__class__.__name__, u'AjaxSelectMultipleField')
with app.test_request_context('/admin/view/'):
ok_(u'data-json="[]"' in form.model1())
form.model1.data = [model]
        ok_(u'data-json="[[1, &quot;first&quot;]]"' in form.model1())
# Check submitting
client = app.test_client()
client.post('/admin/view/new/', data={u'model1': as_unicode(model.id)})
mdl = db.session.query(Model2).first()
ok_(mdl is not None)
ok_(mdl.model1 is not None)
eq_(len(mdl.model1), 1)
def test_safe_redirect():
app, db, admin = setup()
Model1, _ = create_models(db)
db.create_all()
view = CustomModelView(Model1, db.session)
admin.add_view(view)
client = app.test_client()
rv = client.post('/admin/model1/new/?url=http://localhost/admin/model2view/',
data=dict(test1='test1large', test2='test2'))
eq_(rv.status_code, 302)
eq_(rv.location, 'http://localhost/admin/model2view/')
rv = client.post('/admin/model1/new/?url=http://google.com/evil/',
data=dict(test1='test1large', test2='test2'))
eq_(rv.status_code, 302)
eq_(rv.location, 'http://localhost/admin/model1/')
| bsd-3-clause | -4,268,105,280,054,733,300 | 26.464206 | 108 | 0.577608 | false |
awerner/pymime | integration/django_attachmentstore/settings.py | 1 | 5078 | # Django settings for django_attachmentstore project.
import os
ROOT = lambda * x: os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), *x))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
SERVER_EMAIL = "root@localhost"
EMAIL_SUBJECT_PREFIX = "[PyMIME]"
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': ROOT("sqlite.db"), # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Berlin'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ROOT('media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ROOT('static')
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '###CHANGEME###'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'django_attachmentstore.urls'
TEMPLATE_DIRS = (
ROOT("templates"),
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.admindocs',
'pymime.django_app.pymime_attachmentservice',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| gpl-3.0 | -4,900,078,274,259,793,000 | 32.853333 | 122 | 0.699882 | false |
dpm76/Bot1 | easy-tracker/playground/Tracking.py | 1 | 2100 | import numpy as np
import cv2
cap = cv2.VideoCapture(0)
startTracking = True
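# Press 't' during playback to re-initialise tracking on the hard-coded window; 'q' quits.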
while startTracking:
startTracking = False
# take first frame of the video
_,frame = cap.read()
frame = cv2.flip(frame, 1)
frame = cv2.GaussianBlur(frame, (5, 5), 5)
# setup initial location of window
r,h,c,w = 200,45,300,80 # simply hardcoded the values
track_window = (c,r,w,h)
orig_track = [(c,r),(c+w, r+h)]
# set up the ROI for tracking
roi = frame[r:r+h, c:c+w]
hsv_roi = cv2.cvtColor(roi, cv2.COLOR_BGR2HSV)
mask = cv2.inRange(hsv_roi, np.array((0., 60.,32.)), np.array((180.,255.,255.)))
roi_hist = cv2.calcHist([hsv_roi],[0],mask,[180],[0,180])
cv2.normalize(roi_hist,roi_hist,0,255,cv2.NORM_MINMAX)
roi = cv2.bitwise_and(roi, roi, mask=mask)
cv2.imshow('img1',roi)
    # Set up the termination criteria: either 10 iterations or movement by at least 1 pt
term_crit = ( cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 1 )
while(1):
ret, frame = cap.read()
frame = cv2.flip(frame, 1)
frame = cv2.GaussianBlur(frame, (5, 5), 5)
if ret == True:
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
bp = cv2.calcBackProject([hsv],[0],roi_hist,[0,180],1)
            # apply CamShift to get the new location
ret, track_window = cv2.CamShift(bp, track_window, term_crit)
# Draw it on image
pts = cv2.boxPoints(ret)
pts = np.int0(pts)
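            # Max out the saturation channel of the preview frame before converting back to BGR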
hsv[:,:,1:2] = 255
output = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
cv2.rectangle(output,orig_track[0], orig_track[1],(255,0,0),2)
cv2.polylines(output,[pts],True, (0,255,255),2)
cv2.imshow('img2',output)
key = cv2.waitKey(1) & 0xFF
if key == ord('q'):
break
elif key == ord('t'):
startTracking = True
break
else:
break
cv2.destroyAllWindows()
cap.release()
| mit | 8,676,742,072,440,708,000 | 29 | 84 | 0.542857 | false |
eqrx/mauzr | mauzr/gui/__init__.py | 1 | 4741 | """ Montoring and control GUI. """
import enum
import pygame # pylint: disable=import-error
__author__ = "Alexander Sowitzki"
class BaseElement:
""" An visible element inside a GUI.
:param location: Center of the element.
:type location: mauzr.gui.Vector
:param size: Size of the element.
:type size: mauzr.gui.Vector
"""
def __init__(self, location, size):
self._location = location
self._size = size
screen = pygame.display.get_surface()
self._rect = pygame.Rect(location.values, size.values)
self._surface = screen.subsurface(self._rect)
self.state_acknowledged = True
def _on_click(self):
""" Called when the element is clicked. """
def on_mouse(self, position):
""" Called on mouse click.
:param position: Location of the cursor when clicked.
:type position: tuple
"""
if self._rect.collidepoint(position):
self._on_click()
def _draw_text(self):
""" Draw text of this element.
Should be overridden by visible mixins.
"""
def _draw_background(self):
""" Draw background of this element.
Should be overridden by visible mixins.
"""
def _draw_foreground(self):
""" Draw foreground of this element.
Should be overridden by visible mixins.
"""
@property
def _color(self):
""" Color of the element as tuple. """
return (150, 0, 0)
def draw(self):
""" Draw the element. """
self._draw_background()
self._draw_foreground()
self._draw_text()
class RectBackgroundMixin:
""" An rectangle element inside a GUI. """
def _draw_background(self):
self._surface.fill(self._color)
class ColorState(enum.Enum):
""" State of :class:`mauzr.gui.ColorStateMixin`.
Each state has a tuple of colors indicating it.
They will be cycled through.
"""
UNKNOWN = ((150, 0, 0),)
""" State is unknown. """
ERROR = ((150, 0, 0), (255, 0, 0))
""" State indicates system error. """
WARNING = ((150, 150, 0), (255, 255, 0))
""" State is undesired. """
INFORMATION = ((0, 150, 0), (0, 255, 0))
""" State is good. """
class ColorStateMixin:
""" Mixin for :class:`mauzr.gui.Element`, adding a color change based
on a configurable state.
:param conditions: Dictionary mapping :class:`mauzr.gui.ColorState` to
functions. The function receives one parameter and should return True
if the value indicates the mapped state.
:type conditions: dict
"""
COLOR_DISPLAY_DURATION = 200
""" Display duration of a single color in milliseconds. """
def __init__(self, conditions):
self._state_conditions = conditions
self._state = ColorState.UNKNOWN
self.state_acknowledged = True
def _update_state(self, value):
""" The conditions functions are called in order of the state
appearance in the state enum. If a function returns True the mapped
state is applied to this mixin.
"""
sc = self._state_conditions
for state in ColorState:
if state in sc and sc[state](value) and state != self._state:
self._state = state
return
@property
def _color(self):
# Take first element as default
i = 0
if not self.state_acknowledged:
# Cycle if not acknowledged
t = pygame.time.get_ticks()
i = t // self.COLOR_DISPLAY_DURATION % len(self._state.value)
return self._state.value[i]
class TextMixin:
""" Mixin for :class:`mauzr.gui.Element`, adding a text label.
:param text: Initial text to display.
:type text: str
:param font_name: Name of the font.
:type font_name: str
:param font_size: Size of the font
:type font_size: int
"""
def __init__(self, text, font_name="Segoe Print", font_size=16):
self._font = pygame.font.SysFont(font_name, font_size)
self._text_offset = self._size // 2
self._current_text = None
self._text_surf = None
self._text_rect = None
self._text = text
@property
def _text(self):
""" Text to display. """
return self._current_text
@_text.setter
def _text(self, text):
""" Set text to display. """
self._current_text = text
self._text_surf = self._font.render(text, 1, (0, 0, 0))
c = self._text_offset.values
self._text_rect = self._text_surf.get_rect(center=c)
def _draw_text(self):
""" Inject text into element. """
self._surface.blit(self._text_surf, self._text_rect)
| agpl-3.0 | -8,324,247,963,371,238,000 | 26.247126 | 75 | 0.591858 | false |
AppEnlight/demo-application | src/appenlight_demo/views/default.py | 1 | 5010 | import random
import decimal
import time
import requests
import logging
from pyramid.view import view_config
from pyramid.httpexceptions import HTTPFound
from datetime import datetime
from ..models import (
User,
Address
)
log = logging.getLogger(__name__)
custom_log = logging.getLogger('other.namespace.logging')
@view_config(route_name='/', renderer='appenlight_demo:templates/index.jinja2')
def home(request):
# its a bit too fast ;-)
time.sleep(0.02)
users = request.dbsession.query(User)
# lets load all our users from database and lazy load
# adresses to illustrate that every access to addresses
# generates new query
for user in users:
for addr in user.addresses:
pass
return {"random": random}
@view_config(route_name='action', match_param="action=exception",
renderer='string')
def test_exception(request):
    # do not buffer - send immediately - usually not a good setting for production
request.environ['appelight.force_send'] = 1
request.environ['appenlight.tags']['action'] = 'exception_request'
request.environ['appenlight.tags']['somedate'] = datetime.utcnow()
request.environ['appenlight.tags']['price'] = 25.5
request.environ['appenlight.tags']['count'] = random.randint(1, 5)
msg = 'this log entry was sent with exception report {}'
log.warning(msg.format(random.randint(1, 999)))
run_exc = random.randint(1, 4)
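    # Pick one of four failure modes so the demo produces a variety of exception reports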
dict__that_can_be_inspected = {
"key_integer": 12345,
"some_key_with_string": "Unladen Swallow",
"this_is_a_list": ["a", "b", "cyes", 42]
}
if run_exc == 1:
raise Exception('My custom exception %s' % random.randint(1, 30))
elif run_exc == 2:
decimal.Decimal('nondec')
elif run_exc == 3:
dict__that_can_be_inspected['non_existant']
else:
request.dbsession.execute('SELECT 1.0/0, %s ' % random.randint(1, 30))
raise Foo()
return {}
@view_config(route_name='action', match_param="action=slow_report",
renderer='string')
def test_slow_report(request):
    # do not buffer - send immediately - usually not a good setting for production
request.environ['appenlight.tags']['action'] = 'slow_request'
request.environ['appenlight.tags']['somedate'] = datetime.utcnow()
request.environ['appelight.force_send'] = 1
request.environ['appelight.message'] = "Client marks most timed calls " \
"as slow - for demonstration " \
"purposes"
request.registry.redis_conn.get('test key A')
request.registry.redis_conn.set('testkey', 'ABC')
base_url = request.route_url('/')
result = requests.get(base_url + 'test/slow_endpoint?msg='
'i_simulate_some_external_resource')
some_var_with_result = result.text
users = request.dbsession.query(User)
# lets load all our users from database and lazy load
# adresses to illustrate that every access to addresses
# generates new query
for user in users:
for addr in user.addresses:
pass
request.dbsession.execute(
'SELECT users.id, addresses.id, forums.id, posts.id , '
'1 + :param1 + :param2, :param3 '
'from users join addresses join forums join posts',
{"param1": -1, "param2": 11,
"param3": 'some_string'}).fetchall()
log.info(
'this log entry was sent with slow report %s' % random.randint(1, 999))
return HTTPFound('/')
@view_config(route_name='action', match_param="action=logging",
renderer='string')
def test_logging(request):
    # do not buffer - send immediately - usually not a good setting for production
request.environ['appelight.force_send'] = 1
custom_log.critical('yes life of ' + unichr(960))
custom_log.info('some info entry',
extra={'tag': 'foo', 'count': random.randint(1, 199)})
custom_log.error('and this is custom USER message: %s' % request.POST.get(
'log_message'), extra={'action': 'logging',
'price': random.randint(1, 199)})
custom_log.warning(
'Matched GET /\xc4\x85\xc5\xbc\xc4\x87\xc4\x99'
'\xc4\x99\xc4\x85/fizzbuzz some incorrect encoding here')
return HTTPFound('/')
@view_config(route_name='action', match_param="action=generate_intrusion_log",
renderer='string')
def generate_intrusion_log(request):
request.environ['appelight.force_send'] = 1
custom_log = logging.getLogger('security')
custom_log.critical('breach/fraud attempt',
extra={'action': 'fraud',
'user_id': random.randint(1, 10)})
return HTTPFound('/')
@view_config(route_name='action', match_param="action=slow_endpoint",
renderer='json')
def slow_endpoint(request):
# simulate slow endpoint
time.sleep(0.3)
return {'some_key': 'some_value'}
| bsd-3-clause | 5,970,967,025,054,382,000 | 35.838235 | 80 | 0.627745 | false |
Alex2114/Deb-3df | redeem/gcodes/T0_T1.py | 1 | 1483 | """
GCode T0 and T1
Select currently used extruder tool
Author: Mathieu Monney
email: zittix(at)xwaves(dot)net
Website: http://www.xwaves.net
License: CC BY-SA: http://creativecommons.org/licenses/by-sa/2.0/
"""
from GCodeCommand import GCodeCommand
import logging
class ToolChange(GCodeCommand):
def execute(self, g):
self.printer.path_planner.set_extruder(self.tool_number)
self.printer.current_tool = self.tool_name
def get_description(self):
return "Select currently used extruder tool to be T%s (%s)" % (self.tool_number, self.tool_name)
def is_buffered(self):
return True
def get_test_gcodes(self):
return ["T%s" % (self.tool_number)]
class T0(ToolChange):
def __init__(self, printer):
self.tool_name = "E"
self.tool_number = 0
super(T0, self).__init__(printer)
class T1(ToolChange):
def __init__(self, printer):
self.tool_name = "H"
self.tool_number = 1
super(T1, self).__init__(printer)
class T2(ToolChange):
def __init__(self, printer):
self.tool_name = "A"
self.tool_number = 2
super(T2, self).__init__(printer)
class T3(ToolChange):
def __init__(self, printer):
self.tool_name = "B"
self.tool_number = 3
super(T3, self).__init__(printer)
class T4(ToolChange):
def __init__(self, printer):
self.tool_name = "C"
self.tool_number = 4
super(T4, self).__init__(printer)
| gpl-3.0 | 7,099,931,974,142,642,000 | 25.482143 | 104 | 0.616318 | false |
mahabs/nitro | nssrc/com/citrix/netscaler/nitro/resource/config/vpn/vpnurl.py | 1 | 10823 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class vpnurl(base_resource) :
""" Configuration for VPN URL resource. """
def __init__(self) :
self._urlname = ""
self._linkname = ""
self._actualurl = ""
self._clientlessaccess = ""
self._comment = ""
self.___count = 0
@property
def urlname(self) :
"""Name of the bookmark link.<br/>Minimum length = 1.
"""
try :
return self._urlname
except Exception as e:
raise e
@urlname.setter
def urlname(self, urlname) :
"""Name of the bookmark link.<br/>Minimum length = 1
"""
try :
self._urlname = urlname
except Exception as e:
raise e
@property
def linkname(self) :
"""Description of the bookmark link. The description appears in the Access Interface.<br/>Minimum length = 1.
"""
try :
return self._linkname
except Exception as e:
raise e
@linkname.setter
def linkname(self, linkname) :
"""Description of the bookmark link. The description appears in the Access Interface.<br/>Minimum length = 1
"""
try :
self._linkname = linkname
except Exception as e:
raise e
@property
def actualurl(self) :
"""Web address for the bookmark link.<br/>Minimum length = 1.
"""
try :
return self._actualurl
except Exception as e:
raise e
@actualurl.setter
def actualurl(self, actualurl) :
"""Web address for the bookmark link.<br/>Minimum length = 1
"""
try :
self._actualurl = actualurl
except Exception as e:
raise e
@property
def clientlessaccess(self) :
"""If clientless access to the resource hosting the link is allowed, also use clientless access for the bookmarked web address in the Secure Client Access based session. Allows single sign-on and other HTTP processing on NetScaler Gateway for HTTPS resources.<br/>Default value: OFF<br/>Possible values = ON, OFF.
"""
try :
return self._clientlessaccess
except Exception as e:
raise e
@clientlessaccess.setter
def clientlessaccess(self, clientlessaccess) :
"""If clientless access to the resource hosting the link is allowed, also use clientless access for the bookmarked web address in the Secure Client Access based session. Allows single sign-on and other HTTP processing on NetScaler Gateway for HTTPS resources.<br/>Default value: OFF<br/>Possible values = ON, OFF
"""
try :
self._clientlessaccess = clientlessaccess
except Exception as e:
raise e
@property
def comment(self) :
"""Any comments associated with the bookmark link.
"""
try :
return self._comment
except Exception as e:
raise e
@comment.setter
def comment(self, comment) :
"""Any comments associated with the bookmark link.
"""
try :
self._comment = comment
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(vpnurl_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.vpnurl
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.urlname) :
return str(self.urlname)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
""" Use this API to add vpnurl.
"""
try :
if type(resource) is not list :
addresource = vpnurl()
addresource.urlname = resource.urlname
addresource.linkname = resource.linkname
addresource.actualurl = resource.actualurl
addresource.clientlessaccess = resource.clientlessaccess
addresource.comment = resource.comment
return addresource.add_resource(client)
else :
if (resource and len(resource) > 0) :
addresources = [ vpnurl() for _ in range(len(resource))]
for i in range(len(resource)) :
addresources[i].urlname = resource[i].urlname
addresources[i].linkname = resource[i].linkname
addresources[i].actualurl = resource[i].actualurl
addresources[i].clientlessaccess = resource[i].clientlessaccess
addresources[i].comment = resource[i].comment
result = cls.add_bulk_request(client, addresources)
return result
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
""" Use this API to delete vpnurl.
"""
try :
if type(resource) is not list :
deleteresource = vpnurl()
if type(resource) != type(deleteresource):
deleteresource.urlname = resource
else :
deleteresource.urlname = resource.urlname
return deleteresource.delete_resource(client)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
deleteresources = [ vpnurl() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].urlname = resource[i]
else :
if (resource and len(resource) > 0) :
deleteresources = [ vpnurl() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].urlname = resource[i].urlname
result = cls.delete_bulk_request(client, deleteresources)
return result
except Exception as e :
raise e
@classmethod
def update(cls, client, resource) :
""" Use this API to update vpnurl.
"""
try :
if type(resource) is not list :
updateresource = vpnurl()
updateresource.urlname = resource.urlname
updateresource.linkname = resource.linkname
updateresource.actualurl = resource.actualurl
updateresource.clientlessaccess = resource.clientlessaccess
updateresource.comment = resource.comment
return updateresource.update_resource(client)
else :
if (resource and len(resource) > 0) :
updateresources = [ vpnurl() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].urlname = resource[i].urlname
updateresources[i].linkname = resource[i].linkname
updateresources[i].actualurl = resource[i].actualurl
updateresources[i].clientlessaccess = resource[i].clientlessaccess
updateresources[i].comment = resource[i].comment
result = cls.update_bulk_request(client, updateresources)
return result
except Exception as e :
raise e
@classmethod
def unset(cls, client, resource, args) :
""" Use this API to unset the properties of vpnurl resource.
Properties that need to be unset are specified in args array.
"""
try :
if type(resource) is not list :
unsetresource = vpnurl()
if type(resource) != type(unsetresource):
unsetresource.urlname = resource
else :
unsetresource.urlname = resource.urlname
return unsetresource.unset_resource(client, args)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
unsetresources = [ vpnurl() for _ in range(len(resource))]
for i in range(len(resource)) :
unsetresources[i].urlname = resource[i]
else :
if (resource and len(resource) > 0) :
unsetresources = [ vpnurl() for _ in range(len(resource))]
for i in range(len(resource)) :
unsetresources[i].urlname = resource[i].urlname
result = cls.unset_bulk_request(client, unsetresources, args)
return result
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
""" Use this API to fetch all the vpnurl resources that are configured on netscaler.
"""
try :
if not name :
obj = vpnurl()
response = obj.get_resources(client, option_)
else :
if type(name) != cls :
if type(name) is not list :
obj = vpnurl()
obj.urlname = name
response = obj.get_resource(client, option_)
else :
if name and len(name) > 0 :
response = [vpnurl() for _ in range(len(name))]
obj = [vpnurl() for _ in range(len(name))]
for i in range(len(name)) :
obj[i] = vpnurl()
obj[i].urlname = name[i]
response[i] = obj[i].get_resource(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
""" Use this API to fetch filtered set of vpnurl resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = vpnurl()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client) :
""" Use this API to count the vpnurl resources configured on NetScaler.
"""
try :
obj = vpnurl()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
""" Use this API to count filtered the set of vpnurl resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = vpnurl()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
class Clientlessaccess:
ON = "ON"
OFF = "OFF"
class vpnurl_response(base_response) :
def __init__(self, length=1) :
self.vpnurl = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.vpnurl = [vpnurl() for _ in range(length)]
| apache-2.0 | -4,927,869,333,246,552,000 | 29.834758 | 315 | 0.681881 | false |
FedoraScientific/salome-kernel | src/Container/SALOME_ComponentPy.py | 1 | 11883 | #! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2007-2016 CEA/DEN, EDF R&D, OPEN CASCADE
#
# Copyright (C) 2003-2007 OPEN CASCADE, EADS/CCR, LIP6, CEA/DEN,
# CEDRAT, EDF R&D, LEG, PRINCIPIA R&D, BUREAU VERITAS
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : [email protected]
#
# SALOME Container : implementation of container and engine for Kernel
# File : SALOME_ComponentPy.py
# Author : Paul RASCLE, EDF
# Module : SALOME
# $Header$
## @package SALOME_ComponentPy
# \brief python implementation of component interface for Kernel
#
import os
import sys
import time
import string
import signal
from omniORB import CORBA, PortableServer, any
import Engines, Engines__POA
import Registry
from Utils_Identity import *
from SALOME_NamingServicePy import *
from libNOTIFICATION import *
from SALOME_utilities import *
from thread import *
#=============================================================================
_Sleeping = 0
## define an implementation of the component interface Engines::Component
#
#
class SALOME_ComponentPy_i (Engines__POA.EngineComponent):
_orb = None
_poa = None
_fieldsDict = []
_studyId = -1
#-------------------------------------------------------------------------
def __init__ (self, orb, poa, contID, containerName,
instanceName, interfaceName, notif=False):
# Notif for notification services
# NOT YET IMPLEMENTED
MESSAGE( "SALOME_ComponentPy_i::__init__" + " " + str (containerName) + " " + str(instanceName) + " " + str(interfaceName) )
self._orb = orb
self._poa = poa
self._instanceName = instanceName
self._interfaceName = interfaceName
self._containerName = containerName
self._notif = notif
self._myConnexionToRegistry = 0
self._graphName = ''
self._nodeName = ''
self._serviceName = ''
self._ThreadId = 0
self._StartUsed = 0
self._ThreadCpuUsed = 0
self._Executed = 0
self._contId = contID
naming_service = SALOME_NamingServicePy_i(self._orb)
myMachine=getShortHostName()
Component_path = self._containerName + "/" + self._instanceName
MESSAGE( 'SALOME_ComponentPy_i Register' + str( Component_path ) )
id_o = poa.activate_object(self)
compo_o = poa.id_to_reference(id_o)
naming_service.Register(compo_o, Component_path)
        # Add the component instance to the registry
obj = naming_service.Resolve('/Registry')
if obj is None:
MESSAGE( "Registry Reference is invalid" )
else:
regist = obj._narrow(Registry.Components)
if regist is None:
MESSAGE( "Registry Reference is invalid" )
else:
ior = orb.object_to_string(contID)
MESSAGE( ior )
lesInfos = Identity(self._instanceName)
infos = Registry.Infos(lesInfos._name,
lesInfos._pid,
lesInfos._machine,
lesInfos._adip,
lesInfos._uid,
lesInfos._pwname,
int(lesInfos._tc_start),
0,0,0,
lesInfos._cdir,
-1,
ior)
res = regist.add(infos)
self._notifSupplier = NOTIFICATION_Supplier_Swig(instanceName, notif)
#-------------------------------------------------------------------------
def _get_instanceName(self):
MESSAGE( "SALOME_ComponentPy_i::_get_instanceName" )
return self._instanceName
#-------------------------------------------------------------------------
def _get_interfaceName(self):
MESSAGE( "SALOME_ComponentPy_i::_get_interfaceName" )
return self._interfaceName
#-------------------------------------------------------------------------
def ping(self):
MESSAGE( "SALOME_ComponentPy_i::ping() pid " + str(os.getpid()) )
#-------------------------------------------------------------------------
def setProperties(self, dico):
self._fieldsDict = dico
#-------------------------------------------------------------------------
def getProperties(self):
return self._fieldsDict
#-------------------------------------------------------------------------
def destroy(self):
MESSAGE( "SALOME_ComponentPy_i::destroy" )
id = self._poa.servant_to_id(self)
self._poa.deactivate_object(id)
return
#-------------------------------------------------------------------------
def GetContainerRef(self):
MESSAGE( "SALOME_ComponentPy_i::GetContainerRef" )
return self._contId._narrow(Engines.Container)
#-------------------------------------------------------------------------
def beginService(self , serviceName ):
MESSAGE( "Send BeginService notification for " + str(serviceName) + " for graph/node " + str(self._graphName) + " " + str(self._nodeName) )
MESSAGE( "Component instance : " + str ( self._instanceName ) )
self._serviceName = str(serviceName)
self._ThreadId = get_ident()
self._StartUsed = 0
self._StartUsed = self.CpuUsed_impl()
self._ThreadCpuUsed = 0
self._Executed = 1
print "beginService for ",serviceName," Component instance : ",self._instanceName
MESSAGE( "SALOME_ComponentPy_i::beginService _StartUsed " + str( self._ThreadId ) + " " + str( self._StartUsed ) )
for e in self._fieldsDict:
key=e.key
value=any.from_any(e.value)
if isinstance(value,str):
os.environ[key]=value
#-------------------------------------------------------------------------
def endService(self , serviceName ):
MESSAGE( "Send EndService notification for " + str( self._ThreadId ) + " " + str(serviceName) + " for graph/node " + str(self._graphName) + " " + str(self._nodeName) + " CpuUsed " + str( self.CpuUsed_impl() ) )
MESSAGE( "Component instance : " + str(self._instanceName) )
print "endService for",serviceName,"Component instance :",self._instanceName,"Cpu Used:",self.CpuUsed_impl()," (s) "
#-------------------------------------------------------------------------
def sendMessage(self , event_type , message ):
self._notifSupplier.Send(self.graphName(), self.nodeName(), event_type, message)
#-------------------------------------------------------------------------
def Names(self , GraphName , NodeName ):
MESSAGE( "SALOME_ComponentPy_i::Names" + str(GraphName) + str(NodeName) )
self._graphName = GraphName
self._nodeName = NodeName
#-------------------------------------------------------------------------
def graphName(self):
return self._graphName
#-------------------------------------------------------------------------
def nodeName(self):
return self._nodeName
#-------------------------------------------------------------------------
def Killer(self, ThreadId, signum):
#if ThreadId > 0:
#if signum == 0:
#if pthread_cancel(ThreadId): <- from C++
# return 0
#else:
# MESSAGE()
#else:
#if pthread_kill(ThreadId): <- from C++
# return 0
#else:
# MESSAGE()
return 1
#-------------------------------------------------------------------------
def Kill_impl(self):
MESSAGE( "SALOME_ComponentPy_i::Kill_impl" )
RetVal = 0
        if self._ThreadId > 0 and self._ThreadId != get_ident():
            RetVal = self.Killer(self._ThreadId, 0)
self._ThreadId = 0
return RetVal
#-------------------------------------------------------------------------
def Stop_impl(self):
MESSAGE( "SALOME_ComponentPy_i::Stop_impl" )
RetVal = 0
        if self._ThreadId > 0 and self._ThreadId != get_ident():
            RetVal = self.Killer(self._ThreadId, 0)
self._ThreadId = 0
return RetVal
#-------------------------------------------------------------------------
def Suspend_impl(self):
MESSAGE( "SALOME_ComponentPy_i::Suspend_impl" )
global _Sleeping
RetVal = 0
        if self._ThreadId > 0 and self._ThreadId != get_ident():
if _Sleeping > 0:
return 0
else:
                RetVal = self.Killer(self._ThreadId, signal.SIGINT)
if RetVal > 0:
_Sleeping = 1
return RetVal
#-------------------------------------------------------------------------
def Resume_impl(self):
MESSAGE( "SALOME_ComponentPy_i::Resume_impl" )
global _Sleeping
RetVal = 0
        if self._ThreadId > 0 and self._ThreadId != get_ident():
if _Sleeping > 0:
_Sleeping = 0
RetVal = 1
else:
RetVal = 0
return RetVal
#-------------------------------------------------------------------------
def CpuUsed_impl(self):
if ( self._ThreadId | self._Executed ) :
if self._ThreadId == get_ident() :
cpu = time.clock()
self._ThreadCpuUsed = cpu - self._StartUsed
MESSAGE( "SALOME_ComponentPy_i::CpuUsed_impl " + self._serviceName + " " + str( int(cpu) ) + " - " + str( self._StartUsed ) + " = " + str( self._ThreadCpuUsed ) )
return self._ThreadCpuUsed
MESSAGE( "SALOME_ComponentPy_i::CpuUsed_impl " + self._serviceName + " " + str( self._ThreadCpuUsed ) )
return self._ThreadCpuUsed
MESSAGE( "SALOME_ComponentPy_i::CpuUsed_impl self._StartUsed " + self._serviceName + " " + str(self._StartUsed) )
return 0
#-------------------------------------------------------------------------
def DumpPython(self, theStudy, isPublished, isMultiFile):
aBuffer = "\0"
if isMultiFile :
aBuffer = "def RebuildData(theStudy): pass\n\0"
return (aBuffer, 1)
#-------------------------------------------------------------------------
def getStudyId(self):
return self._studyId
#-------------------------------------------------------------------------
def hasObjectInfo(self):
return 0
#-------------------------------------------------------------------------
def getObjectInfo(self, studyId, entry):
return ""
#-------------------------------------------------------------------------
def getVersion(self):
return "" # empty string means "unknown" version
#-------------------------------------------------------------------------
pass # end of SALOME_ComponentPy_i
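# ---------------------------------------------------------------------------
# Usage sketch (editor's addition): a Python SALOME engine is typically written
# by subclassing SALOME_ComponentPy_i and bracketing each service call with
# beginService/endService so that CPU accounting and notifications work.  The
# component name, IDL skeleton (MYCOMPONENT__POA) and service below are
# hypothetical; the real ones come from the module's IDL, and instances are
# created by the container rather than by hand, so the sketch stays commented.
#
# class MYCOMPONENT(MYCOMPONENT__POA.MYCOMPONENT, SALOME_ComponentPy_i):
#     def __init__(self, orb, poa, contID, containerName,
#                  instanceName, interfaceName):
#         SALOME_ComponentPy_i.__init__(self, orb, poa, contID, containerName,
#                                       instanceName, interfaceName)
#     def sayHello(self):
#         self.beginService("sayHello")
#         try:
#             return "hello from %s" % self._instanceName
#         finally:
#             self.endService("sayHello")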
| lgpl-2.1 | -5,998,506,831,639,814,000 | 35.675926 | 219 | 0.473702 | false |
suninsky/ReceiptOCR | Python/server/bin/painter.py | 1 | 2146 | #!/home/haop/code/ReceiptOCR/Python/server/bin/python2.7
#
# The Python Imaging Library
# $Id$
#
# this demo script illustrates pasting into an already displayed
# photoimage. note that the current version of Tk updates the whole
# image every time we paste, so to get decent performance, we split
# the image into a set of tiles.
#
try:
from tkinter import Tk, Canvas, NW
except ImportError:
from Tkinter import Tk, Canvas, NW
from PIL import Image, ImageTk
import sys
#
# painter widget
class PaintCanvas(Canvas):
def __init__(self, master, image):
Canvas.__init__(self, master, width=image.size[0], height=image.size[1])
# fill the canvas
self.tile = {}
self.tilesize = tilesize = 32
xsize, ysize = image.size
for x in range(0, xsize, tilesize):
for y in range(0, ysize, tilesize):
box = x, y, min(xsize, x+tilesize), min(ysize, y+tilesize)
tile = ImageTk.PhotoImage(image.crop(box))
self.create_image(x, y, image=tile, anchor=NW)
self.tile[(x, y)] = box, tile
self.image = image
self.bind("<B1-Motion>", self.paint)
def paint(self, event):
xy = event.x - 10, event.y - 10, event.x + 10, event.y + 10
im = self.image.crop(xy)
# process the image in some fashion
im = im.convert("L")
self.image.paste(im, xy)
self.repair(xy)
def repair(self, box):
# update canvas
dx = box[0] % self.tilesize
dy = box[1] % self.tilesize
for x in range(box[0]-dx, box[2]+1, self.tilesize):
for y in range(box[1]-dy, box[3]+1, self.tilesize):
try:
xy, tile = self.tile[(x, y)]
tile.paste(self.image.crop(xy))
except KeyError:
pass # outside the image
self.update_idletasks()
#
# main
if len(sys.argv) != 2:
print("Usage: painter file")
sys.exit(1)
root = Tk()
im = Image.open(sys.argv[1])
if im.mode != "RGB":
im = im.convert("RGB")
PaintCanvas(root, im).pack()
root.mainloop()
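# Usage sketch (editor's addition): run the demo against any image file and
# drag with the left mouse button; the paint handler above converts the 20x20
# patch under the cursor to grayscale ("L") and pastes it back, e.g.
#   python painter.py sample.jpg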
| mit | -8,268,578,540,794,225,000 | 25.170732 | 80 | 0.576421 | false |
cszc/meethalfway | djangohalfway/halfwayapp/views.py | 1 | 2943 | from django.shortcuts import render
#import googlemaps
import csv
import time
import json
import requests
from django.http import HttpResponse
from django import forms
from . import models
class EnterIDForm(forms.Form):
meeting_id = forms.CharField()
    def clean_meeting_id(self):
        # Standard Django per-field validation hook for the meeting_id field.
        meeting_id = self.cleaned_data['meeting_id']
        if models.Meeting.objects.filter(trip_id=meeting_id).exists():
            return meeting_id
        raise forms.ValidationError("Please enter a valid Meeting Trip ID number.")
class AddAddress(forms.ModelForm):
class Meta:
model = models.Address
fields = ["street", "city", "state", "zip_code"]
class AddParticipant(forms.ModelForm):
class Meta:
model = models.Participant
fields = ["transit_mode"]
widgets = {
'transit_mode': forms.Select(),
}
class AddMeeting(forms.ModelForm):
class Meta:
model = models.Meeting
fields = ["business_type"]
widgets = {
'business_type': forms.Select(),
}
def home(request):
if request.method == 'POST':
address = AddAddress(request.POST)
participant = AddParticipant(request.POST)
meeting = AddMeeting(request.POST)
if address.is_valid() and participant.is_valid() and meeting.is_valid():
address_obj = address.save()
part_obj = participant.save()
part_obj.starting_location = address_obj
part_obj.save()
meeting_obj = meeting.save()
meeting_obj.participant_one = part_obj
meeting_obj.trip_id = meeting_obj.random_words()
meeting_obj.save()
c = {
'uniq': meeting_obj.trip_id
}
return render(request,'halfwayapp/response.html',c)
else:
address = AddAddress()
participant = AddParticipant()
meeting = AddMeeting()
c = {
'forms': [address, participant, meeting],
}
return render(request, 'halfwayapp/home.html', c)
def personA(request, address, participant, meeting):
address_obj = address.save()
part_obj = participant.save()
part_obj.starting_location = address_obj
part_obj.save()
meeting_obj = meeting.save()
meeting_obj.participant_one = part_obj
meeting_obj.trip_id = meeting_obj.hash_id()
meeting_obj.save()
c = {
'uniq': meeting_obj.trip_id
}
return render(request,'halfwayapp/response.html',c)
def respond(request):
if request.method == 'POST':
trip_id = GetMeetingID(request.Post)
# address = AddAddress(request.POST)
# participant = AddParticipant(request.POST)
# meeting = AddMeeting(request.POST)
# if address.is_valid() and participant.is_valid() and meeting.is_valid():
# address_obj = address.save()
# part_obj = participant.save()
# part_obj.starting_location = address_obj
# part_obj.save()
# meeting_obj = meeting.save()
# meeting_obj.participant_one = part_obj
# meeting_obj.trip_id = meeting_obj.hash_id()
# meeting_obj.save()
# c = {
# 'uniq': meeting_obj.trip_id
# }
# return render(request,'halfwayapp/response.html',c)
# else:
# address = AddAddress()
# participant = AddParticipant()
# meeting = AddMeeting()
c = {
        'forms': [EnterIDForm()]
}
| apache-2.0 | -7,919,855,661,507,081,000 | 23.322314 | 78 | 0.691131 | false |
pacogomez/pyvcloud | tests/vcd_ext.py | 1 | 1299 | # VMware vCloud Director Python SDK
# Copyright (c) 2017 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
import yaml
from pyvcloud.vcd.client import Client
from pyvcloud.vcd.client import NSMAP
from pyvcloud.vcd.client import TaskStatus
from pyvcloud.vcd.extension import Extension
from pyvcloud.vcd.test import TestCase
class TestExtension(TestCase):
def test_0001_get_extension(self):
extension = Extension(self.client)
ext_info = extension.get_extension_info(
self.config['vcd']['extension_name'])
assert ext_info
assert ext_info['name'] == self.config['vcd']['extension_name']
assert ext_info['filter_1'].startswith('/api/')
if __name__ == '__main__':
unittest.main()
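# Usage sketch (editor's addition): outside the test harness the same lookup is
# just two calls on an authenticated pyvcloud Client.  How the client is built
# and logged in depends on local credentials, and 'my-extension' is a made-up
# name, so the example is left as comments.
#
# ext = Extension(client)                        # client: logged-in Client
# info = ext.get_extension_info('my-extension')
# print(info['name'], info['filter_1'])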
| apache-2.0 | -2,136,174,322,080,307,700 | 34.108108 | 74 | 0.724403 | false |
alirizakeles/tendenci | tendenci/apps/profiles/management/commands/delete_non_contributors.py | 1 | 3096 | from optparse import make_option
from django.core.management.base import BaseCommand
from django.core.exceptions import ObjectDoesNotExist
class Command(BaseCommand):
"""
    Report counts of contributors, non-contributors and total users,
    and optionally delete the non-contributing users.
"""
option_list = BaseCommand.option_list + (
make_option('-d', '--delete',
action='store_true',
default=False,
help='Delete non contributing users'),
)
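    # Usage sketch (editor's addition): as a Django management command this is
    # run through manage.py; the command name is assumed to follow the module's
    # file name, as is standard for Django management commands:
    #   python manage.py delete_non_contributors       # report counts only
    #   python manage.py delete_non_contributors -d    # also delete slackers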
def handle(self, *args, **options):
from django.contrib.auth.models import User
contribs = []
try:
from tendenci.apps.directories.models import Directory
for directory in Directory.objects.all():
contribs.append(directory.creator)
contribs.append(directory.owner)
except ImportError:
pass
try:
from tendenci.apps.articles.models import Article
for article in Article.objects.all():
contribs.append(article.creator)
contribs.append(article.owner)
except ImportError:
pass
try:
from tendenci.apps.events.models import Event
for event in Event.objects.all():
contribs.append(event.creator)
contribs.append(event.owner)
except ImportError:
pass
try:
from tendenci.apps.photos.models import Photo
for photo in Photo.objects.all():
contribs.append(photo.creator)
contribs.append(photo.owner)
except ImportError:
pass
try:
from tendenci.apps.pages.models import Page
for page in Page.objects.all():
contribs.append(page.creator)
contribs.append(page.owner)
except ImportError:
pass
try:
from tendenci.apps.news.models import News
for news in News.objects.all():
contribs.append(news.creator)
contribs.append(news.owner)
except ImportError:
pass
contribs = list(set(contribs)) # remove duplicates
slackers = User.objects.exclude(username__in=[c.username for c in contribs if c])
print 'contribs', len(contribs)
print 'slackers', slackers.count()
print 'everyone', User.objects.count()
        print 'Pass the -d or --delete flag to delete non-contributing users'
delete = options['delete']
if delete:
from django.db import connections, DEFAULT_DB_ALIAS, IntegrityError
using = options.get('database', DEFAULT_DB_ALIAS)
connection = connections[using]
cursor = connection.cursor()
cursor.execute('SET FOREIGN_KEY_CHECKS=0;')
for slacker in slackers:
try:
print slacker
slacker.delete()
except IntegrityError as e:
print 'Integrity Error deleting', slacker
            cursor.execute('SET FOREIGN_KEY_CHECKS=1;')
| gpl-3.0 | 589,413,971,126,053,200 | 31.946809 | 89 | 0.578165 | false |
victorywang80/Maintenance | saltstack/src/tests/integration/states/ssh.py | 1 | 6090 | '''
Test the ssh_known_hosts and ssh_auth states
'''
# Import python libs
import os
import shutil
# Import Salt Testing libs
from salttesting import skipIf
from salttesting.helpers import (
destructiveTest,
ensure_in_syspath,
with_system_account
)
ensure_in_syspath('../../')
# Import salt libs
import integration
KNOWN_HOSTS = os.path.join(integration.TMP, 'known_hosts')
GITHUB_FINGERPRINT = '16:27:ac:a5:76:28:2d:36:63:1b:56:4d:eb:df:a6:48'
GITHUB_IP = '207.97.227.239'
class SSHKnownHostsStateTest(integration.ModuleCase,
integration.SaltReturnAssertsMixIn):
'''
Validate the ssh state
'''
def tearDown(self):
if os.path.isfile(KNOWN_HOSTS):
os.remove(KNOWN_HOSTS)
super(SSHKnownHostsStateTest, self).tearDown()
def test_present(self):
'''
ssh_known_hosts.present
'''
kwargs = {
'name': 'github.com',
'user': 'root',
'fingerprint': GITHUB_FINGERPRINT,
'config': KNOWN_HOSTS
}
# test first
ret = self.run_state('ssh_known_hosts.present', test=True, **kwargs)
self.assertSaltNoneReturn(ret)
# save once, new key appears
ret = self.run_state('ssh_known_hosts.present', **kwargs)
try:
self.assertSaltTrueReturn(ret)
except AssertionError as err:
try:
self.assertInSaltComment(
'Unable to receive remote host key', ret
)
self.skipTest('Unable to receive remote host key')
except AssertionError:
# raise initial assertion error
raise err
self.assertSaltStateChangesEqual(
ret, GITHUB_FINGERPRINT, keys=('new', 'fingerprint')
)
# save twice, no changes
ret = self.run_state('ssh_known_hosts.present', **kwargs)
self.assertSaltStateChangesEqual(ret, {})
# test again, nothing is about to be changed
ret = self.run_state('ssh_known_hosts.present', test=True, **kwargs)
self.assertSaltTrueReturn(ret)
# then add a record for IP address
ret = self.run_state('ssh_known_hosts.present',
**dict(kwargs, name=GITHUB_IP))
self.assertSaltStateChangesEqual(
ret, GITHUB_FINGERPRINT, keys=('new', 'fingerprint')
)
# record for every host must be available
ret = self.run_function(
'ssh.get_known_host', ['root', 'github.com'], config=KNOWN_HOSTS
)
try:
self.assertNotIn(ret, ('', None))
except AssertionError:
raise AssertionError(
'Salt return {0!r} is in (\'\', None).'.format(ret)
)
ret = self.run_function(
'ssh.get_known_host', ['root', GITHUB_IP], config=KNOWN_HOSTS
)
try:
self.assertNotIn(ret, ('', None, {}))
except AssertionError:
raise AssertionError(
'Salt return {0!r} is in (\'\', None,'.format(ret) + ' {})'
)
def test_present_fail(self):
# save something wrong
ret = self.run_state(
'ssh_known_hosts.present',
name='github.com',
user='root',
fingerprint='aa:bb:cc:dd',
config=KNOWN_HOSTS
)
self.assertSaltFalseReturn(ret)
def test_absent(self):
'''
ssh_known_hosts.absent
'''
known_hosts = os.path.join(integration.FILES, 'ssh', 'known_hosts')
shutil.copyfile(known_hosts, KNOWN_HOSTS)
if not os.path.isfile(KNOWN_HOSTS):
self.skipTest(
'Unable to copy {0} to {1}'.format(
known_hosts, KNOWN_HOSTS
)
)
kwargs = {'name': 'github.com', 'user': 'root', 'config': KNOWN_HOSTS}
# test first
ret = self.run_state('ssh_known_hosts.absent', test=True, **kwargs)
self.assertSaltNoneReturn(ret)
# remove once, the key is gone
ret = self.run_state('ssh_known_hosts.absent', **kwargs)
self.assertSaltStateChangesEqual(
ret, GITHUB_FINGERPRINT, keys=('old', 'fingerprint')
)
# remove twice, nothing has changed
ret = self.run_state('ssh_known_hosts.absent', **kwargs)
self.assertSaltStateChangesEqual(ret, {})
# test again
ret = self.run_state('ssh_known_hosts.absent', test=True, **kwargs)
self.assertSaltNoneReturn(ret)
class SSHAuthStateTests(integration.ModuleCase,
integration.SaltReturnAssertsMixIn):
@destructiveTest
@skipIf(os.geteuid() != 0, 'you must be root to run this test')
@with_system_account('issue_7409', on_existing='delete', delete=True)
def test_issue_7409_no_linebreaks_between_keys(self, username):
userdetails = self.run_function('user.info', [username])
user_ssh_dir = os.path.join(userdetails['home'], '.ssh')
authorized_keys_file = os.path.join(user_ssh_dir, 'authorized_keys')
ret = self.run_state(
'file.managed',
name=authorized_keys_file,
user=username,
makedirs=True,
# Explicit no ending line break
contents='ssh-rsa AAAAB3NzaC1kc3MAAACBAL0sQ9fJ5bYTEyY== root'
)
ret = self.run_state(
'ssh_auth.present',
name='AAAAB3NzaC1kcQ9J5bYTEyZ==',
enc='ssh-rsa',
user=username,
comment=username
)
self.assertSaltTrueReturn(ret)
self.assertSaltStateChangesEqual(
ret, {'AAAAB3NzaC1kcQ9J5bYTEyZ==': 'New'}
)
self.assertEqual(
open(authorized_keys_file, 'r').read(),
'ssh-rsa AAAAB3NzaC1kc3MAAACBAL0sQ9fJ5bYTEyY== root\n'
'ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {0}\n'.format(username)
)
if __name__ == '__main__':
from integration import run_tests
run_tests(SSHKnownHostsStateTest)
| apache-2.0 | -317,516,152,801,650,940 | 31.393617 | 78 | 0.569458 | false |
davibe/cerbero | cerbero/bootstrap/linux.py | 1 | 5412 | # cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from cerbero.bootstrap import BootstraperBase
from cerbero.bootstrap.bootstraper import register_bootstraper
from cerbero.config import Platform, Architecture, Distro, DistroVersion
from cerbero.utils import shell
class UnixBootstraper (BootstraperBase):
tool = ''
packages = []
distro_packages = {}
def start(self):
if self.config.distro_packages_install:
            packages = self.packages[:]
            if self.config.distro_version in self.distro_packages:
                packages += self.distro_packages[self.config.distro_version]
            shell.call(self.tool % ' '.join(packages))
class DebianBootstraper (UnixBootstraper):
tool = 'sudo apt-get install %s'
packages = ['autotools-dev', 'automake', 'autoconf', 'libtool', 'g++',
'autopoint', 'make', 'cmake', 'bison', 'flex', 'yasm',
'pkg-config', 'gtk-doc-tools', 'libxv-dev', 'libx11-dev',
'libpulse-dev', 'python-dev', 'texinfo', 'gettext',
'build-essential', 'pkg-config', 'doxygen', 'curl',
'libxext-dev', 'libxi-dev', 'x11proto-record-dev',
'libxrender-dev', 'libgl1-mesa-dev', 'libxfixes-dev',
'libxdamage-dev', 'libxcomposite-dev', 'libasound2-dev',
'libxml-simple-perl', 'dpkg-dev', 'debhelper',
'build-essential', 'devscripts', 'fakeroot', 'transfig',
'gperf', 'libdbus-glib-1-dev', 'wget', 'glib-networking']
distro_packages = {
DistroVersion.DEBIAN_SQUEEZE: ['libgtk2.0-dev'],
DistroVersion.UBUNTU_MAVERICK: ['libgtk2.0-dev'],
DistroVersion.UBUNTU_LUCID: ['libgtk2.0-dev'],
DistroVersion.UBUNTU_NATTY: ['libgtk2.0-dev'],
DistroVersion.DEBIAN_WHEEZY: ['libgdk-pixbuf2.0-dev'],
DistroVersion.DEBIAN_JESSIE: ['libgdk-pixbuf2.0-dev'],
DistroVersion.UBUNTU_ONEIRIC: ['libgdk-pixbuf2.0-dev'],
DistroVersion.UBUNTU_PRECISE: ['libgdk-pixbuf2.0-dev'],
}
def __init__(self, config):
UnixBootstraper.__init__(self, config)
if self.config.target_platform == Platform.WINDOWS:
if self.config.arch == Architecture.X86_64:
self.packages.append('libc6:i386')
if self.config.distro_version in [DistroVersion.DEBIAN_SQUEEZE,
DistroVersion.UBUNTU_MAVERICK, DistroVersion.UBUNTU_LUCID]:
self.packages.remove('glib-networking')
if self.config.distro_version in [DistroVersion.UBUNTU_LUCID]:
self.packages.remove('autopoint')
class RedHatBootstraper (UnixBootstraper):
tool = 'su -c "yum install %s"'
packages = ['gcc', 'gcc-c++', 'automake', 'autoconf', 'libtool',
'gettext-devel', 'make', 'cmake', 'bison', 'flex', 'yasm',
'pkgconfig', 'gtk-doc', 'curl', 'doxygen', 'texinfo',
'texinfo-tex', 'texlive-dvips', 'docbook-style-xsl',
'transfig', 'intltool', 'rpm-build', 'redhat-rpm-config',
'python-devel', 'libXrender-devel', 'pulseaudio-libs-devel',
'libXv-devel', 'mesa-libGL-devel', 'libXcomposite-devel',
'alsa-lib-devel', 'perl-ExtUtils-MakeMaker', 'libXi-devel',
'perl-XML-Simple', 'gperf', 'gdk-pixbuf2-devel', 'wget',
'docbook-utils-pdf', 'glib-networking', 'help2man','glib2-devel']
def __init__(self, config):
UnixBootstraper.__init__(self, config)
if self.config.target_platform == Platform.WINDOWS:
if self.config.arch == Architecture.X86_64:
self.packages.append('glibc.i686')
class OpenSuseBootstraper (UnixBootstraper):
tool = 'sudo zypper install %s'
packages = ['gcc', 'automake', 'autoconf', 'gcc-c++', 'libtool',
'gettext-tools', 'make', 'cmake', 'bison', 'flex', 'yasm',
'gtk-doc', 'curl', 'doxygen', 'texinfo',
'texlive', 'docbook-xsl-stylesheets',
'transfig', 'intltool', 'patterns-openSUSE-devel_rpm_build',
'python-devel', 'xorg-x11-libXrender-devel', 'libpulse-devel',
'xorg-x11-libXv-devel', 'Mesa-libGL-devel', 'libXcomposite-devel',
'alsa-devel', 'libXi-devel', 'Mesa-devel',
'perl-XML-Simple', 'gperf', 'gdk-pixbuf-devel', 'wget',
'docbook-utils', 'glib-networking']
def register_all():
register_bootstraper(Distro.DEBIAN, DebianBootstraper)
register_bootstraper(Distro.REDHAT, RedHatBootstraper)
register_bootstraper(Distro.SUSE, OpenSuseBootstraper)
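# Usage sketch (editor's addition): support for another distro follows the same
# pattern -- subclass UnixBootstraper with that distro's install command and
# package list, then register it for the matching Distro constant.  The Arch
# example below is purely illustrative; the package names and the existence of
# a Distro.ARCH constant are assumptions, so it is left commented out.
#
# class ArchBootstraper (UnixBootstraper):
#     tool = 'sudo pacman -S --needed %s'
#     packages = ['base-devel', 'cmake', 'yasm', 'gperf', 'wget']
#
# register_bootstraper(Distro.ARCH, ArchBootstraper)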
| lgpl-2.1 | 1,586,423,981,772,867,800 | 46.473684 | 81 | 0.633038 | false |
hail-is/hail | hail/python/test/hail/matrixtable/test_matrix_table.py | 1 | 69087 | import math
import operator
import random
import pytest
import hail as hl
import hail.expr.aggregators as agg
from hail.utils.java import Env
from hail.utils.misc import new_temp_file
from ..helpers import *
setUpModule = startTestHailContext
tearDownModule = stopTestHailContext
class Tests(unittest.TestCase):
def get_mt(self, min_partitions=None) -> hl.MatrixTable:
return hl.import_vcf(resource("sample.vcf"), min_partitions=min_partitions)
def test_range_count(self):
self.assertEqual(hl.utils.range_matrix_table(7, 13).count(), (7, 13))
def test_row_key_field_show_runs(self):
ds = self.get_mt()
ds.locus.show()
def test_update(self):
mt = self.get_mt()
mt = mt.select_entries(dp=mt.DP, gq=mt.GQ)
self.assertTrue(schema_eq(mt.entry.dtype, hl.tstruct(dp=hl.tint32, gq=hl.tint32)))
def test_annotate(self):
mt = self.get_mt()
mt = mt.annotate_globals(foo=5)
self.assertEqual(mt.globals.dtype, hl.tstruct(foo=hl.tint32))
mt = mt.annotate_rows(x1=agg.count(),
x2=agg.fraction(False),
x3=agg.count_where(True),
x4=mt.info.AC + mt.foo)
mt = mt.annotate_cols(apple=6)
mt = mt.annotate_cols(y1=agg.count(),
y2=agg.fraction(False),
y3=agg.count_where(True),
y4=mt.foo + mt.apple)
expected_schema = hl.tstruct(s=hl.tstr, apple=hl.tint32, y1=hl.tint64, y2=hl.tfloat64, y3=hl.tint64,
y4=hl.tint32)
self.assertTrue(schema_eq(mt.col.dtype, expected_schema),
"expected: " + str(mt.col.dtype) + "\nactual: " + str(expected_schema))
mt = mt.select_entries(z1=mt.x1 + mt.foo,
z2=mt.x1 + mt.y1 + mt.foo)
self.assertTrue(schema_eq(mt.entry.dtype, hl.tstruct(z1=hl.tint64, z2=hl.tint64)))
def test_annotate_globals(self):
mt = hl.utils.range_matrix_table(1, 1)
ht = hl.utils.range_table(1, 1)
data = [
(5, hl.tint, operator.eq),
(float('nan'), hl.tfloat32, lambda x, y: str(x) == str(y)),
(float('inf'), hl.tfloat64, lambda x, y: str(x) == str(y)),
(float('-inf'), hl.tfloat64, lambda x, y: str(x) == str(y)),
(1.111, hl.tfloat64, operator.eq),
([hl.Struct(**{'a': None, 'b': 5}),
hl.Struct(**{'a': 'hello', 'b': 10})], hl.tarray(hl.tstruct(a=hl.tstr, b=hl.tint)), operator.eq)
]
for x, t, f in data:
self.assertTrue(f(hl.eval(mt.annotate_globals(foo=hl.literal(x, t)).foo), x), f"{x}, {t}")
self.assertTrue(f(hl.eval(ht.annotate_globals(foo=hl.literal(x, t)).foo), x), f"{x}, {t}")
def test_head(self):
# no empty partitions
mt1 = hl.utils.range_matrix_table(10, 10)
# empty partitions at front
mt2 = hl.utils.range_matrix_table(20, 10, 20)
mt2 = mt2.filter_rows(mt2.row_idx > 9)
mts = [mt1, mt2]
for mt in mts:
tmp_file = new_temp_file(extension='mt')
mt.write(tmp_file)
mt_readback = hl.read_matrix_table(tmp_file)
for mt_ in [mt, mt_readback]:
assert mt_.head(1).count_rows() == 1
assert mt_.head(1)._force_count_rows() == 1
assert mt_.head(100).count_rows() == 10
assert mt_.head(100)._force_count_rows() == 10
def test_head_cols(self):
mt1 = hl.utils.range_matrix_table(10, 10)
assert mt1.head(1, 2).count() == (1, 2)
assert mt1.head(1, None).count() == (1, 10)
assert mt1.head(None, 1).count() == (10, 1)
def test_tail(self):
# no empty partitions
mt1 = hl.utils.range_matrix_table(10, 10)
# empty partitions at front
mt2 = hl.utils.range_matrix_table(20, 10, 20)
mt2 = mt2.filter_rows(mt2.row_idx > 9)
mts = [mt1, mt2]
for mt in mts:
tmp_file = new_temp_file(extension='mt')
mt.write(tmp_file)
mt_readback = hl.read_matrix_table(tmp_file)
for mt_ in [mt, mt_readback]:
assert mt_.tail(1).count_rows() == 1
assert mt_.tail(1)._force_count_rows() == 1
assert mt_.tail(100).count_rows() == 10
assert mt_.tail(100)._force_count_rows() == 10
def test_tail_cols(self):
mt1 = hl.utils.range_matrix_table(10, 10)
assert mt1.tail(1, 2).count() == (1, 2)
assert mt1.tail(1, None).count() == (1, 10)
assert mt1.tail(None, 1).count() == (10, 1)
def test_tail_entries(self):
mt = hl.utils.range_matrix_table(100, 30)
mt = mt.filter_cols(mt.col_idx != 29)
def tail(*args):
ht = mt.tail(*args).entries()
return ht.aggregate(hl.agg.collect_as_set(hl.tuple([ht.row_idx, ht.col_idx])))
def expected(n, m):
return set((i, j) for i in range(100 - n, 100) for j in range(29 - m, 29))
assert tail(None, 10) == expected(100, 10)
assert tail(30, None) == expected(30, 29)
assert tail(30, 10) == expected(30, 10)
def test_tail_scan(self):
mt = hl.utils.range_matrix_table(30, 40)
mt = mt.annotate_rows(i = hl.scan.count())
mt = mt.annotate_cols(j = hl.scan.count())
mt = mt.tail(10, 11)
ht = mt.entries()
assert ht.aggregate(agg.collect_as_set(hl.tuple([ht.i, ht.j]))) == set(
(i, j) for i in range(20, 30) for j in range(29, 40)
)
def test_filter(self):
mt = self.get_mt()
mt = mt.annotate_globals(foo=5)
mt = mt.annotate_rows(x1=agg.count())
mt = mt.annotate_cols(y1=agg.count())
mt = mt.annotate_entries(z1=mt.DP)
mt = mt.filter_rows((mt.x1 == 5) & (agg.count() == 3) & (mt.foo == 2))
mt = mt.filter_cols((mt.y1 == 5) & (agg.count() == 3) & (mt.foo == 2))
mt = mt.filter_entries((mt.z1 < 5) & (mt.y1 == 3) & (mt.x1 == 5) & (mt.foo == 2))
mt.count_rows()
def test_aggregate(self):
mt = self.get_mt()
mt = mt.annotate_globals(foo=5)
mt = mt.annotate_rows(x1=agg.count())
mt = mt.annotate_cols(y1=agg.count())
mt = mt.annotate_entries(z1=mt.DP)
qv = mt.aggregate_rows(agg.count())
qs = mt.aggregate_cols(agg.count())
qg = mt.aggregate_entries(agg.count())
self.assertIsNotNone(mt.aggregate_entries(hl.agg.take(mt.s, 1)[0]))
self.assertEqual(qv, 346)
self.assertEqual(qs, 100)
self.assertEqual(qg, qv * qs)
qvs = mt.aggregate_rows(hl.Struct(x=agg.collect(mt.locus.contig),
y=agg.collect(mt.x1)))
qss = mt.aggregate_cols(hl.Struct(x=agg.collect(mt.s),
y=agg.collect(mt.y1)))
qgs = mt.aggregate_entries(hl.Struct(x=agg.filter(False, agg.collect(mt.y1)),
y=agg.filter(hl.rand_bool(0.1), agg.collect(mt.GT))))
def test_aggregate_rows_array_agg(self):
mt = hl.utils.range_matrix_table(10, 10)
mt = mt.annotate_rows(maf_flag = hl.empty_array('bool'))
mt.aggregate_rows(hl.agg.array_agg(lambda x: hl.agg.counter(x), mt.maf_flag))
def test_col_agg_no_rows(self):
mt = hl.utils.range_matrix_table(3, 3).filter_rows(False)
mt = mt.annotate_cols(x = hl.agg.count())
assert mt.x.collect() == [0, 0, 0]
def test_col_collect(self):
mt = hl.utils.range_matrix_table(3, 3)
mt.cols().collect()
def test_aggregate_ir(self):
ds = (hl.utils.range_matrix_table(5, 5)
.annotate_globals(g1=5)
.annotate_entries(e1=3))
x = [("col_idx", lambda e: ds.aggregate_cols(e)),
("row_idx", lambda e: ds.aggregate_rows(e))]
for name, f in x:
r = f(hl.struct(x=agg.sum(ds[name]) + ds.g1,
y=agg.filter(ds[name] % 2 != 0, agg.sum(ds[name] + 2)) + ds.g1,
z=agg.sum(ds.g1 + ds[name]) + ds.g1,
mean=agg.mean(ds[name])))
self.assertEqual(convert_struct_to_dict(r), {u'x': 15, u'y': 13, u'z': 40, u'mean': 2.0})
r = f(5)
self.assertEqual(r, 5)
r = f(hl.null(hl.tint32))
self.assertEqual(r, None)
r = f(agg.filter(ds[name] % 2 != 0, agg.sum(ds[name] + 2)) + ds.g1)
self.assertEqual(r, 13)
r = ds.aggregate_entries(agg.filter((ds.row_idx % 2 != 0) & (ds.col_idx % 2 != 0),
agg.sum(ds.e1 + ds.g1 + ds.row_idx + ds.col_idx)) + ds.g1)
self.assertTrue(r, 48)
def test_select_entries(self):
mt = hl.utils.range_matrix_table(10, 10, n_partitions=4)
mt = mt.annotate_entries(a=hl.struct(b=mt.row_idx, c=mt.col_idx), foo=mt.row_idx * 10 + mt.col_idx)
mt = mt.select_entries(mt.a.b, mt.a.c, mt.foo)
mt = mt.annotate_entries(bc=mt.b * 10 + mt.c)
mt_entries = mt.entries()
assert (mt_entries.all(mt_entries.bc == mt_entries.foo))
def test_select_cols(self):
mt = hl.utils.range_matrix_table(3, 5, n_partitions=4)
mt = mt.annotate_entries(e=mt.col_idx * mt.row_idx)
mt = mt.annotate_globals(g=1)
mt = mt.annotate_cols(sum=agg.sum(mt.e + mt.col_idx + mt.row_idx + mt.g) + mt.col_idx + mt.g,
count=agg.count_where(mt.e % 2 == 0),
foo=agg.count())
result = convert_struct_to_dict(mt.cols().collect()[-2])
self.assertEqual(result, {'col_idx': 3, 'sum': 28, 'count': 2, 'foo': 3})
def test_drop(self):
mt = self.get_mt()
mt = mt.annotate_globals(foo=5)
mt = mt.annotate_cols(bar=5)
mt1 = mt.drop('GT', 'info', 'foo', 'bar')
self.assertTrue('foo' not in mt1.globals)
self.assertTrue('info' not in mt1.row)
self.assertTrue('bar' not in mt1.col)
self.assertTrue('GT' not in mt1.entry)
mt1._force_count_rows()
mt2 = mt.drop(mt.GT, mt.info, mt.foo, mt.bar)
self.assertTrue('foo' not in mt2.globals)
self.assertTrue('info' not in mt2.row)
self.assertTrue('bar' not in mt2.col)
self.assertTrue('GT' not in mt2.entry)
mt2._force_count_rows()
def test_explode_rows(self):
mt = hl.utils.range_matrix_table(4, 4)
mt = mt.annotate_entries(e=mt.row_idx * 10 + mt.col_idx)
self.assertTrue(mt.annotate_rows(x=[1]).explode_rows('x').drop('x')._same(mt))
self.assertEqual(mt.annotate_rows(x=hl.empty_array('int')).explode_rows('x').count_rows(), 0)
self.assertEqual(mt.annotate_rows(x=hl.null('array<int>')).explode_rows('x').count_rows(), 0)
self.assertEqual(mt.annotate_rows(x=hl.range(0, mt.row_idx)).explode_rows('x').count_rows(), 6)
mt = mt.annotate_rows(x=hl.struct(y=hl.range(0, mt.row_idx)))
self.assertEqual(mt.explode_rows(mt.x.y).count_rows(), 6)
def test_explode_cols(self):
mt = hl.utils.range_matrix_table(4, 4)
mt = mt.annotate_entries(e=mt.row_idx * 10 + mt.col_idx)
self.assertTrue(mt.annotate_cols(x=[1]).explode_cols('x').drop('x')._same(mt))
self.assertEqual(mt.annotate_cols(x=hl.empty_array('int')).explode_cols('x').count_cols(), 0)
self.assertEqual(mt.annotate_cols(x=hl.null('array<int>')).explode_cols('x').count_cols(), 0)
self.assertEqual(mt.annotate_cols(x=hl.range(0, mt.col_idx)).explode_cols('x').count_cols(), 6)
def test_explode_key_errors(self):
mt = hl.utils.range_matrix_table(1, 1).key_cols_by(a=[1]).key_rows_by(b=[1])
with self.assertRaises(ValueError):
mt.explode_cols('a')
with self.assertRaises(ValueError):
mt.explode_rows('b')
def test_group_by_field_lifetimes(self):
mt = hl.utils.range_matrix_table(3, 3)
mt2 = (mt.group_rows_by(row_idx='100')
.aggregate(x=hl.agg.collect_as_set(mt.row_idx + 5)))
assert mt2.aggregate_entries(hl.agg.all(mt2.x == hl.set({5, 6, 7})))
mt3 = (mt.group_cols_by(col_idx='100')
.aggregate(x=hl.agg.collect_as_set(mt.col_idx + 5)))
assert mt3.aggregate_entries(hl.agg.all(mt3.x == hl.set({5, 6, 7})))
def test_aggregate_cols_by(self):
mt = hl.utils.range_matrix_table(2, 4)
mt = (mt.annotate_cols(group=mt.col_idx < 2)
.annotate_globals(glob=5))
grouped = mt.group_cols_by(mt.group)
result = grouped.aggregate(sum=hl.agg.sum(mt.row_idx * 2 + mt.col_idx + mt.glob) + 3)
expected = (hl.Table.parallelize([
{'row_idx': 0, 'group': True, 'sum': 14},
{'row_idx': 0, 'group': False, 'sum': 18},
{'row_idx': 1, 'group': True, 'sum': 18},
{'row_idx': 1, 'group': False, 'sum': 22}
], hl.tstruct(row_idx=hl.tint, group=hl.tbool, sum=hl.tint64))
.annotate_globals(glob=5)
.key_by('row_idx', 'group'))
self.assertTrue(result.entries()._same(expected))
def test_aggregate_cols_by_init_op(self):
mt = hl.import_vcf(resource('sample.vcf'))
cs = mt.group_cols_by(mt.s).aggregate(cs = hl.agg.call_stats(mt.GT, mt.alleles))
cs._force_count_rows() # should run without error
def test_aggregate_cols_scope_violation(self):
mt = get_dataset()
with pytest.raises(hl.expr.ExpressionException) as exc:
mt.aggregate_cols(hl.agg.filter(False, hl.agg.sum(mt.GT.is_non_ref())))
assert "scope violation" in str(exc.value)
def test_aggregate_rows_by(self):
mt = hl.utils.range_matrix_table(4, 2)
mt = (mt.annotate_rows(group=mt.row_idx < 2)
.annotate_globals(glob=5))
grouped = mt.group_rows_by(mt.group)
result = grouped.aggregate(sum=hl.agg.sum(mt.col_idx * 2 + mt.row_idx + mt.glob) + 3)
expected = (hl.Table.parallelize([
{'col_idx': 0, 'group': True, 'sum': 14},
{'col_idx': 1, 'group': True, 'sum': 18},
{'col_idx': 0, 'group': False, 'sum': 18},
{'col_idx': 1, 'group': False, 'sum': 22}
], hl.tstruct(group=hl.tbool, col_idx=hl.tint, sum=hl.tint64))
.annotate_globals(glob=5)
.key_by('group', 'col_idx'))
self.assertTrue(result.entries()._same(expected))
def test_collect_cols_by_key(self):
mt = hl.utils.range_matrix_table(3, 3)
col_dict = hl.literal({0: [1], 1: [2, 3], 2: [4, 5, 6]})
mt = mt.annotate_cols(foo=col_dict.get(mt.col_idx)) \
.explode_cols('foo')
mt = mt.annotate_entries(bar=mt.row_idx * mt.foo)
grouped = mt.collect_cols_by_key()
self.assertListEqual(grouped.cols().order_by('col_idx').collect(),
[hl.Struct(col_idx=0, foo=[1]),
hl.Struct(col_idx=1, foo=[2, 3]),
hl.Struct(col_idx=2, foo=[4, 5, 6])])
self.assertListEqual(
grouped.entries().select('bar')
.order_by('row_idx', 'col_idx').collect(),
[hl.Struct(row_idx=0, col_idx=0, bar=[0]),
hl.Struct(row_idx=0, col_idx=1, bar=[0, 0]),
hl.Struct(row_idx=0, col_idx=2, bar=[0, 0, 0]),
hl.Struct(row_idx=1, col_idx=0, bar=[1]),
hl.Struct(row_idx=1, col_idx=1, bar=[2, 3]),
hl.Struct(row_idx=1, col_idx=2, bar=[4, 5, 6]),
hl.Struct(row_idx=2, col_idx=0, bar=[2]),
hl.Struct(row_idx=2, col_idx=1, bar=[4, 6]),
hl.Struct(row_idx=2, col_idx=2, bar=[8, 10, 12])])
def test_weird_names(self):
ds = self.get_mt()
exprs = {'a': 5, ' a ': 5, r'\%!^!@#&#&$%#$%': [5], '$': 5, 'ß': 5}
ds.annotate_globals(**exprs)
ds.select_globals(**exprs)
ds.annotate_cols(**exprs)
ds1 = ds.select_cols(**exprs)
ds.annotate_rows(**exprs)
ds2 = ds.select_rows(**exprs)
ds.annotate_entries(**exprs)
ds.select_entries(**exprs)
ds1.explode_cols(r'\%!^!@#&#&$%#$%')
ds1.explode_cols(ds1[r'\%!^!@#&#&$%#$%'])
ds1.group_cols_by(ds1.a).aggregate(**{'*``81': agg.count()})
ds1.drop(r'\%!^!@#&#&$%#$%')
ds1.drop(ds1[r'\%!^!@#&#&$%#$%'])
ds2.explode_rows(r'\%!^!@#&#&$%#$%')
ds2.explode_rows(ds2[r'\%!^!@#&#&$%#$%'])
ds2.group_rows_by(ds2.a).aggregate(**{'*``81': agg.count()})
def test_semi_anti_join_rows(self):
mt = hl.utils.range_matrix_table(10, 3)
ht = hl.utils.range_table(3)
assert mt.semi_join_rows(ht).count() == (3, 3)
assert mt.anti_join_rows(ht).count() == (7, 3)
def test_semi_anti_join_cols(self):
mt = hl.utils.range_matrix_table(3, 10)
ht = hl.utils.range_table(3)
assert mt.semi_join_cols(ht).count() == (3, 3)
assert mt.anti_join_cols(ht).count() == (3, 7)
def test_joins(self):
mt = self.get_mt().select_rows(x1=1, y1=1)
mt2 = mt.select_rows(x2=1, y2=2)
mt2 = mt2.select_cols(c1=1, c2=2)
mt = mt.annotate_rows(y2=mt2.index_rows(mt.row_key).y2)
mt = mt.annotate_cols(c2=mt2.index_cols(mt.s).c2)
mt = mt.annotate_cols(c2=mt2.index_cols(hl.str(mt.s)).c2)
rt = mt.rows()
ct = mt.cols()
mt.annotate_rows(**rt[mt.locus, mt.alleles])
self.assertTrue(rt.all(rt.y2 == 2))
self.assertTrue(ct.all(ct.c2 == 2))
def test_joins_with_key_structs(self):
mt = self.get_mt()
rows = mt.rows()
cols = mt.cols()
self.assertEqual(rows[mt.locus, mt.alleles].take(1), rows[mt.row_key].take(1))
self.assertEqual(cols[mt.s].take(1), cols[mt.col_key].take(1))
self.assertEqual(mt.index_rows(mt.row_key).take(1), mt.index_rows(mt.locus, mt.alleles).take(1))
self.assertEqual(mt.index_cols(mt.col_key).take(1), mt.index_cols(mt.s).take(1))
self.assertEqual(mt[mt.row_key, mt.col_key].take(1), mt[(mt.locus, mt.alleles), mt.s].take(1))
def test_index_keyless(self):
mt = hl.utils.range_matrix_table(3, 3)
with self.assertRaisesRegex(hl.expr.ExpressionException, "MatrixTable row key: *<<<empty key>>>"):
mt.key_rows_by().index_rows(mt.row_idx)
with self.assertRaisesRegex(hl.expr.ExpressionException, "MatrixTable col key: *<<<empty key>>>"):
mt.key_cols_by().index_cols(mt.col_idx)
def test_table_join(self):
ds = self.get_mt()
# test different row schemas
self.assertTrue(ds.union_cols(ds.drop(ds.info))
.count_rows(), 346)
def test_table_product_join(self):
left = hl.utils.range_matrix_table(5, 1)
right = hl.utils.range_table(5)
right = right.annotate(i=hl.range(right.idx + 1, 5)).explode('i').key_by('i')
left = left.annotate_rows(matches=right.index(left.row_key, all_matches=True))
rows = left.rows()
self.assertTrue(rows.all(rows.matches.map(lambda x: x.idx) == hl.range(0, rows.row_idx)))
def test_naive_coalesce(self):
mt = self.get_mt(min_partitions=8)
self.assertEqual(mt.n_partitions(), 8)
repart = mt.naive_coalesce(2)
self.assertTrue(mt._same(repart))
def test_coalesce_with_no_rows(self):
mt = self.get_mt().filter_rows(False)
self.assertEqual(mt.repartition(1).count_rows(), 0)
def test_literals_rebuild(self):
mt = hl.utils.range_matrix_table(1, 1)
mt = mt.annotate_rows(x = hl.cond(hl.literal([1,2,3])[mt.row_idx] < hl.rand_unif(10, 11), mt.globals, hl.struct()))
mt._force_count_rows()
def test_globals_lowering(self):
mt = hl.utils.range_matrix_table(1, 1).annotate_globals(x=1)
lit = hl.literal(hl.utils.Struct(x = 0))
mt.annotate_rows(foo=hl.agg.collect(mt.globals == lit))._force_count_rows()
mt.annotate_cols(foo=hl.agg.collect(mt.globals == lit))._force_count_rows()
mt.filter_rows(mt.globals == lit)._force_count_rows()
mt.filter_cols(mt.globals == lit)._force_count_rows()
mt.filter_entries(mt.globals == lit)._force_count_rows()
(mt.group_rows_by(mt.row_idx)
.aggregate_rows(foo=hl.agg.collect(mt.globals == lit))
.aggregate(bar=hl.agg.collect(mt.globals == lit))
._force_count_rows())
(mt.group_cols_by(mt.col_idx)
.aggregate_cols(foo=hl.agg.collect(mt.globals == lit))
.aggregate(bar=hl.agg.collect(mt.globals == lit))
._force_count_rows())
def test_unions(self):
dataset = hl.import_vcf(resource('sample2.vcf'))
# test union_rows
ds1 = dataset.filter_rows(dataset.locus.position % 2 == 1)
ds2 = dataset.filter_rows(dataset.locus.position % 2 == 0)
datasets = [ds1, ds2]
r1 = ds1.union_rows(ds2)
r2 = hl.MatrixTable.union_rows(*datasets)
self.assertTrue(r1._same(r2))
with self.assertRaises(ValueError):
ds1.filter_cols(ds1.s.endswith('5')).union_rows(ds2)
# test union_cols
ds = dataset.union_cols(dataset).union_cols(dataset)
for s, count in ds.aggregate_cols(agg.counter(ds.s)).items():
self.assertEqual(count, 3)
def test_union_cols_example(self):
joined = hl.import_vcf(resource('joined.vcf'))
left = hl.import_vcf(resource('joinleft.vcf'))
right = hl.import_vcf(resource('joinright.vcf'))
self.assertTrue(left.union_cols(right)._same(joined))
def test_union_cols_distinct(self):
mt = hl.utils.range_matrix_table(10, 10)
mt = mt.key_rows_by(x = mt.row_idx // 2)
assert mt.union_cols(mt).count_rows() == 5
def test_union_cols_outer(self):
r, c = 10, 10
mt = hl.utils.range_matrix_table(2*r, c)
mt = mt.annotate_entries(entry=hl.tuple([mt.row_idx, mt.col_idx]))
mt2 = hl.utils.range_matrix_table(2*r, c)
mt2 = mt2.key_rows_by(row_idx=mt2.row_idx + r)
mt2 = mt2.key_cols_by(col_idx=mt2.col_idx + c)
mt2 = mt2.annotate_entries(entry=hl.tuple([mt2.row_idx, mt2.col_idx]))
expected = hl.utils.range_matrix_table(3*r, 2*c)
missing = hl.null(hl.ttuple(hl.tint, hl.tint))
expected = expected.annotate_entries(entry=hl.cond(
expected.col_idx < c,
hl.cond(expected.row_idx < 2*r, hl.tuple([expected.row_idx, expected.col_idx]), missing),
hl.cond(expected.row_idx >= r, hl.tuple([expected.row_idx, expected.col_idx]), missing)))
assert mt.union_cols(mt2, row_join_type='outer')._same(expected)
def test_union_rows_different_col_schema(self):
mt = hl.utils.range_matrix_table(10, 10)
mt2 = hl.utils.range_matrix_table(10, 10)
mt2 = mt2.annotate_cols(x=mt2.col_idx + 1)
mt2 = mt2.annotate_globals(g="foo")
self.assertEqual(mt.union_rows(mt2).count_rows(), 20)
def test_index(self):
ds = self.get_mt(min_partitions=8)
self.assertEqual(ds.n_partitions(), 8)
ds = ds.add_row_index('rowidx').add_col_index('colidx')
for i, struct in enumerate(ds.cols().select('colidx').collect()):
self.assertEqual(i, struct.colidx)
for i, struct in enumerate(ds.rows().select('rowidx').collect()):
self.assertEqual(i, struct.rowidx)
def test_choose_cols(self):
ds = self.get_mt()
indices = list(range(ds.count_cols()))
random.shuffle(indices)
old_order = ds.key_cols_by()['s'].collect()
self.assertEqual(ds.choose_cols(indices).key_cols_by()['s'].collect(),
[old_order[i] for i in indices])
self.assertEqual(ds.choose_cols(list(range(10))).s.collect(),
old_order[:10])
def test_choose_cols_vs_explode(self):
ds = self.get_mt()
ds2 = ds.annotate_cols(foo=[0, 0]).explode_cols('foo').drop('foo')
self.assertTrue(ds.choose_cols(sorted(list(range(ds.count_cols())) * 2))._same(ds2))
def test_distinct_by_row(self):
orig_mt = hl.utils.range_matrix_table(10, 10)
mt = orig_mt.key_rows_by(row_idx=orig_mt.row_idx // 2)
self.assertTrue(mt.distinct_by_row().count_rows() == 5)
self.assertTrue(orig_mt.union_rows(orig_mt).distinct_by_row()._same(orig_mt))
def test_distinct_by_col(self):
orig_mt = hl.utils.range_matrix_table(10, 10)
mt = orig_mt.key_cols_by(col_idx=orig_mt.col_idx // 2)
self.assertTrue(mt.distinct_by_col().count_cols() == 5)
self.assertTrue(orig_mt.union_cols(orig_mt).distinct_by_col()._same(orig_mt))
def test_aggregation_with_no_aggregators(self):
mt = hl.utils.range_matrix_table(3, 3)
self.assertEqual(mt.group_rows_by(mt.row_idx).aggregate().count_rows(), 3)
self.assertEqual(mt.group_cols_by(mt.col_idx).aggregate().count_cols(), 3)
def test_computed_key_join_1(self):
ds = self.get_mt()
kt = hl.Table.parallelize(
[{'key': 0, 'value': True},
{'key': 1, 'value': False}],
hl.tstruct(key=hl.tint32, value=hl.tbool),
key=['key'])
ds = ds.annotate_rows(key=ds.locus.position % 2)
ds = ds.annotate_rows(value=kt[ds['key']]['value'])
rt = ds.rows()
self.assertTrue(
rt.all(((rt.locus.position % 2) == 0) == rt['value']))
def test_computed_key_join_2(self):
# multiple keys
ds = self.get_mt()
kt = hl.Table.parallelize(
[{'key1': 0, 'key2': 0, 'value': 0},
{'key1': 1, 'key2': 0, 'value': 1},
{'key1': 0, 'key2': 1, 'value': -2},
{'key1': 1, 'key2': 1, 'value': -1}],
hl.tstruct(key1=hl.tint32, key2=hl.tint32, value=hl.tint32),
key=['key1', 'key2'])
ds = ds.annotate_rows(key1=ds.locus.position % 2, key2=ds.info.DP % 2)
ds = ds.annotate_rows(value=kt[ds.key1, ds.key2]['value'])
rt = ds.rows()
self.assertTrue(
rt.all((rt.locus.position % 2) - 2 * (rt.info.DP % 2) == rt['value']))
def test_computed_key_join_3(self):
# duplicate row keys
ds = self.get_mt()
kt = hl.Table.parallelize(
[{'culprit': 'InbreedingCoeff', 'foo': 'bar', 'value': 'IB'}],
hl.tstruct(culprit=hl.tstr, foo=hl.tstr, value=hl.tstr),
key=['culprit', 'foo'])
ds = ds.annotate_rows(
dsfoo='bar',
info=ds.info.annotate(culprit=[ds.info.culprit, "foo"]))
ds = ds.explode_rows(ds.info.culprit)
ds = ds.annotate_rows(value=kt[ds.info.culprit, ds.dsfoo]['value'])
rt = ds.rows()
self.assertTrue(
rt.all(hl.cond(
rt.info.culprit == "InbreedingCoeff",
rt['value'] == "IB",
hl.is_missing(rt['value']))))
def test_interval_join(self):
left = hl.utils.range_matrix_table(50, 1, n_partitions=10)
intervals = hl.utils.range_table(4)
intervals = intervals.key_by(interval=hl.interval(intervals.idx * 10, intervals.idx * 10 + 5))
left = left.annotate_rows(interval_matches=intervals.index(left.row_key))
rows = left.rows()
self.assertTrue(rows.all(hl.case()
.when(rows.row_idx % 10 < 5, rows.interval_matches.idx == rows.row_idx // 10)
.default(hl.is_missing(rows.interval_matches))))
def test_interval_product_join(self):
left = hl.utils.range_matrix_table(50, 1, n_partitions=8)
intervals = hl.utils.range_table(25)
intervals = intervals.key_by(interval=hl.interval(
1 + (intervals.idx // 5) * 10 + (intervals.idx % 5),
(1 + intervals.idx // 5) * 10 - (intervals.idx % 5)))
intervals = intervals.annotate(i=intervals.idx % 5)
left = left.annotate_rows(interval_matches=intervals.index(left.row_key, all_matches=True))
rows = left.rows()
self.assertTrue(rows.all(hl.sorted(rows.interval_matches.map(lambda x: x.i))
== hl.range(0, hl.min(rows.row_idx % 10, 10 - rows.row_idx % 10))))
def test_entry_join_self(self):
mt1 = hl.utils.range_matrix_table(10, 10, n_partitions=4).choose_cols([9, 8, 7, 6, 5, 4, 3, 2, 1, 0])
mt1 = mt1.annotate_entries(x=10 * mt1.row_idx + mt1.col_idx)
self.assertEqual(mt1[mt1.row_idx, mt1.col_idx].dtype, mt1.entry.dtype)
mt_join = mt1.annotate_entries(x2=mt1[mt1.row_idx, mt1.col_idx].x)
mt_join_entries = mt_join.entries()
self.assertTrue(mt_join_entries.all(mt_join_entries.x == mt_join_entries.x2))
def test_entry_join_const(self):
mt1 = hl.utils.range_matrix_table(10, 10, n_partitions=4)
mt1 = mt1.annotate_entries(x=mt1.row_idx + mt1.col_idx)
mt2 = hl.utils.range_matrix_table(1, 1, n_partitions=1)
mt2 = mt2.annotate_entries(foo=10101)
mt_join = mt1.annotate_entries(**mt2[mt1.row_idx // 100, mt1.col_idx // 100])
mt_join_entries = mt_join.entries()
self.assertTrue(mt_join_entries.all(mt_join_entries['foo'] == 10101))
def test_entry_join_missingness(self):
mt1 = hl.utils.range_matrix_table(10, 10, n_partitions=4)
mt1 = mt1.annotate_entries(x=mt1.row_idx + mt1.col_idx)
mt2 = mt1.filter_cols(mt1.col_idx % 2 == 0)
mt2 = mt2.filter_rows(mt2.row_idx % 2 == 0)
mt_join = mt1.annotate_entries(x2=mt2[mt1.row_idx, mt1.col_idx].x * 10)
mt_join_entries = mt_join.entries()
kept = mt_join_entries.filter((mt_join_entries.row_idx % 2 == 0) & (mt_join_entries.col_idx % 2 == 0))
removed = mt_join_entries.filter(~((mt_join_entries.row_idx % 2 == 0) & (mt_join_entries.col_idx % 2 == 0)))
self.assertTrue(kept.all(hl.is_defined(kept.x2) & (kept.x2 == kept.x * 10)))
self.assertTrue(removed.all(hl.is_missing(removed.x2)))
def test_entries_table_length_and_fields(self):
mt = hl.utils.range_matrix_table(10, 10, n_partitions=4)
mt = mt.annotate_entries(x=mt.col_idx + mt.row_idx)
et = mt.entries()
self.assertEqual(et.count(), 100)
self.assertTrue(et.all(et.x == et.col_idx + et.row_idx))
def test_entries_table_no_keys(self):
mt = hl.utils.range_matrix_table(2, 2)
mt = mt.annotate_entries(x = (mt.row_idx, mt.col_idx))
original_order = [
hl.utils.Struct(row_idx=0, col_idx=0, x=(0, 0)),
hl.utils.Struct(row_idx=0, col_idx=1, x=(0, 1)),
hl.utils.Struct(row_idx=1, col_idx=0, x=(1, 0)),
hl.utils.Struct(row_idx=1, col_idx=1, x=(1, 1)),
]
assert mt.entries().collect() == original_order
assert mt.key_cols_by().entries().collect() == original_order
assert mt.key_rows_by().key_cols_by().entries().collect() == original_order
assert mt.key_rows_by().entries().collect() == sorted(original_order, key=lambda x: x.col_idx)
def test_entries_table_with_out_of_order_row_key_fields(self):
mt = hl.utils.range_matrix_table(10, 10, 1)
mt = mt.select_rows(key2=0, key1=mt.row_idx)
mt = mt.key_rows_by(mt.key1, mt.key2)
mt.entries()._force_count()
def test_filter_cols_required_entries(self):
mt1 = hl.utils.range_matrix_table(10, 10, n_partitions=4)
mt1 = mt1.filter_cols(mt1.col_idx < 3)
self.assertEqual(len(mt1.entries().collect()), 30)
def test_filter_cols_with_global_references(self):
mt = hl.utils.range_matrix_table(10, 10)
s = hl.literal({1, 3, 5, 7})
self.assertEqual(mt.filter_cols(s.contains(mt.col_idx)).count_cols(), 4)
def test_filter_cols_agg(self):
mt = hl.utils.range_matrix_table(10, 10)
assert mt.filter_cols(hl.agg.count() > 5).count_cols() == 10
def test_vcf_regression(self):
ds = hl.import_vcf(resource('33alleles.vcf'))
self.assertEqual(
ds.filter_rows(ds.alleles.length() == 2).count_rows(), 0)
def test_field_groups(self):
ds = self.get_mt()
df = ds.annotate_rows(row_struct=ds.row).rows()
self.assertTrue(df.all((df.info == df.row_struct.info) & (df.qual == df.row_struct.qual)))
ds2 = ds.add_col_index()
df = ds2.annotate_cols(col_struct=ds2.col).cols()
self.assertTrue(df.all((df.col_idx == df.col_struct.col_idx)))
df = ds.annotate_entries(entry_struct=ds.entry).entries()
self.assertTrue(df.all(
((hl.is_missing(df.GT) |
(df.GT == df.entry_struct.GT)) &
(df.AD == df.entry_struct.AD))))
def test_filter_partitions(self):
ds = self.get_mt(min_partitions=8)
self.assertEqual(ds.n_partitions(), 8)
self.assertEqual(ds._filter_partitions([0, 1, 4]).n_partitions(), 3)
self.assertEqual(ds._filter_partitions(range(3)).n_partitions(), 3)
self.assertEqual(ds._filter_partitions([4, 5, 7], keep=False).n_partitions(), 5)
self.assertTrue(
ds._same(hl.MatrixTable.union_rows(
ds._filter_partitions([0, 3, 7]),
ds._filter_partitions([0, 3, 7], keep=False))))
def test_from_rows_table(self):
mt = hl.import_vcf(resource('sample.vcf'))
mt = mt.annotate_globals(foo='bar')
rt = mt.rows()
rm = hl.MatrixTable.from_rows_table(rt)
self.assertTrue(rm._same(mt.filter_cols(False).select_entries().key_cols_by().select_cols()))
def test_sample_rows(self):
ds = self.get_mt()
ds_small = ds.sample_rows(0.01)
self.assertTrue(ds_small.count_rows() < ds.count_rows())
def test_read_stored_cols(self):
ds = self.get_mt()
ds = ds.annotate_globals(x='foo')
f = new_temp_file(extension='mt')
ds.write(f)
t = hl.read_table(f + '/cols')
self.assertTrue(ds.cols()._same(t))
def test_read_stored_rows(self):
ds = self.get_mt()
ds = ds.annotate_globals(x='foo')
f = new_temp_file(extension='mt')
ds.write(f)
t = hl.read_table(f + '/rows')
self.assertTrue(ds.rows()._same(t))
def test_read_stored_globals(self):
ds = self.get_mt()
ds = ds.annotate_globals(x=5, baz='foo')
f = new_temp_file(extension='mt')
ds.write(f)
t = hl.read_table(f + '/globals')
self.assertTrue(ds.globals_table()._same(t))
def test_indexed_read(self):
mt = hl.utils.range_matrix_table(2000, 100, 10)
f = new_temp_file(extension='mt')
mt.write(f)
mt2 = hl.read_matrix_table(f, _intervals=[
hl.Interval(start=150, end=250, includes_start=True, includes_end=False),
hl.Interval(start=250, end=500, includes_start=True, includes_end=False),
])
self.assertEqual(mt2.n_partitions(), 2)
self.assertTrue(mt.filter_rows((mt.row_idx >= 150) & (mt.row_idx < 500))._same(mt2))
mt2 = hl.read_matrix_table(f, _intervals=[
hl.Interval(start=150, end=250, includes_start=True, includes_end=False),
hl.Interval(start=250, end=500, includes_start=True, includes_end=False),
], _filter_intervals=True)
self.assertEqual(mt2.n_partitions(), 3)
self.assertTrue(mt.filter_rows((mt.row_idx >= 150) & (mt.row_idx < 500))._same(mt2))
def test_indexed_read_vcf(self):
vcf = self.get_mt(10)
f = new_temp_file(extension='mt')
vcf.write(f)
l1, l2, l3, l4 = hl.Locus('20', 10000000), hl.Locus('20', 11000000), hl.Locus('20', 13000000), hl.Locus('20', 14000000)
mt = hl.read_matrix_table(f, _intervals=[
hl.Interval(start=l1, end=l2),
hl.Interval(start=l3, end=l4),
])
self.assertEqual(mt.n_partitions(), 2)
p = (vcf.locus >= l1) & (vcf.locus < l2)
q = (vcf.locus >= l3) & (vcf.locus < l4)
self.assertTrue(vcf.filter_rows(p | q)._same(mt))
def test_codecs_matrix(self):
from hail.utils.java import scala_object
supported_codecs = scala_object(Env.hail().io, 'BufferSpec').specs()
ds = self.get_mt()
temp = new_temp_file(extension='mt')
for codec in supported_codecs:
ds.write(temp, overwrite=True, _codec_spec=codec.toString())
ds2 = hl.read_matrix_table(temp)
self.assertTrue(ds._same(ds2))
def test_codecs_table(self):
from hail.utils.java import scala_object
supported_codecs = scala_object(Env.hail().io, 'BufferSpec').specs()
rt = self.get_mt().rows()
temp = new_temp_file(extension='ht')
for codec in supported_codecs:
rt.write(temp, overwrite=True, _codec_spec=codec.toString())
rt2 = hl.read_table(temp)
self.assertTrue(rt._same(rt2))
def test_fix3307_read_mt_wrong(self):
mt = hl.import_vcf(resource('sample2.vcf'))
mt = hl.split_multi_hts(mt)
mt.write('/tmp/foo.mt', overwrite=True)
mt2 = hl.read_matrix_table('/tmp/foo.mt')
t = hl.read_table('/tmp/foo.mt/rows')
self.assertTrue(mt.rows()._same(t))
self.assertTrue(mt2.rows()._same(t))
self.assertTrue(mt._same(mt2))
def test_rename(self):
dataset = self.get_mt()
renamed1 = dataset.rename({'locus': 'locus2', 'info': 'info2', 's': 'info'})
self.assertEqual(renamed1['locus2']._type, dataset['locus']._type)
self.assertEqual(renamed1['info2']._type, dataset['info']._type)
self.assertEqual(renamed1['info']._type, dataset['s']._type)
self.assertEqual(renamed1['info']._indices, renamed1._col_indices)
self.assertFalse('locus' in renamed1._fields)
self.assertFalse('s' in renamed1._fields)
with self.assertRaises(ValueError):
dataset.rename({'locus': 'info'})
with self.assertRaises(ValueError):
dataset.rename({'locus': 'a', 's': 'a'})
with self.assertRaises(LookupError):
dataset.rename({'foo': 'a'})
def test_range(self):
ds = hl.utils.range_matrix_table(100, 10)
self.assertEqual(ds.count_rows(), 100)
self.assertEqual(ds.count_cols(), 10)
et = ds.annotate_entries(entry_idx=10 * ds.row_idx + ds.col_idx).entries().add_index()
self.assertTrue(et.all(et.idx == et.entry_idx))
def test_filter_entries(self):
ds = hl.utils.range_matrix_table(100, 10)
ds = ds.annotate_rows(foo=5) # triggered a RV bug
ds = ds.annotate_cols(bar=5)
ds = ds.filter_entries((ds.col_idx * ds.row_idx) % 4 == 0)
entries = ds.entries()
self.assertTrue(entries.all((entries.col_idx * entries.row_idx) % 4 == 0))
def test_filter_na(self):
mt = hl.utils.range_matrix_table(1, 1)
self.assertEqual(mt.filter_rows(hl.null(hl.tbool)).count_rows(), 0)
self.assertEqual(mt.filter_cols(hl.null(hl.tbool)).count_cols(), 0)
self.assertEqual(mt.filter_entries(hl.null(hl.tbool)).entries().count(), 0)
def test_to_table_on_various_fields(self):
mt = hl.utils.range_matrix_table(3, 4)
globe = 'the globe!'
sample_ids = ['Bob', 'Alice', 'David', 'Carol']
entries = [1, 0, 3, 2]
rows = ['1:3:A:G', '1:2:A:G', '1:0:A:G']
sorted_rows = sorted(rows)
mt = mt.annotate_globals(globe=globe)
mt = mt.annotate_cols(s=hl.array(sample_ids)[mt.col_idx]).key_cols_by('s')
mt = mt.annotate_entries(e=hl.array(entries)[mt.col_idx])
mt = mt.annotate_rows(r=hl.array(rows)[mt.row_idx]).key_rows_by('r')
self.assertEqual(mt.globe.collect(), [globe])
self.assertEqual(mt.s.collect(), sample_ids)
self.assertEqual((mt.s + '1').collect(), [s + '1' for s in sample_ids])
self.assertEqual(('1' + mt.s).collect(), ['1' + s for s in sample_ids])
self.assertEqual(mt.s.take(1), [sample_ids[0]])
self.assertEqual(mt.e.collect(), entries * 3)
self.assertEqual(mt.e.take(1), [entries[0]])
self.assertEqual(mt.row_idx.collect(), [2, 1, 0])
self.assertEqual(mt.r.collect(), sorted_rows)
self.assertEqual(mt.r.take(1), [sorted_rows[0]])
self.assertEqual(mt.col_key.collect(),
[hl.Struct(s=s) for s in sample_ids])
self.assertEqual(mt.col.collect(),
[hl.Struct(s=s, col_idx=i) for i, s in enumerate(sample_ids)])
self.assertEqual(mt.row_key.collect(),
[hl.Struct(r=r) for r in sorted_rows])
self.assertEqual(mt.row.collect(),
sorted([hl.Struct(r=r, row_idx=i) for i, r in enumerate(rows)],
key=lambda x: x.r))
self.assertEqual(mt.entry.collect(),
[hl.Struct(e=e)
for _ in sorted_rows
for e in entries])
self.assertEqual(mt.cols().s.collect(), sorted(sample_ids))
self.assertEqual(mt.cols().s.take(1), [sorted(sample_ids)[0]])
self.assertEqual(mt.entries().e.collect(), sorted(entries) * 3)
self.assertEqual(mt.entries().e.take(1), [sorted(entries)[0]])
self.assertEqual(mt.rows().row_idx.collect(), [2, 1, 0])
self.assertEqual(mt.rows().r.collect(), sorted_rows)
self.assertEqual(mt.rows().r.take(1), [sorted_rows[0]])
def test_order_by(self):
ht = hl.utils.range_table(10)
self.assertEqual(ht.order_by('idx').idx.collect(), list(range(10)))
self.assertEqual(ht.order_by(hl.asc('idx')).idx.collect(), list(range(10)))
self.assertEqual(ht.order_by(hl.desc('idx')).idx.collect(), list(range(10))[::-1])
def test_order_by_complex_exprs(self):
ht = hl.utils.range_table(10)
assert ht.order_by(-ht.idx).idx.collect() == list(range(10))[::-1]
def test_order_by_intervals(self):
intervals = {0: hl.Interval(0, 3, includes_start=True, includes_end=False),
1: hl.Interval(0, 4, includes_start=True, includes_end=True),
2: hl.Interval(1, 4, includes_start=True, includes_end=False),
3: hl.Interval(0, 4, includes_start=False, includes_end=False),
4: hl.Interval(0, 4, includes_start=True, includes_end=False)}
ht = hl.utils.range_table(5)
ht = ht.annotate_globals(ilist=intervals)
ht = ht.annotate(interval=ht['ilist'][ht['idx']])
ht = ht.order_by(ht['interval'])
ordered = ht['interval'].collect()
expected = [intervals[i] for i in [0, 4, 1, 3, 2]]
self.assertEqual(ordered, expected)
def test_range_matrix_table(self):
mt = hl.utils.range_matrix_table(13, 7, n_partitions=5)
self.assertEqual(mt.globals.dtype, hl.tstruct())
self.assertEqual(mt.row.dtype, hl.tstruct(row_idx=hl.tint32))
self.assertEqual(mt.col.dtype, hl.tstruct(col_idx=hl.tint32))
self.assertEqual(mt.entry.dtype, hl.tstruct())
self.assertEqual(list(mt.row_key), ['row_idx'])
self.assertEqual(list(mt.col_key), ['col_idx'])
self.assertEqual([r.row_idx for r in mt.rows().collect()], list(range(13)))
self.assertEqual([r.col_idx for r in mt.cols().collect()], list(range(7)))
def test_range_matrix_table_0_rows_0_cols(self):
mt = hl.utils.range_matrix_table(0, 0)
self.assertEqual(mt.col_idx.collect(), [])
self.assertEqual(mt.row_idx.collect(), [])
mt = mt.annotate_entries(x=mt.row_idx * mt.col_idx)
self.assertEqual(mt.x.collect(), [])
def test_make_table(self):
mt = hl.utils.range_matrix_table(3, 2)
mt = mt.select_entries(x=mt.row_idx * mt.col_idx)
mt = mt.key_cols_by(col_idx=hl.str(mt.col_idx))
t = hl.Table.parallelize(
[{'row_idx': 0, '0.x': 0, '1.x': 0},
{'row_idx': 1, '0.x': 0, '1.x': 1},
{'row_idx': 2, '0.x': 0, '1.x': 2}],
hl.tstruct(**{'row_idx': hl.tint32, '0.x': hl.tint32, '1.x': hl.tint32}),
key='row_idx')
self.assertTrue(mt.make_table()._same(t))
def test_make_table_empty_entry_field(self):
mt = hl.utils.range_matrix_table(3, 2)
mt = mt.select_entries(**{'': mt.row_idx * mt.col_idx})
mt = mt.key_cols_by(col_idx=hl.str(mt.col_idx))
t = mt.make_table()
self.assertEqual(
t.row.dtype,
hl.tstruct(**{'row_idx': hl.tint32, '0': hl.tint32, '1': hl.tint32}))
def test_make_table_sep(self):
mt = hl.utils.range_matrix_table(3, 2)
mt = mt.select_entries(x=mt.row_idx * mt.col_idx)
mt = mt.key_cols_by(col_idx=hl.str(mt.col_idx))
t = mt.make_table()
assert list(t.row) == ['row_idx', '0.x', '1.x']
t = mt.make_table(separator='__')
assert list(t.row) == ['row_idx', '0__x', '1__x']
def test_make_table_row_equivalence(self):
mt = hl.utils.range_matrix_table(3, 3)
mt = mt.annotate_rows(r1 = hl.rand_norm(), r2 = hl.rand_norm())
mt = mt.annotate_entries(e1 = hl.rand_norm(), e2 = hl.rand_norm())
mt = mt.key_cols_by(col_idx=hl.str(mt.col_idx))
assert mt.make_table().select(*mt.row_value)._same(mt.rows())
def test_make_table_na_error(self):
mt = hl.utils.range_matrix_table(3, 3).key_cols_by(s = hl.null('str'))
mt = mt.annotate_entries(e1 = 1)
with pytest.raises(ValueError):
mt.make_table()
def test_transmute(self):
mt = (
hl.utils.range_matrix_table(1, 1)
.annotate_globals(g1=0, g2=0)
.annotate_cols(c1=0, c2=0)
.annotate_rows(r1=0, r2=0)
.annotate_entries(e1=0, e2=0))
self.assertEqual(mt.transmute_globals(g3=mt.g2 + 1).globals.dtype, hl.tstruct(g1=hl.tint, g3=hl.tint))
self.assertEqual(mt.transmute_rows(r3=mt.r2 + 1).row_value.dtype, hl.tstruct(r1=hl.tint, r3=hl.tint))
self.assertEqual(mt.transmute_cols(c3=mt.c2 + 1).col_value.dtype, hl.tstruct(c1=hl.tint, c3=hl.tint))
self.assertEqual(mt.transmute_entries(e3=mt.e2 + 1).entry.dtype, hl.tstruct(e1=hl.tint, e3=hl.tint))
def test_transmute_agg(self):
mt = hl.utils.range_matrix_table(1, 1).annotate_entries(x=5)
mt = mt.transmute_rows(y = hl.agg.mean(mt.x))
def test_agg_explode(self):
t = hl.Table.parallelize([
hl.struct(a=[1, 2]),
hl.struct(a=hl.empty_array(hl.tint32)),
hl.struct(a=hl.null(hl.tarray(hl.tint32))),
hl.struct(a=[3]),
hl.struct(a=[hl.null(hl.tint32)])
])
self.assertCountEqual(t.aggregate(hl.agg.explode(lambda elt: hl.agg.collect(elt), t.a)),
[1, 2, None, 3])
def test_agg_call_stats(self):
t = hl.Table.parallelize([
hl.struct(c=hl.call(0, 0)),
hl.struct(c=hl.call(0, 1)),
hl.struct(c=hl.call(0, 2, phased=True)),
hl.struct(c=hl.call(1)),
hl.struct(c=hl.call(0)),
hl.struct(c=hl.call())
])
actual = t.aggregate(hl.agg.call_stats(t.c, ['A', 'T', 'G']))
expected = hl.struct(AC=[5, 2, 1],
AF=[5.0 / 8.0, 2.0 / 8.0, 1.0 / 8.0],
AN=8,
homozygote_count=[1, 0, 0])
        # assertTrue(x, msg) always passes when x is a Table; compare the aggregated values instead
        self.assertEqual(actual, hl.eval(expected))
def test_hardy_weinberg_test(self):
mt = hl.import_vcf(resource('HWE_test.vcf'))
mt = mt.select_rows(**hl.agg.hardy_weinberg_test(mt.GT))
rt = mt.rows()
expected = hl.Table.parallelize([
hl.struct(
locus=hl.locus('20', pos),
alleles=alleles,
het_freq_hwe=r,
p_value=p)
for (pos, alleles, r, p) in [
(1, ['A', 'G'], 0.0, 0.5),
(2, ['A', 'G'], 0.25, 0.5),
(3, ['T', 'C'], 0.5357142857142857, 0.21428571428571427),
(4, ['T', 'A'], 0.5714285714285714, 0.6571428571428573),
(5, ['G', 'A'], 0.3333333333333333, 0.5)]],
key=['locus', 'alleles'])
self.assertTrue(rt.filter(rt.locus.position != 6)._same(expected))
rt6 = rt.filter(rt.locus.position == 6).collect()[0]
self.assertEqual(rt6['p_value'], 0.5)
self.assertTrue(math.isnan(rt6['het_freq_hwe']))
def test_hw_func_and_agg_agree(self):
mt = hl.import_vcf(resource('sample.vcf'))
mt = mt.annotate_rows(
stats=hl.agg.call_stats(mt.GT, mt.alleles),
hw=hl.agg.hardy_weinberg_test(mt.GT))
mt = mt.annotate_rows(
hw2=hl.hardy_weinberg_test(mt.stats.homozygote_count[0],
mt.stats.AC[1] - 2 * mt.stats.homozygote_count[1],
mt.stats.homozygote_count[1]))
rt = mt.rows()
self.assertTrue(rt.all(rt.hw == rt.hw2))
def test_write_stage_locally(self):
mt = self.get_mt()
f = new_temp_file(extension='mt')
mt.write(f, stage_locally=True)
mt2 = hl.read_matrix_table(f)
self.assertTrue(mt._same(mt2))
def test_nulls_in_distinct_joins(self):
# MatrixAnnotateRowsTable uses left distinct join
mr = hl.utils.range_matrix_table(7, 3, 4)
matrix1 = mr.key_rows_by(new_key=hl.cond((mr.row_idx == 3) | (mr.row_idx == 5),
hl.null(hl.tint32), mr.row_idx))
matrix2 = mr.key_rows_by(new_key=hl.cond((mr.row_idx == 4) | (mr.row_idx == 6),
hl.null(hl.tint32), mr.row_idx))
joined = matrix1.select_rows(idx1=matrix1.row_idx,
idx2=matrix2.rows()[matrix1.new_key].row_idx)
def row(new_key, idx1, idx2):
return hl.Struct(new_key=new_key, idx1=idx1, idx2=idx2)
expected = [row(0, 0, 0),
row(1, 1, 1),
row(2, 2, 2),
row(4, 4, None),
row(6, 6, None),
row(None, 3, None),
row(None, 5, None)]
self.assertEqual(joined.rows().collect(), expected)
# union_cols uses inner distinct join
matrix1 = matrix1.annotate_entries(ridx=matrix1.row_idx,
cidx=matrix1.col_idx)
matrix2 = matrix2.annotate_entries(ridx=matrix2.row_idx,
cidx=matrix2.col_idx)
matrix2 = matrix2.key_cols_by(col_idx=matrix2.col_idx + 3)
expected = hl.utils.range_matrix_table(3, 6, 1)
expected = expected.key_rows_by(new_key=expected.row_idx)
expected = expected.annotate_entries(ridx=expected.row_idx,
cidx=expected.col_idx % 3)
self.assertTrue(matrix1.union_cols(matrix2)._same(expected))
def test_row_joins_into_table(self):
rt = hl.utils.range_matrix_table(9, 13, 3)
mt1 = rt.key_rows_by(idx=rt.row_idx)
mt1 = mt1.select_rows(v=mt1.idx + 2)
mt2 = rt.key_rows_by(idx=rt.row_idx, idx2=rt.row_idx + 1)
mt2 = mt2.select_rows(v=mt2.idx + 2)
t1 = hl.utils.range_table(10, 3)
t2 = t1.key_by(t1.idx, idx2=t1.idx + 1)
t1 = t1.select(v=t1.idx + 2)
t2 = t2.select(v=t2.idx + 2)
tinterval1 = t1.key_by(k=hl.interval(t1.idx, t1.idx, True, True))
tinterval1 = tinterval1.select(v=tinterval1.idx + 2)
tinterval2 = t2.key_by(k=hl.interval(t2.key, t2.key, True, True))
tinterval2 = tinterval2.select(v=tinterval2.idx + 2)
values = [hl.Struct(v=i + 2) for i in range(9)]
# join on mt row key
self.assertEqual(t1.index(mt1.row_key).collect(), values)
self.assertEqual(t2.index(mt2.row_key).collect(), values)
self.assertEqual(t1.index(mt1.idx).collect(), values)
self.assertEqual(t2.index(mt2.idx, mt2.idx2).collect(), values)
self.assertEqual(t1.index(mt2.idx).collect(), values)
with self.assertRaises(hl.expr.ExpressionException):
t2.index(mt2.idx).collect()
with self.assertRaises(hl.expr.ExpressionException):
t2.index(mt1.row_key).collect()
# join on not mt row key
self.assertEqual(t1.index(mt1.v).collect(), [hl.Struct(v=i + 2) for i in range(2, 10)] + [None])
self.assertEqual(t2.index(mt2.idx2, mt2.v).collect(), [hl.Struct(v=i + 2) for i in range(1, 10)])
with self.assertRaises(hl.expr.ExpressionException):
t2.index(mt2.v).collect()
# join on interval of first field of mt row key
self.assertEqual(tinterval1.index(mt1.idx).collect(), values)
self.assertEqual(tinterval1.index(mt1.row_key).collect(), values)
self.assertEqual(tinterval1.index(mt2.idx).collect(), values)
with self.assertRaises(hl.expr.ExpressionException):
tinterval1.index(mt2.row_key).collect()
with self.assertRaises(hl.expr.ExpressionException):
tinterval2.index(mt2.idx).collect()
with self.assertRaises(hl.expr.ExpressionException):
tinterval2.index(mt2.row_key).collect()
with self.assertRaises(hl.expr.ExpressionException):
tinterval2.index(mt2.idx, mt2.idx2).collect()
def test_refs_with_process_joins(self):
mt = hl.utils.range_matrix_table(10, 10)
mt = mt.annotate_entries(
a_literal=hl.literal(['a']),
a_col_join=hl.is_defined(mt.cols()[mt.col_key]),
a_row_join=hl.is_defined(mt.rows()[mt.row_key]),
an_entry_join=hl.is_defined(mt[mt.row_key, mt.col_key]),
the_global_failure=hl.cond(True, mt.globals, hl.null(mt.globals.dtype)),
the_row_failure=hl.cond(True, mt.row, hl.null(mt.row.dtype)),
the_col_failure=hl.cond(True, mt.col, hl.null(mt.col.dtype)),
the_entry_failure=hl.cond(True, mt.entry, hl.null(mt.entry.dtype)),
)
mt.count()
def test_aggregate_localize_false(self):
dim1, dim2 = 10, 10
mt = hl.utils.range_matrix_table(dim1, dim2)
mt = mt.annotate_entries(x = mt.aggregate_rows(hl.agg.max(mt.row_idx), _localize=False)
+ mt.aggregate_cols(hl.agg.max(mt.col_idx), _localize=False)
+ mt.aggregate_entries(hl.agg.max(mt.row_idx * mt.col_idx), _localize=False)
)
assert mt.x.take(1)[0] == (dim1 - 1) + (dim2 - 1) + (dim1 -1) * (dim2 - 1)
def test_agg_cols_filter(self):
t = hl.utils.range_matrix_table(1, 10)
tests = [(agg.filter(t.col_idx > 7,
agg.collect(t.col_idx + 1).append(0)),
[9, 10, 0]),
(agg.filter(t.col_idx > 7,
agg.explode(lambda elt: agg.collect(elt + 1).append(0),
[t.col_idx, t.col_idx + 1])),
[9, 10, 10, 11, 0]),
(agg.filter(t.col_idx > 7,
agg.group_by(t.col_idx % 3,
hl.array(agg.collect_as_set(t.col_idx + 1)).append(0))),
{0: [10, 0], 2: [9, 0]})
]
for aggregation, expected in tests:
self.assertEqual(t.select_rows(result = aggregation).result.collect()[0], expected)
def test_agg_cols_explode(self):
t = hl.utils.range_matrix_table(1, 10)
tests = [(agg.explode(lambda elt: agg.collect(elt + 1).append(0),
hl.cond(t.col_idx > 7, [t.col_idx, t.col_idx + 1], hl.empty_array(hl.tint32))),
[9, 10, 10, 11, 0]),
(agg.explode(lambda elt: agg.explode(lambda elt2: agg.collect(elt2 + 1).append(0),
[elt, elt + 1]),
hl.cond(t.col_idx > 7, [t.col_idx, t.col_idx + 1], hl.empty_array(hl.tint32))),
[9, 10, 10, 11, 10, 11, 11, 12, 0]),
(agg.explode(lambda elt: agg.filter(elt > 8,
agg.collect(elt + 1).append(0)),
hl.cond(t.col_idx > 7, [t.col_idx, t.col_idx + 1], hl.empty_array(hl.tint32))),
[10, 10, 11, 0]),
(agg.explode(lambda elt: agg.group_by(elt % 3,
agg.collect(elt + 1).append(0)),
hl.cond(t.col_idx > 7,
[t.col_idx, t.col_idx + 1],
hl.empty_array(hl.tint32))),
{0: [10, 10, 0], 1: [11, 0], 2:[9, 0]})
]
for aggregation, expected in tests:
self.assertEqual(t.select_rows(result = aggregation).result.collect()[0], expected)
def test_agg_cols_group_by(self):
t = hl.utils.range_matrix_table(1, 10)
tests = [(agg.group_by(t.col_idx % 2,
hl.array(agg.collect_as_set(t.col_idx + 1)).append(0)),
{0: [1, 3, 5, 7, 9, 0], 1: [2, 4, 6, 8, 10, 0]}),
(agg.group_by(t.col_idx % 3,
agg.filter(t.col_idx > 7,
hl.array(agg.collect_as_set(t.col_idx + 1)).append(0))),
{0: [10, 0], 1: [0], 2: [9, 0]}),
(agg.group_by(t.col_idx % 3,
agg.explode(lambda elt: agg.collect(elt + 1).append(0),
hl.cond(t.col_idx > 7,
[t.col_idx, t.col_idx + 1],
hl.empty_array(hl.tint32)))),
{0: [10, 11, 0], 1: [0], 2:[9, 10, 0]}),
]
for aggregation, expected in tests:
self.assertEqual(t.select_rows(result = aggregation).result.collect()[0], expected)
def localize_entries_with_both_none_is_rows_table(self):
mt = hl.utils.range_matrix_table(10, 10)
mt = mt.select_entries(x = mt.row_idx * mt.col_idx)
localized = mt.localize_entries(entries_array_field_name=None,
columns_array_field_name=None)
rows_table = mt.rows()
assert rows_table.collect() == localized.collect()
assert rows_table.globals_table().collect() == localized.globals_table().collect()
def localize_entries_with_none_cols_adds_no_globals(self):
mt = hl.utils.range_matrix_table(10, 10)
mt = mt.select_entries(x = mt.row_idx * mt.col_idx)
localized = mt.localize_entries(entries_array_field_name=Env.get_uid(),
columns_array_field_name=None)
assert mt.globals_table().collect() == localized.globals_table().collect()
def localize_entries_with_none_entries_changes_no_rows(self):
mt = hl.utils.range_matrix_table(10, 10)
mt = mt.select_entries(x = mt.row_idx * mt.col_idx)
localized = mt.localize_entries(entries_array_field_name=None,
columns_array_field_name=Env.get_uid())
rows_table = mt.rows()
assert rows_table.collect() == localized.collect()
def localize_entries_creates_arrays_of_entries_and_array_of_cols(self):
mt = hl.utils.range_matrix_table(10, 10)
mt = mt.select_entries(x = mt.row_idx * mt.col_idx)
localized = mt.localize_entries(entries_array_field_name='entries',
columns_array_field_name='cols')
        assert [[x * y for x in range(0, 10)] for y in range(0, 10)] == [[e.x for e in row] for row in localized.entries.collect()]
        assert [hl.Struct(col_idx=i) for i in range(0, 10)] == hl.eval(localized.cols)
def test_multi_write(self):
mt = self.get_mt()
f = new_temp_file()
hl.experimental.write_matrix_tables([mt, mt], f)
path1 = f + '0.mt'
path2 = f + '1.mt'
mt1 = hl.read_matrix_table(path1)
mt2 = hl.read_matrix_table(path2)
self.assertTrue(mt._same(mt1))
self.assertTrue(mt._same(mt2))
self.assertTrue(mt1._same(mt2))
def test_matrix_type_equality(self):
mt = hl.utils.range_matrix_table(1, 1)
mt2 = mt.annotate_entries(foo=1)
assert mt._type == mt._type
assert mt._type != mt2._type
def test_entry_filtering(self):
mt = hl.utils.range_matrix_table(10, 10)
mt = mt.filter_entries((mt.col_idx + mt.row_idx) % 2 == 0)
assert mt.aggregate_entries(hl.agg.count()) == 50
assert all(x == 5 for x in mt.annotate_cols(x = hl.agg.count()).x.collect())
assert all(x == 5 for x in mt.annotate_rows(x = hl.agg.count()).x.collect())
mt = mt.unfilter_entries()
assert mt.aggregate_entries(hl.agg.count()) == 100
assert all(x == 10 for x in mt.annotate_cols(x = hl.agg.count()).x.collect())
assert all(x == 10 for x in mt.annotate_rows(x = hl.agg.count()).x.collect())
def test_entry_filter_stats(self):
mt = hl.utils.range_matrix_table(40, 20)
mt = mt.filter_entries((mt.row_idx % 4 == 0) & (mt.col_idx % 4 == 0), keep=False)
mt = mt.compute_entry_filter_stats()
row_expected = hl.dict({True: hl.struct(n_filtered=5,
n_remaining=15,
fraction_filtered=hl.float32(0.25)),
False: hl.struct(n_filtered=0,
n_remaining=20,
fraction_filtered=hl.float32(0.0))})
assert mt.aggregate_rows(hl.agg.all(mt.entry_stats_row == row_expected[mt.row_idx % 4 == 0]))
col_expected = hl.dict({True: hl.struct(n_filtered=10,
n_remaining=30,
fraction_filtered=hl.float32(0.25)),
False: hl.struct(n_filtered=0,
n_remaining=40,
fraction_filtered=hl.float32(0.0))})
assert mt.aggregate_cols(hl.agg.all(mt.entry_stats_col == col_expected[mt.col_idx % 4 == 0]))
def test_annotate_col_agg_lowering(self):
mt = hl.utils.range_matrix_table(10, 10, 2)
mt = mt.annotate_cols(c1=[mt.col_idx, mt.col_idx * 2])
mt = mt.annotate_entries(e1=mt.col_idx + mt.row_idx, e2=[mt.col_idx * mt.row_idx, mt.col_idx * mt.row_idx ** 2])
common_ref = mt.c1[1]
mt = mt.annotate_cols(exploded=hl.agg.explode(lambda e: common_ref + hl.agg.sum(e), mt.e2),
array=hl.agg.array_agg(lambda e: common_ref + hl.agg.sum(e), mt.e2),
filt=hl.agg.filter(mt.e1 < 5, hl.agg.sum(mt.e1) + common_ref),
grouped=hl.agg.group_by(mt.e1 % 5, hl.agg.sum(mt.e1) + common_ref))
mt.cols()._force_count()
def test_annotate_rows_scan_lowering(self):
mt = hl.utils.range_matrix_table(10, 10, 2)
mt = mt.annotate_rows(r1=[mt.row_idx, mt.row_idx * 2])
common_ref = mt.r1[1]
mt = mt.annotate_rows(exploded=hl.scan.explode(lambda e: common_ref + hl.scan.sum(e), mt.r1),
array=hl.scan.array_agg(lambda e: common_ref + hl.scan.sum(e), mt.r1),
filt=hl.scan.filter(mt.row_idx < 5, hl.scan.sum(mt.row_idx) + common_ref),
grouped=hl.scan.group_by(mt.row_idx % 5, hl.scan.sum(mt.row_idx) + common_ref),
an_agg = hl.agg.sum(mt.row_idx * mt.col_idx))
mt.cols()._force_count()
def test_show_runs(self):
mt = self.get_mt()
mt.show()
def test_show_header(self):
mt = hl.utils.range_matrix_table(1, 1)
mt = mt.annotate_entries(x=1)
mt = mt.key_cols_by(col_idx=mt.col_idx + 10)
def assert_res(x):
expect = ('+---------+-------+\n'
'| row_idx | 10.x |\n'
'+---------+-------+\n'
'| int32 | int32 |\n'
'+---------+-------+\n'
'| 0 | 1 |\n'
'+---------+-------+\n')
s = str(x)
assert s == expect
mt.show(handler=assert_res)
def test_partitioned_write(self):
mt = hl.utils.range_matrix_table(40, 3, 5)
def test_parts(parts, expected=mt):
parts = [
hl.Interval(start=hl.Struct(row_idx=s), end=hl.Struct(row_idx=e),
includes_start=_is, includes_end=ie)
for (s, e, _is, ie) in parts
]
tmp = new_temp_file(extension='mt')
mt.write(tmp, _partitions=parts)
mt2 = hl.read_matrix_table(tmp)
self.assertEqual(mt2.n_partitions(), len(parts))
self.assertTrue(mt2._same(expected))
test_parts([
(0, 40, True, False)
])
test_parts([
(-34, -31, True, True),
(-30, 9, True, True),
(10, 107, True, True),
(108, 1000, True, True)
])
test_parts([
(0, 5, True, False),
(35, 40, True, True)
],
mt.filter_rows((mt.row_idx < 5) | (mt.row_idx >= 35)))
test_parts([
(5, 35, True, False)
],
mt.filter_rows((mt.row_idx >= 5) & (mt.row_idx < 35)))
def test_partitioned_write_coerce(self):
mt = hl.import_vcf(resource('sample.vcf'))
parts = [
hl.Interval(hl.Locus('20', 10277621), hl.Locus('20', 11898992))
]
tmp = new_temp_file(extension='mt')
mt.write(tmp, _partitions=parts)
mt2 = hl.read_matrix_table(tmp)
assert mt2.n_partitions() == len(parts)
assert hl.filter_intervals(mt, parts)._same(mt2)
def test_overwrite(self):
mt = hl.utils.range_matrix_table(1, 1)
f = new_temp_file(extension='mt')
mt.write(f)
with pytest.raises(hl.utils.FatalError, match= "file already exists"):
mt.write(f)
mt.write(f, overwrite=True)
def test_invalid_metadata(self):
with pytest.raises(hl.utils.FatalError, match='metadata does not contain file version'):
hl.read_matrix_table(resource('0.1-1fd5cc7.vds'))
def test_legacy_files_with_required_globals(self):
hl.read_table(resource('required_globals.ht'))._force_count()
hl.read_matrix_table(resource('required_globals.mt'))._force_count_rows()
def test_matrix_native_write_range(self):
mt = hl.utils.range_matrix_table(11, 3, n_partitions=3)
f = new_temp_file()
mt.write(f)
assert hl.read_matrix_table(f)._same(mt)
def test_matrix_multi_write_range(self):
mts = [
hl.utils.range_matrix_table(11, 27, n_partitions=10),
hl.utils.range_matrix_table(11, 3, n_partitions=10)
]
f = new_temp_file()
hl.experimental.write_matrix_tables(mts, f)
assert hl.read_matrix_table(f + '0.mt')._same(mts[0])
assert hl.read_matrix_table(f + '1.mt')._same(mts[1])
def test_key_cols_by_extract_issue(self):
mt = hl.utils.range_matrix_table(1000, 100)
mt = mt.key_cols_by(col_id = hl.str(mt.col_idx))
mt = mt.add_col_index()
mt.show()
def test_filtered_entries_group_rows_by(self):
mt = hl.utils.range_matrix_table(1, 1)
mt = mt.filter_entries(False)
mt = mt.group_rows_by(x=mt.row_idx // 10).aggregate(c=hl.agg.count())
assert mt.entries().collect() == [hl.Struct(x=0, col_idx=0, c=0)]
def test_filtered_entries_group_cols_by(self):
mt = hl.utils.range_matrix_table(1, 1)
mt = mt.filter_entries(False)
mt = mt.group_cols_by(x=mt.col_idx // 10).aggregate(c=hl.agg.count())
assert mt.entries().collect() == [hl.Struct(row_idx=0, x=0, c=0)]
def test_read_write_all_types():
mt = create_all_values_matrix_table()
tmp_file = new_temp_file()
mt.write(tmp_file)
assert hl.read_matrix_table(tmp_file)._same(mt) | mit | 8,413,379,200,223,143,000 | 42.698292 | 127 | 0.546869 | false |
PaulSD/tendenci_autox | tendenci_autox/models.py | 1 | 70574 | from datetime import datetime, date, timedelta
import re
from django.db import models, transaction, IntegrityError
from django.db.models import Q
from django.contrib.auth.models import User
from django.conf import settings
from django.forms import ValidationError
from django.core.exceptions import PermissionDenied
from tendenci.apps.events.models import Event as TendenciEvent, Registrant
from . import config
from .utils import safe_log
import logging
log = logging.getLogger(__name__)
# Model.clean() is called automatically by ModelForm. If a Model is manipulated without using a
# ModelForm, then Model.clean() must be called manually.
# Model.clean() only has access to latest Model field values. If a Model is changed, clean() is
# called after the Model field values have been updated, and it does not have access to the previous
# field values. To handle Permission checks and other cases where validation of new field values
# may depend on the previous field values without splitting the validation logic between models.py
# and forms.py, form_clean() and/or form_clean_<field>() methods are implemented in some of the
# Models below and are called by forms.AutoxForm.
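# Illustrative sketch only (not part of the original module): roughly how a ModelForm such as
# forms.AutoxForm might gather the request context and delegate to a model's form_clean().
# The helper name and the way the context is collected are assumptions; event-independent
# models (e.g. Permission) simply ignore event_id via **kwargs.
def _example_invoke_form_clean(instance, request, event_id, new_fields, action='update'):
    return instance.form_clean(
        client_ip=request.META.get('REMOTE_ADDR'),
        js_id=None,  # optional client-side identifier, used only for logging
        auth_user=request.user,
        event_id=event_id,
        action=action,
        create=(action == 'create'),
        update=(action == 'update'),
        delete=(action == 'delete'),
        new_fields=new_fields,
    )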
# Extend the Tendenci Event model to override the default string representation
class Event(TendenciEvent):
class Meta:
proxy = True
@property
def date(self):
return self.start_dt.strftime('%x')
def __str__(self):
return '%s %s'%(self.date, self.title)
def get_autox_events():
return Event.objects.filter(title__iregex=config.event_title_regex).order_by('start_dt', 'id')
def get_past_autox_events():
return get_autox_events().filter(end_dt__lt=date.today())
def get_current_and_future_autox_events():
return get_autox_events().filter(end_dt__gte=date.today())
event_title_regex = re.compile(config.event_title_regex, re.IGNORECASE)
def is_autox_event(event):
return event_title_regex.search(event.title)
def is_event_today(event):
today = date.today()
return (event.start_dt.date() <= today and event.end_dt.date() >= today)
def is_past_event(event):
return event.end_dt.date() < date.today()
def is_future_event(event):
return event.start_dt.date() > date.today()
# Returns the most recent AutoX Event that started today before the current time
# If none, returns the next AutoX Event that will start today
# If none, returns the most recent past AutoX Event
# If none, returns the next upcoming AutoX Event
# If no AutoX Events exist, returns None
def get_current_autox_event():
now = datetime.now()
today = date.today()
event = get_autox_events().filter(start_dt__lt=now, start_dt__gte=today
).order_by('-start_dt', 'id').first()
if event is not None:
return event
tomorrow = today + timedelta(days=1)
event = get_autox_events().filter(start_dt__lt=tomorrow, start_dt__gte=date.today()
).order_by('start_dt', 'id').first()
if event is not None:
return event
event = get_autox_events().filter(start_dt__lt=now).order_by('-start_dt', 'id').first()
if event is not None:
return event
return get_autox_events().order_by('start_dt', 'id').first()
# Returns the previous AutoX Event before the specified event, or None
def get_previous_autox_event(event):
# Handle the case where multiple Events start at the same time
prev_event = get_autox_events().filter(start_dt=event.start_dt, id__lt=event.id
).order_by('-id').first()
if prev_event is not None:
return prev_event
return get_autox_events().filter(start_dt__lt=event.start_dt
).order_by('-start_dt', '-id').first()
# Returns the next AutoX Event after the specified event, or None
def get_next_autox_event(event):
# Handle the case where multiple Events start at the same time
next_event = get_autox_events().filter(start_dt=event.start_dt, id__gt=event.id
).order_by('id').first()
if next_event is not None:
return next_event
return get_autox_events().filter(start_dt__gt=event.start_dt).order_by('start_dt', 'id').first()
# Extend the Tendenci Registrant model to make the name more intuitive and make it easier to access
# relevant custom registration fields.
class Driver(Registrant):
class Meta:
proxy = True
# Strip leading and trailing whitespace from all attributes
def __getattribute__(self, attrname):
attr = super(Driver, self).__getattribute__(attrname)
if type(attr) is str:
attr = attr.strip()
return attr
# Convenience methods for accessing event via registration
@property
def event(self):
return self.registration.event
@property
def event_id(self):
return self.registration.event_id
# Custom fields
@property
def display_name(self):
return '%s %s.'%(self.first_name, self.last_name[0:1])
def get_custom_reg_field(self, field, default=None):
# Retrieve all custom fields at the same time and cache them to avoid doing separate
# database queries for each property.
# We could use .prefetch_related(Prefetch(...)) in get_drivers_for_event() to avoid doing
# separate database queries for each Driver, but that is significantly more complicated to
# implement. We might also be able to use .annotate(Subquery(...)), but this is not
# straightforward either.
if hasattr(self, 'custom_reg_fields') and self.custom_reg_fields is not None:
return self.custom_reg_fields.get(field, default)
# See registrant_roster() in tendenci/apps/events/views.py for a more generic filter
custom_reg_fields = self.custom_reg_form_entry.field_entries.filter(field__label__in=[
'Car #', 'Car Year', 'Car Make', 'Car Model', 'Car Color', 'Autocross Class',
]).values_list('field__label', 'value')
self.custom_reg_fields = dict(custom_reg_fields)
return self.custom_reg_fields.get(field, default)
@property
def car_number(self):
return self.get_custom_reg_field('Car #')
@property
def car_year(self):
return self.get_custom_reg_field('Car Year')
@property
def car_make(self):
return self.get_custom_reg_field('Car Make')
@property
def car_model(self):
return self.get_custom_reg_field('Car Model')
@property
def car_color(self):
return self.get_custom_reg_field('Car Color')
@property
def car_class(self):
# Return only the abbreviation (strip the description)
return self.get_custom_reg_field('Autocross Class', '').split(' ', 1)[0]
def __str__(self):
#return '%s name: %s car_num: %s'%(self.id, self.display_name, self.car_number)
return '%s - %s - %s'%(self.car_number, self.car_model, self.car_color)
def get_drivers_for_event(event):
return Driver.objects.filter(registration__event=event, cancel_dt=None)
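# Hedged sketch of the prefetch_related() alternative mentioned in Driver.get_custom_reg_field()
# above; it is not used by this module.  Note that get_custom_reg_field() would then need to read
# self.custom_reg_form_entry.field_entries.all() and match labels in Python instead of calling
# .filter(), otherwise the prefetch cache is bypassed, which is part of why the comment above
# calls this approach more complicated.
def get_drivers_for_event_with_prefetch(event):
    return Driver.objects.filter(registration__event=event, cancel_dt=None
        ).prefetch_related('custom_reg_form_entry__field_entries__field')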
def get_drivers_for_user(user):
if user.is_anonymous():
return Driver.objects.none()
return Driver.objects.filter(user=user, cancel_dt=None)
def get_driver_for_user_at_event(user, event):
if user.is_anonymous():
return None
return Driver.objects.filter(registration__event=event, user=user, cancel_dt=None).first()
def is_autox_registrant(registrant):
return is_autox_event(registrant.registration.event)
# Driver model does not currently support editing
# (If necessary, use the standard Tendenci Registrant model in the Events app for editing)
# If event is not None, Permissions are deleted if/when the Event is deleted.
# For permanent Events, Permissions could be deleted at some point (maybe 24 hours) after the Event
# ends. We do not currently have any automated mechanism to do that, but it should be easy to write
# a manage.py script that can be run from a cron job to do that.
class Permission(models.Model):
class Meta:
# The database does not consider NULL values to be equal when checking uniqueness, so this
# would permit redundant user+role when event=NULL. Django doesn't natively support partial
# indexes (which handle that case), so they must be implemented manually in a migration:
# migrations.RunSQL('CREATE UNIQUE INDEX tendenci_autox_permission_user_event_role_uniq ON tendenci_autox_permission (user_id, event_id, role) WHERE event_id IS NOT NULL'),
# migrations.RunSQL('CREATE UNIQUE INDEX tendenci_autox_permission_user_role_uniq ON tendenci_autox_permission (user_id, role) WHERE event_id IS NULL'),
#unique_together = ('user', 'event', 'role')
pass
user = models.ForeignKey(User, on_delete=models.CASCADE)
# Multiple roles can be assigned to a single user using multiple Permissions
ROLE_CHOICES = (
('ADMIN', 'Admin'), # Can manage Permissions for this event
('JUDGE', 'Judge'), # Can manage anything except Permissions
('LINE', 'Line Worker'), # Can manage Runs
('COURSE', 'Course Worker'), # Can create RunStatuses
('TIME', 'Timing Equipment'), # Can manage Times
('GEOLOC', 'Geolocation Helper'), # Can manage all Geolocations/Sets
)
role = models.CharField(max_length=10, choices=ROLE_CHOICES)
# If event is None, user has this role on all events
event = models.ForeignKey(Event, blank=True, null=True, default=None, on_delete=models.CASCADE)
def form_clean(self, client_ip, js_id, auth_user, action, create, update, delete, new_fields, **kwargs):
if auth_user.is_anonymous():
raise PermissionDenied
if (not create and self.event_id is None) or (not delete and new_fields['event'] is None):
if not can_edit_all_events_permissions(auth_user):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s all events Permission: %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, self, safe_log(new_fields, 1024)))
raise PermissionDenied
else:
if ((not create and not can_edit_permissions_for_event(auth_user, self.event_id)) or # noqa:E271
(not delete and not can_edit_permissions_for_event(auth_user, new_fields['event']))):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Permission: %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, self, safe_log(new_fields, 1024)))
raise PermissionDenied
return new_fields
def __str__(self):
return '%s'%self.id
def get_active_users():
# For use with ModelChoiceField queryset
return User.objects.filter(is_active=True).order_by('username')
def has_permission(auth_user, event, roles):
if auth_user.is_anonymous():
return False
if type(roles) is list:
roles_q = Q(role__in=roles)
else:
roles_q = Q(role=roles)
return Permission.objects.filter(
Q(user=auth_user),
Q(event=None)|Q(event=event),
roles_q
).exists()
def get_all_events_permissions():
return Permission.objects.filter(event=None)
def can_edit_all_events_permissions(auth_user):
if auth_user.is_anonymous():
return False
if auth_user.is_superuser:
return True
return Permission.objects.filter(user=auth_user, event=None, role='ADMIN').exists()
def get_permissions_for_event(event):
return Permission.objects.filter(Q(event=None)|Q(event=event)).order_by('event_id')
def can_edit_permissions_for_event(auth_user, event):
return (auth_user.is_superuser or has_permission(auth_user, event, 'ADMIN'))
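# Hedged sketch (not part of the original module) of the cleanup described above for Permissions
# whose Event has ended but will never be deleted: a manage.py command or cron job could call
# something like this.  The function name and the 24 hour grace period are assumptions.
def purge_permissions_for_ended_events(grace=timedelta(hours=24)):
    cutoff = datetime.now() - grace
    return Permission.objects.filter(event__isnull=False, event__end_dt__lt=cutoff).delete()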
# Standard database locking techniques (optimistic locking, row-level pessimistic locking, and
# table-level pessimistic locking) aren't a good fit for the use cases here.
#
# For the Event, Driver, and Permission Models, which are updated using traditional non-real-time
# Forms, pessimistic locking provides little or no value, but we could use optimistic locking to
# warn users about conflicting changes. However, these Models are changed infrequently and
# conflicting changes are unlikely to cause significant problems, so locking is probably not worth
# the effort.
#
# For the other Models, which are updated using Django Channels, real-time UI updates should enable
# users to easily identify and address conflicting changes, so locking is not necessary in many
# cases. In addition, most changes to these Models should be creates, which cannot benefit from
# optimistic locking or row-level pessimistic locking, so locking wouldn't help anyway.
#
# However, there are several cases where locking is necessary to avoid spurious errors, most notably
# when automatically setting the 'order' field in the Run and Time Models. This field must be
# unique and must be set based on the values used in adjacent rows, which causes several problems.
# For example, when inserting a new row that should be ordered after all existing rows, the code
# must select the highest existing 'order' from the database, then calculate the new 'order' from
# it, then insert the new row. If two create operations are processed in parallel, they could both
# select the same 'order' from the database and attempt to insert their rows with the same 'order',
# causing a unique constraint error that would be difficult to handle gracefully and could be very
# confusing for the user.
# Optimistic locking cannot help prevent this because it cannot block reads or insertion of new
# rows.
# Unfortunately, row-level pessimistic locking (select_for_update() or "SELECT ... FOR UPDATE")
# behaves as a non-atomic read-then-lock and only blocks locks and writes and does not block reads,
# so it also cannot block these reads or inserts. In addition, when inserting between two existing
# rows, both adjacent rows would need to be locked, but row-level pessimistic locking non-atomically
# reads-then-locks each selected row sequentially, making it impossible to guarantee selection and
# locking of adjacent rows, and making it impossible to prevent deadlocks if some adjacencies must
# be selected in increasing order and others must be selected in decreasing order.
# We could use table-level pessimistic locking, but this would lock far more than necessary and
# would impact performance. For example, this would block all table reads, but there is no need to
# block any reads that are not used to calculate new 'order' values. In addition, Django does not
# natively support table-level pessimistic locking, and the database syntax and semantics for it
# vary widely, so the code for this could be ugly.
# A database sequence could be used to assign unique values to new rows, but sequence implementation
# is also database specific, and this would only solve the problem when inserting rows that should
# be ordered after all existing rows.
# PostgreSQL advisory locks could also be a good solution, but they are also database specific, and
# it is possible to encounter lock key conflicts if multiple applications or modules use advisory
# locks on the same database.
#
# To handle these cases, a special Model is defined for managing more generic mutexes (locks). Each
# row (Model instance) represents a mutex, and row-level pessimistic locking is used as the actual
# locking mechanism.
# This provides database-agnostic locking (for any database that supports row-level pessimistic
# locking; see the select_for_update() documentation), and allows us to arbitrarily define the
# scope/impact of each lock (via the behavior of the code that uses each lock).
class Mutex(models.Model):
class Meta:
unique_together = ('event', 'key')
event = models.ForeignKey(Event, on_delete=models.CASCADE)
key = models.CharField(max_length=30)
# This must be called within a "with transaction.atomic():" block. The mutex will remain locked
# until the transaction ends, then it will be automatically unlocked.
@classmethod
def lock(cls, event_id, key):
try:
cls.objects.select_for_update().values_list('id', flat=True).get(event_id=event_id, key=key)
return
except cls.DoesNotExist:
pass
# Django Model.objects.filter() supports event=<object or int>, but Model.objects.create()
# (or just Model()) requires event=<object> or event_id=<int>. For now, we require
# event_id=<int>, although we could support event=<object or int> using something like:
#try:
# kwargs = {'event_id': int(event)}
#except TypeError:
# kwargs = {'event': event}
try:
cls.objects.create(event_id=event_id, key=key)
except IntegrityError:
# Row was added after get() and before create()
pass
# If an exception is raised again, then either the new row was deleted after create() and
# before get() (extremely unlikely), or something is just broken. In either case, we should
# let the exception propagate.
cls.objects.select_for_update().values_list('id', flat=True).get(event_id=event_id, key=key)
# This may be called by get_unfinalized() to remove Mutex records that are no longer needed.
@classmethod
def clear(cls, event):
cls.objects.filter(event=event).delete()
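# Hedged usage sketch (not part of the original module): Mutex.lock() must run inside
# transaction.atomic(); the underlying row lock is released automatically when the transaction
# commits or rolls back, and only code paths that take the same (event, key) mutex are serialized.
def _example_mutex_usage(event_id):
    with transaction.atomic():
        Mutex.lock(event_id, 'Run.order')
        # ... read the neighboring Run.order values and create/update Runs here ...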
# DurationField that will serialize into seconds+microseconds instead of HH:MM:SS.UUUUUU
# This should really be implemented in the presentation layer, not in the model layer. However, in
# Django <1.11, there is no good way to do this in the presentation layer.
# (See https://github.com/django/django/commit/c1b6f554e405fe733e8d80f7e3d77c277810e707 )
class ShortDurationField(models.DurationField):
def value_to_string(self, obj):
val = self.value_from_object(obj)
return '' if val is None else '{:.3f}'.format(val.total_seconds())
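# Worked illustration (helper not in the original file): a 1 minute 5.432 second run is rendered
# by ShortDurationField.value_to_string() as plain seconds, '65.432', not '0:01:05.432000'.
def _example_short_duration_rendering():
    val = timedelta(minutes=1, seconds=5, milliseconds=432)
    return '{:.3f}'.format(val.total_seconds())  # -> '65.432'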
class Result(models.Model):
class Meta:
ordering = ['id'] # Default sort order
# Event is also available via driver.registration, but we denormalize this field to improve
# performance given that we always filter Runs by Event. Auto-populated from driver.
event = models.ForeignKey(Event, on_delete=models.CASCADE)
heat = models.PositiveSmallIntegerField(default=0)
run_set = models.PositiveSmallIntegerField(default=0)
driver = models.ForeignKey(Driver, on_delete=models.CASCADE)
start_timestamp = models.DateTimeField(blank=True, null=True, default=None)
finish_timestamp = models.DateTimeField(blank=True, null=True, default=None)
run_time = ShortDurationField(blank=True, null=True, default=None)
cones = models.PositiveSmallIntegerField(default=0)
STATUS_CHOICES = (
('', ''), # Good Run
('RERUN', 'Rerun'), # Bad Run, not Driver's fault, will Re-Run
('DNF', 'Did Not Finish'), # Bad Run, Driver's fault, will not Re-Run
)
status = models.CharField(max_length=10, blank=True, choices=STATUS_CHOICES, default='')
# Stored in case the algorithm for calculating penalties changes. Auto-populated.
run_time_with_penalties = ShortDurationField(blank=True, null=True, default=None)
def form_clean(self, client_ip, js_id, auth_user, event_id, action, create, update, delete, new_fields, **kwargs):
if not can_edit_results_for_event(auth_user, event_id):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Result for Event '%s': %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, self, safe_log(new_fields, 1024)))
raise PermissionDenied
if not create and event_id != self.event_id:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Result with mis-matched Event ('%s' vs '%s'): %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, self.event_id, self, safe_log(new_fields, 1024)))
raise PermissionDenied
if not delete:
if new_fields['driver'].event_id != event_id:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s a Result associated with a Driver for the wrong Event ('%s'): %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, self, safe_log(new_fields, 1024)))
raise PermissionDenied
self.event_id = event_id
return new_fields
def save(self, *args, **kwargs):
self.run_time_with_penalties = config.run_time_with_penalties(
self.run_time, self.cones, self.status
)
super(Result, self).save(*args, **kwargs)
def __str__(self):
return '%s'%self.id
def get_results_for_event(event):
return Result.objects.filter(event=event)
def can_edit_results_for_event(auth_user, event):
return has_permission(auth_user, event, 'JUDGE')
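# config.run_time_with_penalties() is defined in this app's config module and is not shown in
# this file.  Hedged illustration only: a fixed per-cone time penalty and no counted time for a
# DNF/rerun is a common autocross convention, but the 2 second figure below is an assumption,
# not the project's actual rule.
def _example_run_time_with_penalties(run_time, cones, status):
    if run_time is None or status in ('DNF', 'RERUN'):
        return None
    return run_time + timedelta(seconds=2) * cones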
# The timer equipment does not know which Driver each timing event is associated with, so we somehow
# need to line up the sequence of timer outputs with the sequence of Drivers. It is too time
# consuming to enter each Driver individually as they start or finish each run, so the lineup needs
# to be mostly pre-populated. However, if a Driver is added or removed or given an out-of-sequence
# run in the middle of a heat, we need to be able to quickly and easily alter the Driver sequence.
# In addition, if any timer output is recorded before any necessary Driver sequence changes have
# been completed, we need to ensure that we can re-associate those previous timing records with the
# correct Drivers and also re-synchronize the future Driver and timer sequences without holding up
# the event.
#
# To accomplish this, we temporarily store the Driver sequence and timer outputs in separate tables
# which a person can manually align (using 'order') before permanently associating them and moving
# them to Results.
# Ideally the Driver sequence should be stored in such a way that we can easily create/manage a
# repeating sequence of Drivers, but can arbitrarily add to or remove from the sequence after some
# number of repeats, and can arbitrarily add or remove individual runs independently of the repeat
# sequence.
#
# To accomplish this, we store a sequence of Drivers for each repeating set of runs, and we allow
# the sequence for the current set to be duplicated for the next set so that mid-heat changes can be
# propagated to subsequent sets.
class Run(models.Model):
class Meta:
unique_together = ('event', 'order')
ordering = ['order'] # Default sort order
# Event is also available via driver.registration, but we denormalize this field to improve
# performance given that we always filter Runs by Event. Auto-populated from driver.
event = models.ForeignKey(Event, on_delete=models.CASCADE)
heat = models.PositiveSmallIntegerField(default=0)
# If run_set is 0 on save() then it will be automatically set to the current run_set or 1. If
# order is changed then run_set is automatically set regardless of its value on save().
run_set = models.PositiveSmallIntegerField(default=0)
# order should be one of: the previous value if updating a Run without changing order, None to
# automatically set order to a value larger than all values on all other Runs for this Event, or
# a value from another Run for this Event to automatically set order to a value immediately
# before that other Run.
order = models.FloatField(blank=True, default=None)
driver = models.ForeignKey(Driver, on_delete=models.CASCADE)
def form_clean(self, client_ip, js_id, auth_user, event_id, action, create, update, delete, new_fields, **kwargs):
if not can_edit_runs_for_event(auth_user, event_id):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Run for Event '%s': %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, self, safe_log(new_fields, 1024)))
raise PermissionDenied
if not create and event_id != self.event_id:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Run with mis-matched Event ('%s' vs '%s'): %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, self.event_id, self, safe_log(new_fields, 1024)))
raise PermissionDenied
if not delete:
if new_fields['driver'].event_id != event_id:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s a Result associated with a Driver for the wrong Event ('%s'): %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, self, safe_log(new_fields, 1024)))
raise PermissionDenied
self.event_id = event_id
self._order_relative = False
if not (new_fields['order'] is None or (update and new_fields['order'] == self.order)):
self._order_relative = True
try:
Run.objects.get(event_id=self.event_id, order=new_fields['order'])
except Run.DoesNotExist:
raise ValidationError('Neighboring Run was moved before creating/updating this Run')
return new_fields
def save(self, *args, **kwargs):
_order_relative = (self._order_relative if hasattr(self, '_order_relative') else False)
if not _order_relative and self.run_set == 0:
self.run_set = get_current_run_set(self.event_id, self.heat)
if self.run_set is None:
self.run_set = 1
if not (_order_relative or self.order is None):
super(Run, self).save(*args, **kwargs)
return
with transaction.atomic():
Mutex.lock(self.event_id, 'Run.order')
if self.order is None:
last_order = Run.objects.filter(event_id=self.event_id
).order_by('-order').values_list('order', flat=True).first()
self.order = last_order+1 if last_order is not None else 1
super(Run, self).save(*args, **kwargs)
return
#elif _order_relative:
neighbors = Run.objects.filter(event_id=self.event_id, order__lte=self.order
).order_by('-order').values('order', 'run_set')[0:2]
if len(neighbors) == 0 or neighbors[0]['order'] != self.order:
# The specified neighboring Run was moved after this thread called form_clean().
# This exception will cause a 500 error, which should cause the client to retry.
# On the next attempt, the client should get a ValidationError from form_clean(),
# which it should interpret as an unrecoverable error.
raise Run.DoesNotExist
if len(neighbors) == 1:
self.order = neighbors[0]['order']-1
self.run_set = neighbors[0]['run_set']
else: #elif len(neighbors) == 2:
next_order = neighbors[0]['order']
prev_order = neighbors[1]['order']
self.order = prev_order+((next_order-prev_order)/2)
# A Run could be added between run_sets either to add a re-run at the end of the
# previous run_set or to add a new Driver at the beginning of the next run_set.
# Since re-runs are much more common than new Driver additions, and new Drivers are
# unlikely to be added at the beginning of a run_set (other than the first run_set),
# use the run_set from the previous Run for new Runs.
# For existing Runs that are moved, use the run_set from the next Run if the Run was
# moved to an earlier position, or use the run_set from the previous Run if the Run
# was moved to a later position.
# If a Driver must be added at the beginning of a run_set then a new Run can added
# and moved after it is created to set the run_set appropriately based on the
# direction it is moved in.
if self.run_set < neighbors[0]['run_set']:
self.run_set = neighbors[1]['run_set']
else:
self.run_set = neighbors[0]['run_set']
super(Run, self).save(*args, **kwargs)
def merge_runstatuses(self):
self.cones = 0
self.status = ''
rss = []
try:
rss = self.runstatus_set.all()
        except Exception:
            # e.g. an unsaved Run cannot use the runstatus_set reverse relation yet
            pass
for rs in rss:
if rs.override:
self.cones = rs.cones
self.status = rs.status
break
self.cones += rs.cones
            # DNF overrides RERUN overrides '' (a good run); self.status starts as ''
            if rs.status == 'DNF':
                self.status = 'DNF'
            if self.status == 'DNF':
                continue
            if rs.status == 'RERUN':
                self.status = 'RERUN'
def __str__(self):
return '%s'%self.id
def get_runs_for_event(event):
return Run.objects.filter(event=event)
def get_runs_and_runstatuses_for_event(event):
return Run.objects.filter(event=event).prefetch_related('runstatus_set')
def can_edit_runs_for_event(auth_user, event):
return has_permission(auth_user, event, ['JUDGE', 'LINE'])
def get_current_run_set(event, heat):
return Run.objects.filter(event=event, heat=heat
).order_by('-run_set').values_list('run_set', flat=True).first()
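# Hedged usage sketch (not part of the original module) of the Run.order semantics documented
# above: order=None appends after all existing Runs for the Event, while passing another Run's
# order value inserts the new Run immediately before it.  form_clean() normally sets
# _order_relative; it is set by hand here only to keep the illustration short.
def _example_insert_run(event_id, driver, before_run=None):
    run = Run(event_id=event_id, driver=driver,
              order=None if before_run is None else before_run.order)
    if before_run is not None:
        run._order_relative = True
    run.save()
    return run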
def duplicate_last_run_set(client_ip, js_id, auth_user, event_id, heat):
if not can_edit_runs_for_event(auth_user, event_id):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to call duplicate_last_run_set() for Event '%s' Heat '%s'"%(auth_user, client_ip, (' '+js_id if js_id else ''), event_id, heat))
raise PermissionDenied
run_set = get_current_run_set(event_id, heat)
if run_set is None:
raise ValidationError('There is no previous run set to duplicate')
with transaction.atomic():
runs = Run.objects.filter(event_id=event_id, heat=heat, run_set=run_set).order_by('order')
# No simple way to have the database deduplicate this, so do it manually
drivers = {}
dup_runs = []
for run in runs:
if run.driver_id not in drivers:
drivers[run.driver_id] = True
dup_runs.append(run)
run_set += 1
last = len(dup_runs) - 1
for i, run in enumerate(dup_runs):
# We could create a new Run from the old Run simply by setting run.pk=None and
# run.order=None then calling run.save(), but that would cause the post_save signal to
# send an "update" message instead of "create". While the current client would handle
# that properly, it is best to avoid sending incorrect/confusing messages, so we instead
# create a new Run from scratch.
new_run = Run(event_id=event_id, heat=heat, run_set=run_set, driver=run.driver)
# Set a hint to allow Signal handlers and/or the UI to defer redundant operations until
# after they process the last instance in this bulk save
new_run.is_bulk_operation = (i != last)
new_run.save()
# To eliminate synchronization issues, simplify idempotence for error handling, and attribute
# changes to individual workers, cones and status changes are temporarily stored in a separate model
# with a one-to-many Run-to-RunStatus relationship. All relevant RunStatuses are later merged into
# a single Result.
class RunStatus(models.Model):
class Meta:
# Django doesn't natively support partial indexes, so they must be implemented manually in a
# migration:
# migrations.RunSQL("CREATE UNIQUE INDEX tendenci_autox_runstatus_run_reporter_uniq ON tendenci_autox_runstatus (run_id, reporter_id) WHERE override = 'f'"),
# migrations.RunSQL("CREATE UNIQUE INDEX tendenci_autox_runstatus_run_override_uniq ON tendenci_autox_runstatus (run_id) WHERE override = 't'"),
pass
run = models.ForeignKey(Run, on_delete=models.CASCADE)
# Auto-populated
reporter = models.ForeignKey(User, on_delete=models.PROTECT)
# To allow a Judge to override all other RunStatuses before the Result is created
override = models.BooleanField(default=False)
cones = models.PositiveSmallIntegerField(default=0)
STATUS_CHOICES = (
('', ''), # Good Run
('RERUN', 'Rerun'), # Bad Run, not Driver's fault, will Re-Run
('DNF', 'Did Not Finish'), # Bad Run, Driver's fault, will not Re-Run
)
status = models.CharField(max_length=10, blank=True, choices=STATUS_CHOICES, default='')
def form_clean(self, client_ip, js_id, auth_user, event_id, action, create, update, delete, new_fields, **kwargs):
if create:
if new_fields['override']:
if not can_edit_results_for_event(auth_user, new_fields['run'].event_id):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Override RunStatus: %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, self, safe_log(new_fields, 1024)))
raise PermissionDenied
else:
if not can_edit_runstatuses_for_event(auth_user, new_fields['run'].event_id):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s RunStatus: %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, self, safe_log(new_fields, 1024)))
raise PermissionDenied
else:
if self.override:
if not can_edit_results_for_event(auth_user, self.run.event_id):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Override RunStatus: %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, self, safe_log(new_fields, 1024)))
raise PermissionDenied
else:
                if auth_user != self.reporter:
if not can_edit_results_for_event(auth_user, self.run.event_id):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s RunStatus for another Reporter: %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, self, safe_log(new_fields, 1024)))
raise PermissionDenied
if update:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s non-Override RunStatus: %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, self, safe_log(new_fields, 1024)))
raise PermissionDenied
if not delete:
if new_fields['run'].event_id != event_id:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s a RunStatus associated with a Run for the wrong Event ('%s'): %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, self, safe_log(new_fields, 1024)))
raise PermissionDenied
if update:
if new_fields['run'] != self.run:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Run associated with RunStatus: %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, self, safe_log(new_fields, 1024)))
raise PermissionDenied
if new_fields['override'] != self.override:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Override flag on RunStatus: %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, self, safe_log(new_fields, 1024)))
raise PermissionDenied
if not delete:
self.reporter = auth_user
return new_fields
def idempotent_create_or_update(self, *args, **kwargs):
# If a delete is processed in parallel with this, it is possible we could throw an error
# instead of either successfully creating and deleting or deleting and creating. However,
# if the client retries, the retry should work as expected.
try:
return super(RunStatus, self).save(*args, **kwargs)
except IntegrityError as ex:
# If this was an update, something is wrong
if self.pk is not None:
raise ex
# If this was a create, this may have been a unique constraint error, so try updating
try:
with transaction.atomic():
if self.override:
kwargs = {'run': self.run, 'override': True}
else:
kwargs = {'run': self.run, 'reporter': self.reporter, 'override': False}
rs = RunStatus.objects.select_for_update().get(**kwargs)
rs.cones = self.cones
rs.status = self.status
rs.save()
return rs
except RunStatus.DoesNotExist:
pass
# Re-raise any other errors
raise ex
def __str__(self):
return '%s'%self.id
def get_runstatuses_for_event(event):
return RunStatus.objects.filter(run__event=event)
def can_edit_runstatuses_for_event(auth_user, event):
return has_permission(auth_user, event, ['JUDGE', 'COURSE'])
class Time(models.Model):
class Meta:
unique_together = ('event', 'order')
ordering = ['order'] # Default sort order
event = models.ForeignKey(Event, on_delete=models.CASCADE)
# order should be one of: the previous value if updating a Time without changing order, None to
# automatically set order to a value larger than all values on all other Times for this Event,
# or a value from another Time for this Event to automatically set order to a value immediately
# before that other Time.
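    # For example, inserting a new Time immediately before an existing Time
    # whose order is 3.0, when the Time before that one has order 2.0, stores
    # order 2.5 for the new Time (see save() below).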
order = models.FloatField(blank=True, default=None)
start_timestamp = models.DateTimeField(blank=True, null=True, default=None)
finish_timestamp = models.DateTimeField(blank=True, null=True, default=None)
run_time = ShortDurationField(blank=True, null=True, default=None)
def form_clean(self, client_ip, js_id, auth_user, event_id, action, create, update, delete, new_fields, **kwargs):
if not can_edit_results_for_event(auth_user, event_id):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Time for Event '%s': %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, self, safe_log(new_fields, 1024)))
raise PermissionDenied
if not create and event_id != self.event_id:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Time with mis-matched Event ('%s' vs '%s'): %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, self.event_id, self, safe_log(new_fields, 1024)))
raise PermissionDenied
if not delete:
self.event_id = event_id
self._order_relative = False
if not (new_fields['order'] is None or (update and new_fields['order'] == self.order)):
self._order_relative = True
try:
Time.objects.get(event_id=self.event_id, order=new_fields['order'])
except Time.DoesNotExist:
raise ValidationError('Neighboring Time was moved before creating/updating this Time')
return new_fields
def save(self, *args, **kwargs):
# record_time_stop() and record_time_restart() below set 'order' appropriately then call
# save() without calling form_clean(). In that case, self._order_relative doesn't exist.
_order_relative = (self._order_relative if hasattr(self, '_order_relative') else False)
if not (_order_relative or self.order is None):
super(Time, self).save(*args, **kwargs)
return
with transaction.atomic():
Mutex.lock(self.event_id, 'Time.order')
if self.order is None:
last_order = Time.objects.filter(event=self.event_id
).order_by('-order').values_list('order', flat=True).first()
self.order = last_order+1 if last_order is not None else 1
super(Time, self).save(*args, **kwargs)
return
#elif self._order_relative:
neighbor_orders = Time.objects.filter(event_id=self.event_id, order__lte=self.order
).order_by('-order').values_list('order', flat=True)[0:2]
if len(neighbor_orders) == 0 or neighbor_orders[0] != self.order:
# The specified neighboring Time was moved after form_clean(). This exception will
# cause a 500 error, which should cause the client to retry. On the next attempt,
# the client should get a ValidationError from form_clean(), which it should
# interpret as an unrecoverable error.
raise Time.DoesNotExist
if len(neighbor_orders) == 1:
self.order = self.order-1
else: #elif len(neighbor_orders) == 2:
next_order = self.order
prev_order = neighbor_orders[1]
self.order = prev_order+((next_order-prev_order)/2)
super(Time, self).save(*args, **kwargs)
def __str__(self):
return '%s event: %s start: %s finish: %s run_time: %s'%(self.id, self.event_id, self.start_timestamp, self.finish_timestamp, self.run_time)
def get_times_for_event(event):
return Time.objects.filter(event=event)
# can_edit_results_for_event() determines permissions for Time objects.
# API for use by the timing equipment
# On errors, timer_interface will retry API calls. To ensure that we do the right thing if a
# redundant call is made due to an error occurring after a Time has been recorded, all of these API
# calls should be idempotent.
# Note that only one instance of timer_interface can read from the timing equipment at a time, and
# timer_interface sends all requests serially, so race conditions are highly unlikely. However, it
# is theoretically possible for the server to end up processing multiple API requests in parallel if
# a request is slow and timer_interface times out and retries before the slow request completes.
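# A typical (hypothetical) sequence of calls for a single run would be:
#   t = record_time_start(event_id, start_ts)
#   record_time_stop(event_id, finish_ts, run_time, time_id=t.id, start_timestamp=start_ts)
# Because each call is idempotent, replaying either call after a timeout simply
# returns the Time that was already recorded.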
def can_use_timer_api(auth_user, event):
return has_permission(auth_user, event, 'TIME')
def timer_api_form_clean(client_ip, js_id, auth_user, event):
if not can_use_timer_api(auth_user, event):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to call timer API"%(auth_user, client_ip, (' '+js_id if js_id else '')))
raise PermissionDenied
if not settings.DEBUG and not is_event_today(event):
log.warning("Request to timer API for event '%s' that is not happening today"%event)
raise ValidationError('Specified Event is not happening today')
def record_time_start(event_id, start_timestamp):
with transaction.atomic():
# Without this, it is theoretically possible for two Times to be created if redundant calls
# are processed in parallel. That is not a big deal since it is highly unlikely to happen
# and a Judge can simply delete the duplicate. However, Time.save() must lock this anyway
# to set 'order', so we may as well lock slightly early to avoid any chance of duplicates.
Mutex.lock(event_id, 'Time.order')
# The combination of event and start_timestamp should be unique, so we use that to identify
# redundant calls and avoid creating duplicate Times.
time = Time.objects.filter(event_id=event_id, start_timestamp=start_timestamp).last()
if time is None:
time = Time(event_id=event_id, start_timestamp=start_timestamp)
time.save()
return time
def record_time_reset(event_id, time_id, start_timestamp):
# If a Judge has changed anything other than 'order' on the Time created by record_time_start(),
    # don't do anything; a Judge can manually delete the abandoned Time if necessary. Otherwise,
# delete the Time that was created by record_time_start() (if it hasn't already been deleted).
with transaction.atomic():
try:
time = Time.objects.select_for_update().get(
event_id=event_id, id=time_id,
start_timestamp=start_timestamp, finish_timestamp__isnull=True,
run_time__isnull=True,
)
except Time.DoesNotExist:
return
log.info('Deleting Time record per timer equipment Reset: %s'%time)
time.delete()
return time
def record_time_stop(event_id, finish_timestamp, run_time, time_id=None, start_timestamp=None):
# If record_time_start() was not called or a Judge has changed anything other than 'order' on
# the Time created by record_time_start(), create a new Time unless a duplicate is found.
# Otherwise update the Time created by record_time_start().
if time_id is not None:
try:
with transaction.atomic():
time = Time.objects.select_for_update().get(
event_id=event_id, id=time_id,
start_timestamp=start_timestamp, finish_timestamp__isnull=True,
run_time__isnull=True,
)
time.finish_timestamp = finish_timestamp
time.run_time = run_time
time.save()
return time
except Time.DoesNotExist:
pass
with transaction.atomic():
# This must be called before identifying the appropriate 'order'. It does not strictly need
# to be called before checking for duplicates, but if it is not then it is theoretically
# possible for two Times to be created if redundant calls are processed in parallel. If
# that happens, a Judge can simply delete the duplicate. However, it is easy enough to move
# this lock before the duplicate check, and the performance impact of this should be
# minimal, so we may as well use it to prevent duplicates too.
Mutex.lock(event_id, 'Time.order')
time = Time.objects.filter(
event_id=event_id,
start_timestamp=start_timestamp, finish_timestamp=finish_timestamp,
run_time=run_time,
).last()
if time is None:
# Identify the appropriate 'order' for the new Time (either immediately after the
# original Time if the Time has been modified or immediately after the last Time with a
# run_time).
order = None
prev_order = None
if time_id is not None:
prev_order = Time.objects.filter(id=time_id).values_list('order', flat=True).first()
if prev_order is None:
prev_order = Time.objects.exclude(run_time__isnull=True
).order_by('-order').values_list('order', flat=True).first()
if prev_order is not None:
# We could set order=prev_order and time._order_relative=True and let Time.save()
# handle this, but we're almost done so we may as well finish here and skip there.
next_order = Time.objects.filter(event_id=event_id, order__gt=prev_order
).order_by('order').values_list('order', flat=True).first()
if next_order is not None:
order = prev_order+((next_order-prev_order)/2)
time = Time(
event_id=event_id, order=order,
start_timestamp=start_timestamp, finish_timestamp=finish_timestamp,
run_time=run_time,
)
time.save()
return time
def record_time_restart(event_id, time_id, finish_timestamp, run_time, start_timestamp=None):
# Unless this is a redundant call, always create a new Time (as opposed to removing the
# finish_timestamp and run_time from an existing Time) to ensure that a valid run_time cannot be
# deleted by a bogus or accidental Restart. A Judge will need to manually delete either the
# original Time or the restarted Time.
if start_timestamp is None:
return
with transaction.atomic():
# This must be called before identifying the appropriate 'order'. It does not strictly need
# to be called before checking for duplicates, but if it is not then it is theoretically
# possible for two Times to be created if redundant calls are processed in parallel. If
# that happens, a Judge can simply delete the duplicate. However, it is easy enough to move
# this lock before the duplicate check, and the performance impact of this should be
# minimal, so we may as well use it to prevent duplicates too.
Mutex.lock(event_id, 'Time.order')
time = Time.objects.filter(
event_id=event_id,
start_timestamp=start_timestamp, finish_timestamp__isnull=True,
run_time__isnull=True,
).last()
if time is None:
# Identify the appropriate 'order' for the new Time (either immediately after the Time
# being restarted or immediately after the last Time with a run_time).
order = None
prev_order = Time.objects.filter(id=time_id).values_list('order', flat=True).first()
if prev_order is None:
prev_order = Time.objects.exclude(run_time__isnull=True
).order_by('-order').values_list('order', flat=True).first()
if prev_order is not None:
# We could set order=prev_order and time._order_relative=True and let Time.save()
# handle this, but we're almost done so we may as well finish here and skip there.
next_order = Time.objects.filter(event_id=event_id, order__gt=prev_order
).order_by('order').values_list('order', flat=True).first()
if next_order is not None:
order = prev_order+((next_order-prev_order)/2)
time = Time(event_id=event_id, order=order, start_timestamp=start_timestamp)
time.save()
return time
# Merge Runs, RunStatuses, and Times into Results and associate GeolocationSets with the Results.
#
# To be absolutely sure that we are associating Run and Time records in the same way as they are
# associated in the UI, we require the UI to pass in an array of (run.id, time.id) pairs to be
# associated.
#
# Since GeolocationSets are not used for scoring and could be manually reassociated later if
# necessary, they are automatically associated with Results. There are likely some corner cases
# where this association will be incorrect, but this is probably not worth worrying about.
#
# To ensure that Results are recorded in order and that GeolocationSets can be automatically
# associated with Results, the request will be rejected if any Runs or Times are found in the
# database which are missing from 'associations' but should come before the highest ordered
# Run/Time in 'associations'.
#
# While this could be called at any time after each Time has been properly associated with a Run,
# all necessary RunStatus changes have been made, and all relevant GeolocationSets have been
# created, it is probably easiest to finalize the records in bulk at the end of each heat.
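# Illustrative example only (the exact payload is built by the UI): at the end
# of a heat the client might send
#   associations = [(run_1.id, time_1.id), (run_2.id, time_2.id), ...]
# to finalize_results(); a MissingAssociationsError tells it which Runs and/or
# Times still need to be paired up before retrying.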
class MissingAssociationsError(ValidationError):
def __init__(self, message, missing_runs, missing_times, *args, **kwargs):
self.missing_runs = missing_runs
self.missing_times = missing_times
super(MissingAssociationsError, self).__init__(message, *args, **kwargs)
def finalize_results(client_ip, js_id, auth_user, event_id, associations):
if not can_edit_results_for_event(auth_user, event_id):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to call finalize_results() for Event '%s': %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), event_id, associations))
raise PermissionDenied
with transaction.atomic():
# This is currently the only place where multiple Mutexes are locked simultaneously by a
# single thread. If code is added elsewhere which also locks multiple Mutexes, that code
# must lock them in the same order to prevent deadlocks.
Mutex.lock(event_id, 'Run.order')
Mutex.lock(event_id, 'Time.order')
run_ids = [r for r, t in associations]
        runs = Run.objects.select_for_update().filter(event_id=event_id, id__in=run_ids
            ).order_by('order').prefetch_related('runstatus_set')
time_ids = [t for r, t in associations]
        times = Time.objects.select_for_update().filter(event_id=event_id, id__in=time_ids
            ).order_by('order')
# Check for duplicate or bad Runs or Times in associations
num = len(associations)
num_runs = len(runs)
num_times = len(times)
if num_runs != num or num_times != num:
raise ValidationError('Found unexpected number of Runs/Times in database (%s/%s vs %s)'%(num_runs, num_times, num))
# Check for Runs or Times missing from associations
        missing_runs = Run.objects.filter(event_id=event_id, order__lt=runs[num - 1].order
            ).exclude(id__in=run_ids)
        missing_times = Time.objects.filter(event_id=event_id, order__lt=times[num - 1].order
            ).exclude(id__in=time_ids)
if missing_runs or missing_times:
raise MissingAssociationsError('Unassociated Runs and/or Times found', missing_runs, missing_times)
# Merge
time_ids_to_run_ids = {t: r for r, t in associations}
        run_ids_to_times = {time_ids_to_run_ids[t.id]: t for t in times}
last = num - 1
for i, run in enumerate(runs):
time = run_ids_to_times[run.id]
run.merge_runstatuses()
result = Result(
event_id=event_id, heat=run.heat, run_set=run.run_set, driver=run.driver,
start_timestamp=time.start_timestamp, finish_timestamp=time.finish_timestamp,
cones=run.cones, status=run.status,
)
# Set a hint to allow Signal handlers and/or the UI to defer redundant operations until
# after they process the last instance in this bulk save.
result.is_bulk_operation = True
result.save()
# Reassociate GeolocationSets
loc_sets = []
if result.finish_timestamp is not None:
# Associate each Driver GeolocationSet that started before finish_timestamp with
# this Result
loc_sets = get_geolocation_sets_for_driver(result.driver).filter(
timestamp__lt=result.finish_timestamp
)
else:
# Without finish_timestamp, the best we can do is assume that one GeolocationSet
# was recorded per Run and associate the next GeolocationSet with this Result.
loc_set = get_geolocation_sets_for_driver(result.driver).first()
if loc_set is not None:
loc_sets = [loc_set]
for j, loc_set in enumerate(loc_sets):
loc_set.result = result
loc_set.driver = None
loc_set.is_bulk_operation = True
loc_set.save()
# Delete old RunStatuses, Run, and Time
            for runstatus in run.runstatus_set.all():
runstatus.is_bulk_operation = True
runstatus.delete()
run.is_bulk_operation = True
time.is_bulk_operation = (i != last)
run.delete()
time.delete()
# Find any Runs / Times / GeolocationSets that have not been finalized and clean up Mutexes for
# this event.
#
# This should be checked at the end of each Event to ensure that we haven't lost any records:
# runs, times, loc_sets = get_unfinalized(event)
# if runs or times or loc_sets:
# ...
def get_unfinalized(event):
runs = get_runs_for_event(event)
times = get_times_for_event(event)
loc_sets = get_geolocation_sets_for_drivers(get_drivers_for_event(event))
if not (runs or times or loc_sets):
Mutex.clear(event)
return(runs, times, loc_sets)
# GeolocationSet is used to associate a collection of Geolocations with an Event (for tracking
# course layout and cone positions) or with a Result or Driver (for tracking a driver during a run).
# Multiple GeolocationSets may be associated with the same Event/Result/Driver, for example to
# record multiple course layouts if the layout changes between heats, to store data for multiple
# Runs for the same Driver before Results are finalized, or if multiple devices are used to track
# the same Driver during the same Run.
# For Event associations, GeolocationSets may be explicitly created or may be implicitly created by
# adding a Geolocation with loc_set=None and assoc='event'.
# For Driver associations, to avoid orphaned GeolocationSets with no associated Geolocations,
# GeolocationSets must be implicitly created by adding a Geolocation with loc_set=None.
# Driver associations are automatically converted to Result associations by finalize_results().
class GeolocationSet(models.Model):
class Meta:
ordering = ['id'] # Default sort order
# Exactly one of event, result, or driver must be populated
event = models.ForeignKey(Event, blank=True, null=True, on_delete=models.CASCADE)
result = models.ForeignKey(Result, blank=True, null=True, on_delete=models.CASCADE)
driver = models.ForeignKey(Driver, blank=True, null=True, on_delete=models.CASCADE)
# Optional human-readable description
desc = models.CharField(max_length=50, blank=True, default='')
# For Driver associations, the timestamp of the first associated Geolocation. This is used by
# finalize_results() when re-associating the Set with a Result.
timestamp = models.DateTimeField(blank=True, null=True, default=None)
def form_clean(self, client_ip, js_id, auth_user, event_id, action, create, update, delete, new_fields, **kwargs):
if not can_edit_geolocations_for_event(auth_user, event_id):
if create:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s GeolocationSet for Event '%s': %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, safe_log(new_fields, 1024)))
raise PermissionDenied
driver = get_driver_for_user_at_event(auth_user, event_id)
if (driver is None or # noqa:E271
(self.driver_id is not None and driver.id != self.driver_id) or
(self.result is not None and driver.id != self.result.driver_id)):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s GeolocationSet for Event '%s': %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, self, safe_log(new_fields, 1024)))
raise PermissionDenied
return new_fields
#if can_edit_geolocations_for_event(auth_user, event_id):
if create:
self.event_id = event_id
return new_fields
if ((self.event_id is not None and event_id != self.event_id) or # noqa:E271
(self.result is not None and event_id != self.result.event_id) or
(self.driver is not None and event_id != self.driver.event_id)):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s GeolocationSet associated with another Event ('%s'): %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, self))
raise PermissionDenied
return new_fields
#def clean(self):
# super(GeolocationSet, self).clean()
# if [self.event_id, self.result_id, self.driver_id].count(None) != 2:
# raise ValidationError('Exactly one of Event, Result, or Driver must be specified')
def __str__(self):
return '%s'%self.id
def get_geolocation_sets_at_event(event):
return GeolocationSet.objects.filter(
Q(event=event)|Q(result__event=event)|Q(driver__registration__event=event)
)
def have_geolocation_sets_at_event(event):
return get_geolocation_sets_at_event(event).exists()
def get_geolocation_sets_for_event(event):
return GeolocationSet.objects.filter(event=event)
def have_geolocation_sets_for_event(event):
return get_geolocation_sets_for_event(event).exists()
def get_geolocation_sets_for_result(result):
return GeolocationSet.objects.filter(result=result)
def have_geolocation_sets_for_result(result):
return get_geolocation_sets_for_result(result).exists()
def get_geolocation_sets_for_results(results):
return GeolocationSet.objects.filter(result__in=results)
def have_geolocation_sets_for_results(results):
rids_with_sets = GeolocationSet.objects.filter(result__in=results
).order_by('result_id').values_list('result_id', flat=True).distinct()
results_have_sets = {result.id: False for result in results}
results_have_sets.update({result_id: True for result_id in rids_with_sets})
return results_have_sets
def get_geolocation_sets_for_driver(driver):
return GeolocationSet.objects.filter(driver=driver)
def have_geolocation_sets_for_driver(driver):
return get_geolocation_sets_for_driver(driver).exists()
def get_geolocation_sets_for_drivers(drivers):
return GeolocationSet.objects.filter(driver__in=drivers)
def have_geolocation_sets_for_drivers(drivers):
dids_with_sets = GeolocationSet.objects.filter(driver__in=drivers
).order_by('driver_id').values_list('driver_id', flat=True).distinct()
drivers_have_sets = {driver.id: False for driver in drivers}
drivers_have_sets.update({driver_id: True for driver_id in dids_with_sets})
return drivers_have_sets
class Geolocation(models.Model):
class Meta:
unique_together = ('loc_set', 'timestamp')
ordering = ['timestamp'] # Default sort order
# May be blank in Forms (to auto-create a GeolocationSet), but cannot be null in the database
loc_set = models.ForeignKey(GeolocationSet, blank=True, on_delete=models.CASCADE)
# As reported by the geolocation API
timestamp = models.DateTimeField()
latitude = models.FloatField()
longitude = models.FloatField()
accuracy = models.FloatField()
speed = models.FloatField(blank=True, null=True, default=None)
heading = models.FloatField(blank=True, null=True, default=None)
# Acceleration with gravity
# This is produced using an accelerometer and is supported by most phones.
# For phones that do not support acceleration without gravity (below): After detecting the
# direction of gravity while stationary before a run, and detecting the direction of forward
# acceleration during launch at the start of a run, we can calculate approximate (affected
# slightly by body roll, since that changes the direction of gravity) forward and lateral
# acceleration for the rest of the run.
accel_wg_x = models.FloatField(blank=True, null=True, default=None)
accel_wg_y = models.FloatField(blank=True, null=True, default=None)
accel_wg_z = models.FloatField(blank=True, null=True, default=None)
# Acceleration in spherical coordinates (radius / inclination / azimuth) without gravity.
# This is produced using a gyroscope and is only supported by high-end phones and some newer
# low-end phones. It is required for VR, so it is becoming more prevalent.
# By subtracting this from acceleration with gravity, we can determine the direction of gravity
# at each point, which means we can calculate forward and lateral acceleration more accurately
# and we don't need to detect the direction of gravity while stationary.
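    # Sketch of the intended calculation (illustrative only):
    #   per-sample gravity vector = (acceleration with gravity) - (acceleration without gravity)
    #   forward / lateral acceleration are then the components of the no-gravity
    #   acceleration along and across the direction of travel.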
accel_ng_x = models.FloatField(blank=True, null=True, default=None)
accel_ng_y = models.FloatField(blank=True, null=True, default=None)
accel_ng_z = models.FloatField(blank=True, null=True, default=None)
def form_clean(self, client_ip, js_id, auth_user, event_id, action, create, update, delete, new_fields, **kwargs):
# Since Geolocation contains an observed data point, there should be no need to change it
if update:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Geolocation: %s -> %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, self, safe_log(new_fields, 1024)))
raise PermissionDenied
if not can_edit_geolocations_for_event(auth_user, event_id):
# Drivers are not currently allowed to delete individual Geolocations, although they can
# delete an entire GeolocationSet, which will delete all associated Geolocations
if delete:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Geolocation for Event '%s': %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, self))
raise PermissionDenied
#if create:
driver = get_driver_for_user_at_event(auth_user, event_id)
if driver is None:
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Geolocation for Event '%s': %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, event_id, safe_log(new_fields, 1024)))
raise PermissionDenied
self.assoc = 'driver'
self.assoc_obj = driver
return new_fields
#if can_edit_geolocations_for_event(auth_user, event_id):
if delete:
loc_set = self.loc_set
if ((loc_set.event_id is not None and event_id != loc_set.event_id) or # noqa:E271
(loc_set.result_id is not None and event_id != loc_set.result.event_id) or
(loc_set.driver_id is not None and event_id != loc_set.driver.event_id)):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Geolocation %s associated with another Event ('%s'): %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, self.id, event_id, loc_set))
raise PermissionDenied
return new_fields
#if create:
if new_fields['loc_set'] is not None:
loc_set = self.loc_set
if ((loc_set.event_id is not None and event_id != loc_set.event_id) or # noqa:E271
(loc_set.result_id is not None and event_id != loc_set.result.event_id) or
(loc_set.driver_id is not None and event_id != loc_set.driver.event_id)):
log.warning("ACCESS DENIED: User '%s' (%s%s) attempted to %s Geolocation associated with another Event ('%s'): %s %s"%(auth_user, client_ip, (' '+js_id if js_id else ''), action, self.id, event_id, loc_set, safe_log(new_fields, 1024)))
raise PermissionDenied
return new_fields
#if new_fields['loc_set'] is None:
if new_fields['assoc'] == 'event':
self.assoc = 'event'
self.assoc_obj = event_id
return new_fields
if new_fields['assoc'] == 'driver':
if new_fields['driver'] is None:
raise ValidationError('Driver must be specified')
self.assoc = 'driver'
self.assoc_obj = new_fields['driver']
return new_fields
#if new_fields['assoc'] == '':
driver = get_driver_for_user_at_event(auth_user, event_id)
if driver is None:
raise ValidationError('User is not a Driver')
self.assoc = 'driver'
self.assoc_obj = driver
return new_fields
def idempotent_create(self, *args, **kwargs):
with transaction.atomic():
new_loc_set = None
if self.loc_set is None:
# This could be used to handle duplicate creates when loc_set is None.
# However, it is theoretically possible (although exceedingly unlikely) for a
# single driver to record two sets simultaneously with the same starting timestamp,
# which would cause this to use the same loc_set for both sets.
# Since AutoxBinding provides another mechanism for preventing duplicate creates
# without any potential for loc_set conflicts, we currently use that instead when
# loc_set is None.
# However, that is only done in the interest of being overly cautious, and this
# should be more than sufficient for this use case if there is some reason to not
# use the mechanism in AutoxBinding.
#loc = None
#if self.assoc == 'event':
# loc = Geolocation.objects.filter(
# loc_set__event=self.assoc_obj, timestamp=self.timestamp
# ).first()
#else: #if self.assoc == 'driver':
# loc = Geolocation.objects.filter(
# loc_set__driver=self.assoc_obj, timestamp=self.timestamp
# ).first()
#if loc is not None:
# return loc
if self.assoc == 'event':
new_loc_set = GeolocationSet(event=self.assoc_obj)
else: #if self.assoc == 'driver':
new_loc_set = GeolocationSet(driver=self.assoc_obj, timestamp=self.timestamp)
new_loc_set.save()
self.loc_set = new_loc_set
try:
try:
return super(Geolocation, self).save(*args, **kwargs)
except IntegrityError as ex:
# Ignore unique constraint errors due to duplicate creates
loc = Geolocation.objects.filter(loc_set=self.loc_set, timestamp=self.timestamp
).first()
if loc is not None:
return loc
# Re-raise any other errors
raise ex
except:
if new_loc_set is not None:
new_loc_set.delete()
raise
def __str__(self):
return '%s'%self.id
def get_geolocations_in_set(loc_set):
return Geolocation.objects.filter(loc_set=loc_set)
def get_geolocations_in_sets(loc_sets):
return Geolocation.objects.filter(loc_set__in=loc_sets)
def can_edit_geolocations_for_event(auth_user, event):
return has_permission(auth_user, event, ['JUDGE', 'GEOLOC'])
def can_edit_own_geolocations_at_event(auth_user, event):
return (get_driver_for_user_at_event(auth_user, event) is not None)
| gpl-3.0 | -6,137,469,636,408,634,000 | 58.405724 | 254 | 0.651118 | false |
mikhaildubov/Rhinoceros-Python-Scripts | code/curves/isometricflow.py | 1 | 2639 | # Edge flow
# Mikhail Dubov
# 2015-11-24
from math import radians, pi, sin, tan
import rhinoscriptsyntax as rs
# Some shortcuts for vector operations to improve code readability
unit = rs.VectorUnitize
subtract = rs.VectorSubtract
scale = rs.VectorScale
rev = rs.VectorReverse
length = rs.VectorLength
rotate = rs.VectorRotate
def angle(v1, v2):
# rs.VectorAngle returns degrees which is not very convenient
return radians(rs.VectorAngle(v1, v2))
def AddVector(vecdir, base_point=None):
# Draws a vector (for visualization purposes)
base_point = base_point or [0, 0, 0]
tip_point = rs.PointAdd(base_point, vecdir)
line = rs.AddLine(base_point, tip_point)
if line: return rs.CurveArrows(line, 2) # adds an arrow tip
def isometricflow(polyline=None, t=0.1):
polyline = polyline or rs.GetObject("Select a polyline", rs.filter.curve, True, True)
if polyline is None: return
vertices = rs.PolylineVertices(polyline)
n = len(vertices) - 1
lengths = []
angles = []
if vertices:
for i in range(n):
if i > 0: prev = i-1
else: prev = n-1
next = i+1
l_i = subtract(vertices[next], vertices[i])
l_j = subtract(vertices[i], vertices[prev])
lengths.append(length(l_i))
# TODO: Is this working only for convex polygons? Does rs.VectorAngle return negative values when it's not convex?
angles.append(angle(l_i, l_j))
        angles_sum = sum(angles)
        # Relax each turning angle toward the mean angle by a factor of t.
        for k in range(n):
            angles[k] -= t * (angles[k] - angles_sum / n)
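    # Rebuild the polyline from the first vertex, reusing the stored edge
    # lengths and the relaxed turning angles.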
prev_edge = subtract(vertices[1], vertices[0])
newvertices = [vertices[0]]
for i in range(1, n):
newvertices.append(rs.PointAdd(newvertices[-1], prev_edge))
next_edge = scale(unit(rotate(prev_edge, angles[i], [0, 0, 1])), lengths[i])
prev_edge = next_edge
newvertices.append(newvertices[0]) # first point closes the polyline
# new_polyline = rs.AddPolyline(newvertices, polyline) # replaces previous polyline
new_polyline = rs.AddPolyline(newvertices) # keeps previous polyline
return new_polyline
def iterate(flow_func, iterations, *args):
pl_id = None
for i in xrange(iterations):
pl_id = flow_func(pl_id, *args)
return pl_id
if __name__ == "__main__":
iterate(isometricflow, 1)
#iterate(isometricflow, 1)
| mit | -6,460,669,401,087,479,000 | 29.047059 | 126 | 0.595301 | false |
DataPilot/notebook-miner | freq_itemsets/frequent_itemsets.py | 1 | 7264 | import pyfpgrowth
from apyori import apriori
import time
from nbminer.freq_itemsets.buckets.buckets import Bucket
from nbminer.freq_itemsets.buckets.buckets import Patterns
class FrequentItemsetsBase:
def __init__(self, threshold=10, itemset_min=1):
self.threshold = threshold
self.itemset_min = itemset_min
def transform(self, corpus):
self.corpus = corpus
self.get_itemsets()
return self.patterns.get_patterns()
def get_buckets(self):
pass
def get_itemsets(self):
self.buckets = self.get_buckets()
input_buckets = [el.get_items() for el in self.buckets]
s = time.time()
orig_patterns = pyfpgrowth.find_frequent_patterns(input_buckets, self.threshold)
print(time.time()-s)
patterns = [tuple([el for el in pattern if el != '']) for pattern in orig_patterns if len(pattern) >= self.itemset_min]
self.patterns = Patterns(patterns, self.buckets)
def print_itemset_examples(self, key, n_examples=1):
s = 'Template: ' + str(key) + "\n"
examples = self.patterns.get_code_examples(key, n_examples)
for i, el in enumerate(examples[0]):
s += 'EXAMPLE CELL CODE FOR THIS TEMPLATE\n'
#s += 'From notebook ' + examples[1][i]
s += '*'*100
s += '\n' + el
s += '*'*100
s += '\n\n\n\n'
return s
def get_function_dict(self, pattern, functions=None):
return self.patterns.get_functions(pattern, functions=functions)
def get_full_function_dict(self, pattern, functions=None, name_type='full_name_string'):
return self.patterns.get_all_functions(pattern, functions=functions, name_type=name_type)
def get_number_matches(self, pattern):
return self.patterns.get_number_matches(pattern)
def get_patterns(self, min_pattern=1):
return self.patterns.get_patterns(min_pattern)
def get_pattern_lengths(self):
return [len(p) for p in self.get_patterns()]
def get_number_buckets(self):
return len(self.buckets)
def get_number_itemsets(self, min_pattern=1):
total = 0
for key in self.patterns.get_patterns():
if len(key) >= min_pattern:
total += 1
return total
def get_percentage(self, min_pattern=1):
total = 0
has_itemset = 0
for bucket in self.buckets:
if len(bucket.get_patterns()) >= min_pattern:
has_itemset += 1
total += 1
return has_itemset/total
def get_avg_number(self, min_pattern=1):
total = 0
itemsets = 0
for bucket in self.buckets:
itemsets += len(bucket.get_patterns())
if len(bucket.get_patterns()) > 0:
total += 1
return itemsets/total
def get_patterns_by_notebook_name(self):
notebook_patterns = {}
for bucket in self.buckets:
for cell in bucket.items:
name = cell.get_feature('notebook_name')
if name not in notebook_patterns:
notebook_patterns[name] = []
notebook_patterns[name].append(bucket.get_patterns())
return notebook_patterns
def remove_unilateral(self, input_buckets):
names = {}
groups = {}
for b in input_buckets:
for template in b.get_items():
if template not in names:
names[template] = set()
groups[template] = set()
names[template].add(b.notebook_name)
groups[template].add(b.notebook_group)
return_buckets = []
for b in input_buckets:
temp = []
for template in b.get_items():
if len(groups[template]) >= 2:
temp.append(template)
return_buckets.append(tuple(temp))
return return_buckets
def remove_common(self, input_buckets, common=58):
# Remove any templates deemed too common (hopefully these will be templates that appear in each hw)
counts = {}
before = 0
after = 0
for b in input_buckets:
for template in b:
if template not in counts:
counts[template] = 0
counts[template] += 1
return_buckets = []
for b in input_buckets:
temp = []
for template in b:
if counts[template] < common:
temp.append(template)
after += 1
before += 1
return_buckets.append(tuple(temp))
        print('removed', before - after, 'of', before, 'template occurrences across all buckets')
return return_buckets
class FrequentGramItemsets(FrequentItemsetsBase):
    def __init__(self, threshold=10, itemset_min=1, n_gram=4):
        super(FrequentGramItemsets, self).__init__(threshold=threshold,
                                                   itemset_min=itemset_min)
        self.n_gram = n_gram
def get_buckets(self):
buckets = []
og_code = []
segs = self.corpus.get_list_segments()
for i in range(len(segs) - self.n_gram):
buckets.append([segs[i:i+self.n_gram]])
#buckets.append(list(set([el.get_feature('template') for el in segs[i:i+self.n_gram] if el.get_feature('template') != ''])))
#og_code.append([astor.to_source(el.get_feature('ast')) for el in segs[i:i+self.n_gram]])
return buckets
class FrequentItemsets(FrequentItemsetsBase):
    def __init__(self, threshold=10, itemset_min=1):
        super(FrequentItemsets, self).__init__(threshold=threshold,
                                               itemset_min=itemset_min)
def get_buckets(self):
buckets = []
temp = Bucket()
last = None
for el in self.corpus.get_list_segments():
if el.get_feature('cell_id') != last:
buckets.append(temp)
temp = Bucket(notebook_group=el.get_feature('import_name'), notebook_name=el.get_feature('notebook_name'))
temp.append_item(el)
last = el.get_feature('cell_id')
print (len(buckets))
return buckets
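# Minimal usage sketch (assuming `corpus` exposes get_list_segments() as used above):
#   fi = FrequentItemsets(threshold=10, itemset_min=2)
#   patterns = fi.transform(corpus)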
class FrequentItemsetsNotebook(FrequentItemsetsBase):
    def __init__(self, threshold=10, itemset_min=1):
        super(FrequentItemsetsNotebook, self).__init__(threshold=threshold,
                                                       itemset_min=itemset_min)
def get_buckets(self):
buckets = []
last = None
for el in self.corpus.get_list_notebooks():
temp = Bucket(notebook_group=el.get_feature('import_name'), notebook_name=el.get_feature('notebook_name'))
for cell in el.get_all_cells():
temp.append_item(cell)
buckets.append(temp)
print (len(buckets))
#import pdb; pdb.set_trace()
return buckets
'''
def get_itemsets(self):
self.buckets = self.get_buckets()
#import pdb; pdb.set_trace()
input_buckets = [el.get_items() for el in self.buckets]
s = time.time()
orig_patterns = list(apriori(input_buckets, support=.001))
orig_patterns = [set(el.items) for el in orig_patterns]
#print(orig_patterns)
print(time.time()-s)
patterns = [tuple([el for el in pattern if el != '']) for pattern in orig_patterns if len(pattern) >= self.itemset_min]
self.patterns = Patterns(patterns, self.buckets)
'''
| apache-2.0 | 1,007,300,429,106,448,000 | 35.502513 | 136 | 0.578331 | false |
shoopio/shoop | shuup_tests/admin/test_chart.py | 2 | 4360 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2019, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from collections import OrderedDict
import pytest
from babel.numbers import format_decimal, format_percent
from shuup.admin.dashboard.charts import (
BarChart, Chart, ChartDataType, ChartType, MixedChart
)
from shuup.utils.i18n import format_money
from shuup.utils.money import Money
def test_chart_is_abstract_enough():
with pytest.raises(TypeError):
Chart("Derp").get_config()
@pytest.mark.django_db
def test_bar_chart():
labels = ["One", "Two", "Three"]
locale = "pt_br"
chart = BarChart("ma biultiful xart", labels, data_type=ChartDataType.NUMBER, locale=locale)
# add line data here
with pytest.raises(AssertionError):
chart.add_data("some lines", [1, 2, 3], ChartType.LINE)
dataset1 = OrderedDict({"type": ChartType.BAR, "label": "some bars #1", "data": [1, 2, 3]})
dataset2 = OrderedDict({"type": ChartType.BAR, "label": "some bars #2", "data": [2, 3, 4]})
datasets = [dataset1, dataset2]
chart.add_data(dataset1["label"], dataset1["data"], dataset1["type"])
chart.add_data(dataset2["label"], dataset2["data"], dataset2["type"])
chart_config = chart.get_config()
assert chart_config["type"] == ChartType.BAR
assert chart_config["data"]["labels"] == labels
for i in range(len(chart_config["data"]["datasets"])):
for j in range(len(chart_config["data"]["datasets"][i]["data"])):
assert chart_config["data"]["datasets"][i]["data"][j] == datasets[i]["data"][j]
formatted_data = chart_config["data"]["datasets"][i]["formatted_data"][j]
assert formatted_data == format_decimal(datasets[i]["data"][j], locale=locale)
@pytest.mark.django_db
def test_bar_chart_percent():
labels = ["One", "Two", "Three"]
locale = "pt_br"
chart = BarChart("ma biultiful xart %", labels, data_type=ChartDataType.PERCENT, locale=locale)
dataset1 = OrderedDict({"type": ChartType.BAR, "label": "some bars #1", "data": [0.1, 0.2, 0.3]})
dataset2 = OrderedDict({"type": ChartType.BAR, "label": "some bars #2", "data": [0.45, 0.55, .999]})
datasets = [dataset1, dataset2]
chart.add_data(dataset1["label"], dataset1["data"], dataset1["type"])
chart.add_data(dataset2["label"], dataset2["data"], dataset2["type"])
chart_config = chart.get_config()
assert chart_config["type"] == ChartType.BAR
assert chart_config["data"]["labels"] == labels
for i in range(len(chart_config["data"]["datasets"])):
for j in range(len(chart_config["data"]["datasets"][i]["data"])):
assert chart_config["data"]["datasets"][i]["data"][j] == datasets[i]["data"][j]
formatted_data = chart_config["data"]["datasets"][i]["formatted_data"][j]
assert formatted_data == format_percent(datasets[i]["data"][j], locale=locale)
@pytest.mark.django_db
def test_mixed_chart():
labels = ["One", "Two", "Three"]
locale = "pt_br"
currency = "BRL"
chart = MixedChart("ma biultiful xart", labels, data_type=ChartDataType.CURRENCY, locale=locale, currency=currency)
dataset1 = OrderedDict({"type": ChartType.BAR, "label": "some bars #1", "data": [1, 2, 3]})
dataset2 = OrderedDict({"type": ChartType.BAR, "label": "some bars #2", "data": [2, 3, 4]})
dataset3 = OrderedDict({"type": ChartType.LINE, "label": "some lines #1", "data": [5, 6, 7]})
dataset4 = OrderedDict({"type": ChartType.LINE, "label": "some lines #2", "data": [8, 9, 10]})
datasets = [dataset1, dataset2, dataset3, dataset4]
for dataset in datasets:
chart.add_data(dataset["label"], dataset["data"], dataset["type"])
chart_config = chart.get_config()
assert chart_config["type"] == "mixed"
assert chart_config["labels"] == labels
for i in range(len(chart_config["data"])):
for j in range(len(chart_config["data"][i]["data"])):
assert chart_config["data"][i]["data"][j] == datasets[i]["data"][j]
formatted_data = chart_config["data"][i]["formatted_data"][j]
assert formatted_data == format_money(Money(datasets[i]["data"][j], currency=currency).as_rounded())
| agpl-3.0 | -6,993,107,601,462,338,000 | 40.923077 | 119 | 0.637615 | false |
sadig/foreman-api | foreman/api/resources.py | 1 | 1767 | # -*- coding: utf-8 -*-
###############################################################################
#
# foreman-api - python foreman api wrapper
# Copyright (C) 2010, 2011, 2012, 2013 Stephan Adig <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
###############################################################################
import sys
import json
try:
from restkit import Resource
except ImportError as e:
print('You didn\'t install python-restkit')
print(e)
sys.exit(1)
class ForemanResource(Resource):
def __init__(self, url=None, pool_instance=None, **kwargs):
super(ForemanResource, self).__init__(url, follow_redirect=True, max_follow_redirect=10, pool=pool_instance, **kwargs)
def request(self, *args, **kwargs):
headers = {
'Content-Type':'application/json; charset=utf-8',
'Accept':'application/json'
}
kwargs['headers'] = headers
resp = super(ForemanResource, self).request(*args, **kwargs)
return json.loads(resp.body_string())
| lgpl-2.1 | -2,692,801,138,572,101,000 | 40.093023 | 126 | 0.617997 | false |
andela-wcyn/bucketlist | manage.py | 1 | 1374 | import os
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager, prompt_bool
from flask_script import Server
from api import create_app, db
from api.models import User, Bucketlist, BucketlistItem
app = create_app(os.getenv('BUCKETLIST_ENV') or 'dev')
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
manager.add_command('runserver', Server(host=app.config.get('HOST')))
@manager.command
def initdb():
db.create_all()
user1 = User(username="wcyn", email="[email protected]",
password="1234567")
user2 = User(username="paul", email="[email protected]", password="1234567")
bucketlist = Bucketlist(description="My Bucketlist", user=user1)
bucketlist_item = BucketlistItem(description="An item",
bucketlist=bucketlist)
db.session.add(user1)
db.session.add(user2)
db.session.add(bucketlist)
db.session.add(bucketlist_item)
db.session.commit()
print("Initialized the database")
@manager.command
def create_tables():
db.create_all()
print("Created model tables")
@manager.command
def dropdb():
if prompt_bool("Are you sure you want to lose all your data?"):
db.drop_all()
print("Dropped the database")
if __name__ == "__main__":
manager.run()
db.create_all()
| mit | 3,664,571,395,458,831,400 | 27.040816 | 78 | 0.678311 | false |
WikiWatershed/gwlf-e | gwlfe/Input/WaterBudget/InitSnow.py | 1 | 1041 | from numpy import zeros
from gwlfe.Memoization import memoize
try:
from InitSnow_f_inner_compiled import InitSnow_f_inner
except ImportError:
print("Unable to import compiled InitSnow_inner, using slower version")
from gwlfe.Input.WaterBudget.InitSnow_inner import InitSnow_f_inner
def InitSnow(NYrs, DaysMonth, InitSnow_0, Temp, Prec):
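    # Accumulate precipitation as snowpack on days at or below freezing; on
    # warmer days melt the pack by 0.45 per degree (never below zero), carrying
    # the previous day's value forward across month and year boundaries.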
result = zeros((NYrs, 12, 31))
yesterday = InitSnow_0
for Y in range(NYrs):
for i in range(12):
for j in range(DaysMonth[Y][i]):
if Temp[Y][i][j] <= 0:
result[Y][i][j] = yesterday + Prec[Y][i][j]
else:
if yesterday > 0.001:
result[Y][i][j] = max(yesterday - 0.45 * Temp[Y][i][j], 0)
else:
result[Y][i][j] = yesterday
yesterday = result[Y][i][j]
return result
@memoize
def InitSnow_f(NYrs, DaysMonth, InitSnow_0, Temp, Prec):
return InitSnow_f_inner(NYrs, DaysMonth, InitSnow_0, Temp, Prec)
| apache-2.0 | -6,762,851,379,802,523,000 | 32.580645 | 82 | 0.576369 | false |
abawchen/leetcode | tests/088.py | 1 | 2297 | import unittest
import sys
sys.path.append('./')
solutions = __import__('solutions.088_merge_sorted_array', fromlist='*')
class Test088(unittest.TestCase):
def test_merge(self):
s = solutions.Solution()
nums1 = []
nums2 = []
self.assertEqual(s.merge(nums1, len(nums1), nums2, len(nums2)), [])
nums1 = [1, 2, 3, 4]
nums2 = [0, 0, 0, 0]
self.assertEqual(s.merge(nums1, 4, nums2, 0), [1, 2, 3, 4])
nums1 = [0, 0, 0, 0]
nums2 = [1, 2, 3, 4]
self.assertEqual(s.merge(nums1, 0, nums2, 4), [1, 2, 3, 4])
nums1 = [1, 2, 3, 0, 0, 0, 0]
nums2 = [1, 2, 3]
self.assertEqual(s.merge(nums1, 3, nums2, 3), [1, 1, 2, 2, 3, 3])
nums1 = [1, 2, 3, 4, 0, 0, 0, 0]
nums2 = [1, 2, 3, 4]
self.assertEqual(s.merge(nums1, 4, nums2, len(nums2)), [1, 1, 2, 2, 3, 3, 4, 4])
nums1 = [1, 2, 3, 3, 3, 4, 0, 0, 0, 0]
nums2 = [1, 2, 3, 4]
self.assertEqual(s.merge(nums1, 6, nums2, 4), [1, 1, 2, 2, 3, 3, 3, 3, 4, 4])
nums1 = [1, 2, 3, 4, 0, 0, 0, 0, 0, 0]
nums2 = [1, 2, 3, 3, 3, 4]
self.assertEqual(s.merge(nums1, 4, nums2, 6), [1, 1, 2, 2, 3, 3, 3, 3, 4, 4])
nums1 = [0, 2, 4, 6, 0, 0, 0, 0, 0, 0, 0, -1]
nums2 = [1, 3, 5, 7, 9, 10, 11, 0, 0]
self.assertEqual(s.merge(nums1, 4, nums2, 7), [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11])
nums1 = [1, 3, 5, 7, 9, 10, 11, 0, 0, 0, 0, 0]
nums2 = [0, 2, 4, 6]
self.assertEqual(s.merge(nums1, 7, nums2, 4), [0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11])
self.assertEqual(s.merge([0], 0, [1], 1), [1])
nums1 = [1, 3, 5, 7, 0, 0, 0, 0]
nums2 = [0, 2, 4, 6]
self.assertEqual(s.merge(nums1, 4, nums2, 2), [0, 1, 2, 3, 5, 7])
nums1 = [1, 3, 5, 7, 0, 0, 0, 0]
nums2 = [0, 2, 7, 9]
self.assertEqual(s.merge(nums1, 4, nums2, 4), [0, 1, 2, 3, 5, 7, 7, 9])
nums1 = [1, 3, 5, 7, 7, 8, 0, 0]
nums2 = [0, 2, 7, 9]
self.assertEqual(s.merge(nums1, 6, nums2, 4), [0, 1, 2, 3, 5, 7, 7, 7, 8, 9])
nums1 = [0, 2, 0, 0, 0, 0, 0]
nums2 = [1, 3, 5, 7]
self.assertEqual(s.merge(nums1, 2, nums2, 4), [0, 1, 2, 3, 5, 7])
if __name__ == '__main__':
unittest.main()
| mit | 2,570,857,993,665,662,500 | 31.352113 | 90 | 0.448846 | false |
HybridF5/jacket | jacket/worker/api.py | 1 | 13188 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from oslo_config import cfg
from oslo_log import log as logging
import six
from jacket.compute import image
from jacket.db import base
from jacket.db.extend import api as db_api
from jacket import objects
import jacket.policy
from jacket import rpc
from jacket.worker import rpcapi as worker_rpcapi
LOG = logging.getLogger(__name__)
get_notifier = functools.partial(rpc.get_notifier, service='jacket')
CONF = cfg.CONF
def policy_decorator(scope):
"""Check corresponding policy prior of wrapped method to execution."""
def outer(func):
@functools.wraps(func)
def wrapped(self, context, target, *args, **kwargs):
if not self.skip_policy_check:
check_policy(context, func.__name__, target, scope)
return func(self, context, target, *args, **kwargs)
return wrapped
return outer
wrap_check_policy = policy_decorator(scope='jacket')
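# Example (hypothetical usage) -- decorating a method enforces the
# "jacket:<method name>" policy against `target` before the body runs:
#   @wrap_check_policy
#   def some_api_method(self, context, target, *args, **kwargs):
#       ...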
def check_policy(context, action, target, scope='jacket'):
_action = '%s:%s' % (scope, action)
jacket.policy.enforce(context, _action, target)
def _diff_dict(orig, new):
"""Return a dict describing how to change orig to new. The keys
correspond to values that have changed; the value will be a list
of one or two elements. The first element of the list will be
either '+' or '-', indicating whether the key was updated or
deleted; if the key was updated, the list will contain a second
element, giving the updated value.
"""
# Figure out what keys went away
result = {k: ['-'] for k in set(orig.keys()) - set(new.keys())}
# Compute the updates
for key, value in new.items():
if key not in orig or value != orig[key]:
result[key] = ['+', value]
return result
class API(base.Base):
"""API for interacting with the compute manager."""
def __init__(self, skip_policy_check=False, **kwargs):
self.skip_policy_check = skip_policy_check
self.db_api = db_api
self.worker_rpcapi = worker_rpcapi.JacketAPI()
self.image_api = image.API()
super(API, self).__init__(**kwargs)
def image_mapper_all(self, context):
return self.db_api.image_mapper_all(context)
def image_mapper_get(self, context, image_id, project_id=None):
return self.db_api.image_mapper_get(context, image_id, project_id)
def image_mapper_create(self, context, image_id, project_id, values):
return self.db_api.image_mapper_create(context, image_id, project_id,
values)
def image_mapper_update(self, context, image_id, project_id, values):
set_properties = values.get("set_properties", {})
unset_properties = values.get("unset_properties", {})
image_info = self.image_mapper_get(context, image_id, project_id)
for key, value in set_properties.iteritems():
image_info[key] = value
for key in unset_properties.keys():
if key in image_info:
del image_info[key]
# del image_info['image_id']
# del image_info['project_id']
return self.db_api.image_mapper_update(context, image_id, project_id,
image_info, delete=True)
def image_mapper_delete(self, context, image_id, project_id=None):
return self.db_api.image_mapper_delete(context, image_id, project_id)
def flavor_mapper_all(self, context):
return self.db_api.flavor_mapper_all(context)
def flavor_mapper_get(self, context, flavor_id, project_id=None):
return self.db_api.flavor_mapper_get(context, flavor_id, project_id)
def flavor_mapper_create(self, context, flavor_id, project_id, values):
return self.db_api.flavor_mapper_create(context, flavor_id, project_id,
values)
def flavor_mapper_update(self, context, flavor_id, project_id, values):
set_properties = values.get("set_properties", {})
unset_properties = values.get("unset_properties", {})
flavor_info = self.flavor_mapper_get(context, flavor_id, project_id)
for key, value in set_properties.iteritems():
flavor_info[key] = value
for key in unset_properties.keys():
if key in flavor_info:
del flavor_info[key]
del flavor_info['flavor_id']
del flavor_info['project_id']
return self.db_api.flavor_mapper_update(context, flavor_id, project_id,
flavor_info, delete=True)
def flavor_mapper_delete(self, context, flavor_id, project_id=None):
return self.db_api.flavor_mapper_delete(context, flavor_id, project_id)
def project_mapper_all(self, context):
return self.db_api.project_mapper_all(context)
def project_mapper_get(self, context, project_id):
return self.db_api.project_mapper_get(context, project_id)
def project_mapper_create(self, context, project_id, values):
return self.db_api.project_mapper_create(context, project_id, values)
def project_mapper_update(self, context, project_id, values):
set_properties = values.get("set_properties", {})
unset_properties = values.get("unset_properties", {})
project_info = self.project_mapper_get(context, project_id)
for key, value in set_properties.iteritems():
project_info[key] = value
for key in unset_properties.keys():
if key in project_info:
del project_info[key]
del project_info['project_id']
return self.db_api.project_mapper_update(context, project_id,
project_info,
delete=True)
def project_mapper_delete(self, context, project_id):
return self.db_api.project_mapper_delete(context, project_id)
def sub_flavor_detail(self, context):
return self.worker_rpcapi.sub_flavor_detail(context)
def sub_vol_type_detail(self, context):
return self.worker_rpcapi.sub_vol_type_detail(context)
def instance_mapper_all(self, context):
return self.db_api.instance_mapper_all(context)
def instance_mapper_get(self, context, instance_id, project_id=None):
return self.db_api.instance_mapper_get(context, instance_id, project_id)
def instance_mapper_create(self, context, instance_id, project_id, values):
return self.db_api.instance_mapper_create(context, instance_id,
project_id,
values)
def instance_mapper_update(self, context, instance_id, project_id, values):
set_properties = values.get("set_properties", {})
unset_properties = values.get("unset_properties", {})
instance_info = self.instance_mapper_get(context, instance_id,
project_id)
for key, value in set_properties.iteritems():
instance_info[key] = value
for key in unset_properties.keys():
if key in instance_info:
del instance_info[key]
del instance_info['instance_id']
del instance_info['project_id']
return self.db_api.instance_mapper_update(context, instance_id,
project_id,
instance_info, delete=True)
def instance_mapper_delete(self, context, instance_id, project_id=None):
return self.db_api.instance_mapper_delete(context, instance_id,
project_id)
def volume_mapper_all(self, context):
return self.db_api.volume_mapper_all(context)
def volume_mapper_get(self, context, volume_id, project_id=None):
return self.db_api.volume_mapper_get(context, volume_id, project_id)
def volume_mapper_create(self, context, volume_id, project_id, values):
return self.db_api.volume_mapper_create(context, volume_id,
project_id,
values)
def volume_mapper_update(self, context, volume_id, project_id, values):
set_properties = values.get("set_properties", {})
unset_properties = values.get("unset_properties", {})
volume_info = self.volume_mapper_get(context, volume_id,
project_id)
for key, value in set_properties.iteritems():
volume_info[key] = value
for key in unset_properties.keys():
if key in volume_info:
del volume_info[key]
return self.db_api.volume_mapper_update(context, volume_id,
project_id,
volume_info, delete=True)
def volume_mapper_delete(self, context, volume_id, project_id=None):
return self.db_api.volume_mapper_delete(context, volume_id,
project_id)
def volume_snapshot_mapper_all(self, context):
return self.db_api.volume_snapshot_mapper_all(context)
def volume_snapshot_mapper_get(self, context, volume_snapshot_id,
project_id=None):
return self.db_api.volume_snapshot_mapper_get(context,
volume_snapshot_id,
project_id)
def volume_snapshot_mapper_create(self, context, volume_snapshot_id,
project_id, values):
return self.db_api.volume_snapshot_mapper_create(context,
volume_snapshot_id,
project_id,
values)
def volume_snapshot_mapper_update(self, context, volume_snapshot_id,
project_id, values):
set_properties = values.get("set_properties", {})
unset_properties = values.get("unset_properties", {})
volume_snapshot_info = self.volume_snapshot_mapper_get(context,
volume_snapshot_id,
project_id)
for key, value in set_properties.iteritems():
volume_snapshot_info[key] = value
for key in unset_properties.keys():
if key in volume_snapshot_info:
del volume_snapshot_info[key]
del volume_snapshot_info['snapshot_id']
del volume_snapshot_info['project_id']
return self.db_api.volume_snapshot_mapper_update(context,
volume_snapshot_id,
project_id,
volume_snapshot_info,
delete=True)
def volume_snapshot_mapper_delete(self, context, volume_snapshot_id,
project_id=None):
return self.db_api.volume_snapshot_mapper_delete(context,
volume_snapshot_id,
project_id)
    # NOTE: this definition is shadowed by the second image_sync_get() further
    # down in this class, so Python only keeps the later variant.
    def image_sync_get(self, context, image_id):
        return objects.ImageSync.get_by_image_id(context, image_id)
def image_sync(self, context, image, flavor=None, ret_volume=False):
if isinstance(image, six.string_types):
image = self.image_api.get(context, image, show_deleted=False)
LOG.debug("image = %s", image)
image_sync = objects.ImageSync(context, image_id=image['id'],
project_id=context.project_id,
status="creating")
image_sync.create()
return self.worker_rpcapi.image_sync(context, image, flavor,
image_sync, ret_volume)
def image_sync_get(self, context, image_id):
image_sync = objects.ImageSync.get_by_image_id(context, image_id)
return {'image_sync': {'image_id': image_sync.image_id,
'project_id': image_sync.project_id,
'status': image_sync.status}}
| apache-2.0 | 3,362,914,868,334,997,000 | 41.541935 | 82 | 0.573324 | false |
lenoch/tagsetbench | tagsetbench.py | 1 | 5829 | from contextlib import contextmanager
from copy import deepcopy
from pathlib import PosixPath
from log import log
# WISH: get rid of this (it is used from annotate, bootstrap, create_model, majka, makefile, split_corpus and test_configure)
ShellPath = PosixPath
class ExpandableList(list):
def __init__(self, iterable):
super().__init__(iterable)
self.prepended_items = []
def __iter__(self):
for item in super().__iter__():
if self.prepended_items:
yield from self.prepended_items
self.prepended_items = []
yield item
# leftovers
if self.prepended_items:
yield from self.prepended_items
self.prepended_items = []
# TODO: read_params
# TODO: treat a missing value as an error (probably for scalars first, or for
# lists as well?)
def read_args(argv, args={}):
"""
Convert argv to a dictionary of parameters.
All parameters start with "--". Only parameters declared in the 'args'
dictionary are understood. Their types are inferred from default values, or
in the case of lists, from the first, sentinel value, which is removed at
the end.
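
    A minimal sketch of a call (the option names and values are illustrative):

        defaults = {'corpus': PosixPath('.'), 'tags': ['placeholder'],
                    'verbose': False}
        params = read_args(['prog', '--corpus', '/data',
                            '--tags', 'k1', 'k2', '--verbose'], defaults)
        # params['corpus'] == PosixPath('/data'), params['tags'] == ['k1', 'k2'],
        # params['verbose'] is True and params['argv'] keeps the raw argv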
"""
argv_expandable = ExpandableList(argv)
argv_iter = iter(argv_expandable)
executable = next(argv_iter)
args = deepcopy(args)
expected = None
expected_action = True
    action = None  # TODO: the first parameter, given before the "--" options
for arg in argv_iter:
if arg.startswith('--'):
arg = arg[2:]
if arg in args:
if isinstance(args[arg], bool):
args[arg] = True
expected = arg
else:
raise ValueError('Unknown parameter --{}'.format(arg))
elif expected:
# if expected == 'preset': # shorthands for parameter combinations
# argv_expandable.prepended_items.extend(presets[arg]['argv'])
# argv_expandable.prepended_items.append('--preset')
if isinstance(args[expected], list):
converted_value = _convert_arg(arg, sentinel=args[expected][0])
args[expected].append(converted_value)
else:
args[expected] = _convert_arg(arg, sentinel=args[expected])
# continue reading lists; don't convert scalars to lists implicitly
# (yet); allow replacing scalars (for now)
if not isinstance(args[expected], list):
expected = None
_remove_sentinels_from_lists(args)
    # WISH: args['cwd'] = PosixPath(__file__), but the caller would have to pass that in
args['argv'] = argv
return args
def _convert_arg(value, sentinel=None):
if isinstance(sentinel, bool):
return value.lower() in ('yes', 'true', 'on', '1')
elif isinstance(sentinel, int):
try:
return int(value)
except ValueError:
return None
elif isinstance(sentinel, PosixPath):
return PosixPath(value)
else:
return value
def _remove_sentinels_from_lists(args):
for option, value in args.items():
if isinstance(value, list):
            # log.debug('Removing sentinel value from option %s: %r', option, value[0])
value.pop(0)
def serialize_input_params(args): # JSONize
args = deepcopy(args)
for key, value in args.items():
if isinstance(value, PosixPath):
args[key] = str(value)
elif isinstance(value, list):
for i, item in enumerate(value):
if isinstance(item, PosixPath):
value[i] = str(item)
return args
# TODO: also support in-line output somewhere (but preferably colourized):
#
# <s p="59%">"/"/kIx"<g/>Až/až/k6eAd1 v/v/k7c6 dopise/dopis/k1gInSc6 ze/z/k7c2
# <phr w="16." l="##." t="k2gInSc2xO">16/##/k4<g/>././kIx. </phr>
# července/červenec/k1gInSc2 mi/já/k3xPp1nSc3 …
#
# with / instead of tabs and // in place of /, with the restriction that empty
# values would be forbidden; they would have to be replaced by some
# placeholder, e.g. EMPTY or something smarter
#
# or possibly a different separator altogether (Unicode has a narrow space)
#
# instead of <g/> one could use that space-underscore glyph (a "printable space")
def print_sentence(sentence, output_fd):
print('<{} number="{}">'.format(sentence.kind, sentence.number),
file=output_fd)
for line in sentence.tokens:
print(line, file=output_fd)
print('</{}>'.format(sentence.kind), file=output_fd)
# Functions used in evaluation (gone)
# Functions used in tests
def assert_equal(computed, expected):
if computed is not expected:
if not (isinstance(computed, str) or isinstance(expected, str)):
log.warning('%s is not identical to %s', computed, expected)
if computed != expected:
raise AssertionError('{} != {}'.format(computed, expected))
# log.info('OK %s == %s', computed, expected)
@contextmanager
def assert_raises(error):
try:
yield
except error as e:
log.debug('correctly raised %s(%s)', error.__name__, e)
else:
raise AssertionError('{} was not triggered'.format(error.__name__))
def custom_repr(cls):
def repr_with_custom_fields(cls):
# NOTE: names of fields must not lead to cycles (e.g. through parent-
# child relations)
fields_and_values = ('='.join((field, repr(getattr(cls, field))))
for field in cls.CUSTOM_REPR_FIELDS)
        # I should probably rewrite the recursion above to walk a stack
        # instead, so that I can keep track of the nesting level…
return '{}({})'.format(cls.__class__.__name__,
',\n\t'.join(fields_and_values)) # ', '
cls.__repr__ = repr_with_custom_fields
return cls
| mit | -836,647,996,692,605,200 | 32.643275 | 101 | 0.605076 | false |
Cito/DBUtils | dbutils/steady_pg.py | 1 | 11312 | """SteadyPg - hardened classic PyGreSQL connections.
Implements steady connections to a PostgreSQL database
using the classic (not DB-API 2 compliant) PyGreSQL API.
The connections are transparently reopened when they are
closed or the database connection has been lost or when
they are used more often than an optional usage limit.
Only connections which have been marked as being in a database
transaction with a begin() call will not be silently replaced.
A typical situation where database connections are lost
is when the database server or an intervening firewall is
shutdown and restarted for maintenance reasons. In such a
case, all database connections would become unusable, even
though the database service may be already available again.
The "hardened" connections provided by this module will
make the database connections immediately available again.
This results in a steady PostgreSQL connection that can be used
by PooledPg or PersistentPg to create pooled or persistent
connections to a PostgreSQL database in a threaded environment
such as the application server of "Webware for Python."
Note, however, that the connections themselves are not thread-safe.
For more information on PostgreSQL, see:
https://www.postgresql.org/
For more information on PyGreSQL, see:
http://www.pygresql.org
For more information on Webware for Python, see:
https://webwareforpython.github.io/w4py/
Usage:
You can use the class SteadyPgConnection in the same way as you
would use the class DB from the classic PyGreSQL API module db.
The only difference is that you may specify a usage limit as the
first parameter when you open a connection (set it to None
if you prefer unlimited usage), and an optional list of commands
that may serve to prepare the session as the second parameter,
and you can specify whether it is allowed to close the connection
(by default this is true). When the connection to the PostgreSQL
database is lost or has been used too often, it will be automatically
reset, without further notice.
from dbutils.steady_pg import SteadyPgConnection
db = SteadyPgConnection(10000, ["set datestyle to german"],
host=..., dbname=..., user=..., ...)
...
result = db.query('...')
...
db.close()
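
The connection can also be used as a context manager, in which case the
enclosed queries run inside a single transaction that is committed on success
and rolled back on error.  A sketch only (the table and SQL are illustrative):

with db:
    db.query("insert into demo_log values ('started')")
    db.query("insert into demo_log values ('finished')")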
Ideas for improvement:
* Alternatively to the maximum number of uses,
implement a maximum time to live for connections.
* Optionally log usage and loss of connection.
Copyright, credits and license:
* Contributed as supplement for Webware for Python and PyGreSQL
by Christoph Zwerschke in September 2005
Licensed under the MIT license.
"""
from pg import DB as PgConnection
from . import __version__
try:
baseint = (int, long)
except NameError: # Python 3
baseint = int
class SteadyPgError(Exception):
"""General SteadyPg error."""
class InvalidConnection(SteadyPgError):
"""Database connection is invalid."""
class SteadyPgConnection:
"""Class representing steady connections to a PostgreSQL database.
Underlying the connection is a classic PyGreSQL pg API database
connection which is reset if the connection is lost or used too often.
Thus the resulting connection is steadier ("tough and self-healing").
If you want the connection to be persistent in a threaded environment,
then you should not deal with this class directly, but use either the
PooledPg module or the PersistentPg module to get the connections.
"""
version = __version__
def __init__(
self, maxusage=None, setsession=None, closeable=True,
*args, **kwargs):
"""Create a "tough" PostgreSQL connection.
maxusage: maximum usage limit for the underlying PyGreSQL connection
(number of uses, 0 or None means unlimited usage)
When this limit is reached, the connection is automatically reset.
setsession: optional list of SQL commands that may serve to prepare
the session, e.g. ["set datestyle to ...", "set time zone ..."]
closeable: if this is set to false, then closing the connection will
be silently ignored, but by default the connection can be closed
args, kwargs: the parameters that shall be used to establish
the PostgreSQL connections with PyGreSQL using pg.DB()
"""
# basic initialization to make finalizer work
self._con = None
self._closed = True
# proper initialization of the connection
if maxusage is None:
maxusage = 0
if not isinstance(maxusage, baseint):
raise TypeError("'maxusage' must be an integer value.")
self._maxusage = maxusage
self._setsession_sql = setsession
self._closeable = closeable
self._con = PgConnection(*args, **kwargs)
self._transaction = False
self._closed = False
self._setsession()
self._usage = 0
def __enter__(self):
"""Enter the runtime context. This will start a transaction."""
self.begin()
return self
def __exit__(self, *exc):
"""Exit the runtime context. This will end the transaction."""
if exc[0] is None and exc[1] is None and exc[2] is None:
self.commit()
else:
self.rollback()
def _setsession(self):
"""Execute the SQL commands for session preparation."""
if self._setsession_sql:
for sql in self._setsession_sql:
self._con.query(sql)
def _close(self):
"""Close the tough connection.
You can always close a tough connection with this method
and it will not complain if you close it more than once.
"""
if not self._closed:
try:
self._con.close()
except Exception:
pass
self._transaction = False
self._closed = True
def close(self):
"""Close the tough connection.
You are allowed to close a tough connection by default
and it will not complain if you close it more than once.
You can disallow closing connections by setting
the closeable parameter to something false. In this case,
closing tough connections will be silently ignored.
"""
if self._closeable:
self._close()
elif self._transaction:
self.reset()
def reopen(self):
"""Reopen the tough connection.
It will not complain if the connection cannot be reopened.
"""
try:
self._con.reopen()
except Exception:
            if self._transaction:
self._transaction = False
try:
self._con.query('rollback')
except Exception:
pass
else:
self._transaction = False
self._closed = False
self._setsession()
self._usage = 0
def reset(self):
"""Reset the tough connection.
If a reset is not possible, tries to reopen the connection.
It will not complain if the connection is already closed.
"""
try:
self._con.reset()
self._transaction = False
self._setsession()
self._usage = 0
except Exception:
try:
self.reopen()
except Exception:
try:
self.rollback()
except Exception:
pass
def begin(self, sql=None):
"""Begin a transaction."""
self._transaction = True
try:
begin = self._con.begin
except AttributeError:
return self._con.query(sql or 'begin')
else:
# use existing method if available
if sql:
return begin(sql=sql)
else:
return begin()
def end(self, sql=None):
"""Commit the current transaction."""
self._transaction = False
try:
end = self._con.end
except AttributeError:
return self._con.query(sql or 'end')
else:
if sql:
return end(sql=sql)
else:
return end()
def commit(self, sql=None):
"""Commit the current transaction."""
self._transaction = False
try:
commit = self._con.commit
except AttributeError:
return self._con.query(sql or 'commit')
else:
if sql:
return commit(sql=sql)
else:
return commit()
def rollback(self, sql=None):
"""Rollback the current transaction."""
self._transaction = False
try:
rollback = self._con.rollback
except AttributeError:
return self._con.query(sql or 'rollback')
else:
if sql:
return rollback(sql=sql)
else:
return rollback()
def _get_tough_method(self, method):
"""Return a "tough" version of a connection class method.
The tough version checks whether the connection is bad (lost)
and automatically and transparently tries to reset the connection
if this is the case (for instance, the database has been restarted).
"""
def tough_method(*args, **kwargs):
transaction = self._transaction
if not transaction:
try:
# check whether connection status is bad
# or the connection has been used too often
if not self._con.db.status or (
self._maxusage and self._usage >= self._maxusage):
raise AttributeError
except Exception:
self.reset() # then reset the connection
try:
result = method(*args, **kwargs) # try connection method
except Exception: # error in query
if transaction: # inside a transaction
self._transaction = False
raise # propagate the error
elif self._con.db.status: # if it was not a connection problem
raise # then propagate the error
else: # otherwise
self.reset() # reset the connection
result = method(*args, **kwargs) # and try one more time
self._usage += 1
return result
return tough_method
def __getattr__(self, name):
"""Inherit the members of the standard connection class.
Some methods are made "tougher" than in the standard version.
"""
if self._con:
attr = getattr(self._con, name)
if (name in ('query', 'get', 'insert', 'update', 'delete')
or name.startswith('get_')):
attr = self._get_tough_method(attr)
return attr
else:
raise InvalidConnection
def __del__(self):
"""Delete the steady connection."""
try:
self._close() # make sure the connection is closed
except: # builtin Exceptions might not exist any more
pass
| mit | 3,576,289,393,901,881,000 | 33.487805 | 79 | 0.606082 | false |
prophile/jacquard | jacquard/storage/utils.py | 1 | 3766 | """General storage engine utilities."""
import json
import logging
import functools
import collections.abc
from jacquard.plugin import plug
from jacquard.storage.exceptions import Retry
def retrying(fn):
"""Decorator: reissues the function if it raises Retry."""
logger = logging.getLogger("jacquard.storage.retrying")
@functools.wraps(fn)
def wrapper(*args, **kwargs):
while True:
try:
return fn(*args, **kwargs)
except Retry:
callable_name = getattr(fn, "__name__", "anonymous function")
logger.debug("Retry issued from %s, reissuing", callable_name)
return wrapper
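# A minimal usage sketch (illustrative only, not part of this module), assuming
# the storage engine signals transient conflicts by raising Retry when the
# transaction block exits:
#
#     @retrying
#     def update_setting(store, key, value):
#         with store.transaction() as txn:
#             txn[key] = value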
def copy_data(from_engine, to_engine, flush=False):
"""Copy all keys between two storage engines."""
with from_engine.transaction(read_only=True) as src:
with to_engine.transaction() as dst:
if flush:
dst.clear()
dst.update(src)
_MISSING = object()
class TransactionMap(collections.abc.MutableMapping):
"""
Mutable mapping built on storage engines.
Data are fetched through `.get` and `.keys` on `StorageEngine`, but changes
are kept in the `changes` and `deletions` attributes which correspond with
the two arguments of the same name to `.commit`.
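
    A rough usage sketch (the key names are illustrative; `store` is any
    storage engine instance):

        tmap = TransactionMap(store)
        tmap['active'] = {'on': True}    # recorded in `changes`
        del tmap['stale']                # recorded in `deletions`
        # the caller would then typically pass tmap.changes and
        # tmap.deletions on to the engine's commit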
"""
def __init__(self, store):
"""Initialise from storage engine."""
self.store = store
self._store_keys = None
self.changes = {}
self.deletions = set()
self._cache = {}
def _get_keys(self):
"""Get all (decoded) keys from storage engine."""
if self._store_keys is None:
self._store_keys = list(self.store.keys())
current_keys = {x for x in self._store_keys if x not in self.deletions}
current_keys.update(self.changes.keys())
return sorted(self.store.decode_key(x) for x in current_keys)
def __len__(self):
"""Number of keys."""
return len(self._get_keys())
def __iter__(self):
"""Iterator over keys."""
return iter(self._get_keys())
def __getitem__(self, key):
"""Lookup by key. Respects any pending changes/deletions."""
try:
cached_value = self._cache[key]
except KeyError:
result = self.store.get(self.store.encode_key(key))
else:
if cached_value is _MISSING:
raise KeyError(key)
return cached_value
if result is None:
self._cache[key] = _MISSING
raise KeyError(key)
# UTF-8 decoding
if isinstance(result, bytes):
result = result.decode("utf-8")
result = json.loads(result)
self._cache[key] = result
return result
def __setitem__(self, key, value):
"""Overwrite or set key."""
self._cache[key] = value
encoded_key = self.store.encode_key(key)
self.changes[encoded_key] = json.dumps(value)
self.deletions.discard(encoded_key)
def __delitem__(self, key):
"""Delete key."""
old_value = self.get(key)
if old_value is None:
raise KeyError(key)
        self._cache[key] = _MISSING  # mark as absent so later lookups raise KeyError
encoded_key = self.store.encode_key(key)
try:
del self.changes[encoded_key]
except KeyError:
pass
self.deletions.add(encoded_key)
def open_engine(config, engine, url):
"""
Open and connect to a given engine and URL.
This looks up the backend through the entry points mechanism, and is
pluggable by adding `StorageEngine` subclasses to the entry points
group `jacquard.storage_engines`.
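
    A sketch of a call (the engine name and URL are illustrative and must match
    an entry point registered in that group):

        engine = open_engine(config, 'dummy', 'dummy://')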
"""
cls = plug("storage_engines", engine, config=config)()
return cls(url)
| mit | 1,159,509,810,565,048,000 | 28.193798 | 79 | 0.594796 | false |
pyfa-org/eos | tests/integration/calculator/mod_operator/mixed/test_all_in.py | 1 | 7011 | # ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos import Implant
from eos import Rig
from eos.const.eos import ModAffecteeFilter
from eos.const.eos import ModDomain
from eos.const.eos import ModOperator
from eos.const.eve import EffectCategoryId
from tests.integration.calculator.testcase import CalculatorTestCase
class TestOperatorAllIn(CalculatorTestCase):
"""Test interaction of all operators, besides post-assignment."""
def test_all_in(self):
tgt_attr = self.mkattr(stackable=0)
src_attr = self.mkattr()
modifier_pre_ass = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=tgt_attr.id,
operator=ModOperator.pre_assign,
affector_attr_id=src_attr.id)
effect_pre_ass = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier_pre_ass])
value_pre_ass = 5
influence_src_pre_ass = Implant(self.mktype(
attrs={src_attr.id: value_pre_ass},
effects=[effect_pre_ass]).id)
self.fit.implants.add(influence_src_pre_ass)
modifier_pre_mul = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=tgt_attr.id,
operator=ModOperator.pre_mul,
affector_attr_id=src_attr.id)
effect_pre_mul = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier_pre_mul])
value_pre_mul = 50
influence_src_pre_mul = Implant(self.mktype(
attrs={src_attr.id: value_pre_mul},
effects=[effect_pre_mul]).id)
self.fit.implants.add(influence_src_pre_mul)
modifier_pre_div = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=tgt_attr.id,
operator=ModOperator.pre_div,
affector_attr_id=src_attr.id)
effect_pre_div = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier_pre_div])
value_pre_div = 0.5
influence_src_pre_div = Implant(self.mktype(
attrs={src_attr.id: value_pre_div},
effects=[effect_pre_div]).id)
self.fit.implants.add(influence_src_pre_div)
modifier_mod_add = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=tgt_attr.id,
operator=ModOperator.mod_add,
affector_attr_id=src_attr.id)
effect_mod_add = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier_mod_add])
value_mod_add = 10
influence_src_mod_add = Implant(self.mktype(
attrs={src_attr.id: value_mod_add},
effects=[effect_mod_add]).id)
self.fit.implants.add(influence_src_mod_add)
modifier_mod_sub = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=tgt_attr.id,
operator=ModOperator.mod_sub,
affector_attr_id=src_attr.id)
effect_mod_sub = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier_mod_sub])
value_mod_sub = 63
influence_src_mod_sub = Implant(self.mktype(
attrs={src_attr.id: value_mod_sub},
effects=[effect_mod_sub]).id)
self.fit.implants.add(influence_src_mod_sub)
modifier_post_mul = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=tgt_attr.id,
operator=ModOperator.post_mul,
affector_attr_id=src_attr.id)
effect_post_mul = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier_post_mul])
value_post_mul = 1.35
influence_src_post_mul = Implant(self.mktype(
attrs={src_attr.id: value_post_mul},
effects=[effect_post_mul]).id)
self.fit.implants.add(influence_src_post_mul)
modifier_post_div = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=tgt_attr.id,
operator=ModOperator.post_div,
affector_attr_id=src_attr.id)
effect_post_div = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier_post_div])
value_post_div = 2.7
influence_src_post_div = Implant(self.mktype(
attrs={src_attr.id: value_post_div},
effects=[effect_post_div]).id)
self.fit.implants.add(influence_src_post_div)
modifier_post_perc = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=tgt_attr.id,
operator=ModOperator.post_percent,
affector_attr_id=src_attr.id)
effect_post_perc = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier_post_perc])
value_post_perc = 15
influence_src_post_perc = Implant(self.mktype(
attrs={src_attr.id: value_post_perc},
effects=[effect_post_perc]).id)
self.fit.implants.add(influence_src_post_perc)
influence_tgt = Rig(self.mktype(attrs={tgt_attr.id: 100}).id)
# Action
self.fit.rigs.add(influence_tgt)
# Verification
# Operators shouldn't be penalized and should go in this order
expected_value = ((
value_pre_ass * value_pre_mul / value_pre_div +
value_mod_add - value_mod_sub) *
value_post_mul / value_post_div * (1 + value_post_perc / 100))
self.assertAlmostEqual(influence_tgt.attrs[tgt_attr.id], expected_value)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
| lgpl-3.0 | -828,864,303,853,738,000 | 42.546584 | 80 | 0.61432 | false |
OpenMined/PySyft | packages/grid/apps/domain/src/__main__.py | 1 | 1688 | """
Note:
This file should be used only for development purposes.
The Flask built-in web server isn't suitable for production.
For production, we need to put it behind a real web server able to communicate
with Flask through the WSGI protocol.
A common choice for that is Gunicorn.
"""
# stdlib
import argparse
import os
# third party
from app import create_app
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler
parser = argparse.ArgumentParser(description="Run PyGrid application.")
parser.add_argument(
"--port",
"-p",
type=int,
help="Port number of the socket server, e.g. --port=5000. Default is os.environ.get('GRID_NODE_PORT', 5000).",
default=os.environ.get("GRID_NODE_PORT", 5000),
)
parser.add_argument(
"--host",
type=str,
help="Grid node host, e.g. --host=0.0.0.0. Default is os.environ.get('GRID_NODE_HOST','0.0.0.0').",
default=os.environ.get("GRID_NODE_HOST", "0.0.0.0"),
)
parser.add_argument(
"--name",
type=str,
help="Grid node name, e.g. --name=OpenMined. Default is os.environ.get('GRID_NODE_NAME','OpenMined').",
default=os.environ.get("GRID_NODE_NAME", "OpenMined"),
)
parser.add_argument(
"--start_local_db",
dest="start_local_db",
action="store_true",
help="If this flag is used a SQLAlchemy DB URI is generated to use a local db.",
)
parser.set_defaults(use_test_config=False)
if __name__ == "__main__":
args = parser.parse_args()
app = create_app(args)
_address = "http://{}:{}".format(args.host, args.port)
server = pywsgi.WSGIServer(
(args.host, args.port), app, handler_class=WebSocketHandler
)
server.serve_forever()
| apache-2.0 | -5,058,307,953,008,342,000 | 25.793651 | 114 | 0.675948 | false |
manglakaran/TrafficKarmaSent | src/traffickarmasent/test.py | 1 | 2759 | import tweepy
import json,re,sys
# Authentication details. To obtain these visit dev.twitter.com
consumer_key = 'ufulV3imKoYNzdh58LotTC1YD'
consumer_secret = '2A781ma736HTenAXXYn9tRIelQYJkbCqY0GLi7W71ZwwDmNU59'
access_token = '2564905075-MY9osfHabaRnonQVHHhHeA1vCLSOhuHWjBNBiIY'
access_token_secret = 'JsD8Woc7iiFiDSwoCwjNAb6KNEurz7tBqSj9pJV8WXabr'
# This is the listener, resposible for receiving data
'''
class StdOutListener(tweepy.StreamListener):
def on_data(self, data):
# Twitter returns data in JSON format - we need to decode it first
decoded = json.loads(data)
element = decoded['text']
print element
#print '@%s Name %s ID %s:TWID %s %s' % (decoded['user']['screen_name'],decoded['user']['name'], decoded['user']['id'],decoded['id'] ,decoded['text'].encode('ascii', 'ignore'))
print ''
return True
def on_error(self, status):
print status
if __name__ == '__main__':
l = StdOutListener()
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
print "Showing all new tweets for #programming:"
# There are different kinds of streams: public stream, user stream, multi-user streams
# In this example follow #programming tag
# For more details refer to https://dev.twitter.com/docs/streaming-apis
stream = tweepy.Stream(auth, l)
stream.filter(track=['games'])
'''
class StdOutListener(tweepy.StreamListener):
def on_data(self, data):
# Twitter returns data in JSON format - we need to decode it first
decoded = json.loads(data)
# Also, we convert UTF-8 to ASCII ignoring all bad characters sent by users
#print '@%s: %s' % (decoded['user']['screen_name'], decoded['text'].encode('ascii', 'ignore'))
element = decoded['text']
print element ,
element = re.sub(r"htt\S+", "", element)
element = re.sub("@(\w+)| #","",element)
#element = re.sub("(@[A-Za-z0-9]+)","",element)
#element = ' '.join(re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])"," ",element).split())
print element
return True
def on_error(self, status):
print status
if __name__ == '__main__':
l = StdOutListener()
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
print "Showing all new tweets for #programming:"
# There are different kinds of streams: public stream, user stream, multi-user streams
# In this example follow #programming tag
# For more details refer to https://dev.twitter.com/docs/streaming-apis
stream = tweepy.Stream(auth, l)
stream.filter(track=['ping'])
| mit | -482,987,571,816,521,100 | 26.868687 | 185 | 0.654223 | false |
scionrep/scioncc | src/ion/util/test/test_parse_utils.py | 1 | 11720 | #!/usr/bin/env python
__author__ = 'Michael Meisinger'
from nose.plugins.attrib import attr
from pyon.public import BadRequest
from pyon.util.unit_test import UnitTestCase
from ion.util.parse_utils import get_typed_value
@attr('UNIT')
class TestParseUtils(UnitTestCase):
def test_get_typed_value(self):
# TEST: Integers
ret_val = get_typed_value(999, targettype="int", strict=True)
self.assertEqual(ret_val, 999)
with self.assertRaises(BadRequest):
get_typed_value("999", targettype="int", strict=True)
with self.assertRaises(BadRequest):
get_typed_value("999.9", targettype="int", strict=False)
with self.assertRaises(BadRequest):
get_typed_value(None, targettype="int", strict=False)
with self.assertRaises(BadRequest):
get_typed_value("", targettype="int", strict=False)
ret_val = get_typed_value("999", targettype="int", strict=False)
self.assertEqual(ret_val, 999)
long_val = 9999999999999999999
self.assertEqual(type(long_val), long)
ret_val = get_typed_value(long_val, targettype="int", strict=True)
self.assertEqual(ret_val, long_val)
schema_entry = dict(type="int")
ret_val = get_typed_value(999, schema_entry=schema_entry, strict=True)
self.assertEqual(ret_val, 999)
# TEST: Float
ret_val = get_typed_value(999.9, targettype="float", strict=True)
self.assertEqual(ret_val, 999.9)
with self.assertRaises(BadRequest):
get_typed_value("999.9", targettype="float", strict=True)
with self.assertRaises(BadRequest):
get_typed_value(None, targettype="float", strict=False)
with self.assertRaises(BadRequest):
get_typed_value("", targettype="float", strict=False)
ret_val = get_typed_value("999.9", targettype="float", strict=False)
self.assertEqual(ret_val, 999.9)
ret_val = get_typed_value("999", targettype="float", strict=False)
self.assertEqual(ret_val, 999.0)
# TEST: String
ret_val = get_typed_value("foo", targettype="str", strict=True)
self.assertEqual(ret_val, "foo")
with self.assertRaises(BadRequest):
get_typed_value(999, targettype="str", strict=True)
with self.assertRaises(BadRequest):
get_typed_value(None, targettype="str", strict=True)
ret_val = get_typed_value(999, targettype="str", strict=False)
self.assertEqual(ret_val, "999")
ret_val = get_typed_value(True, targettype="str", strict=False)
self.assertEqual(ret_val, "True")
unicode_val = u'foo \u20ac foo'
ret_val = get_typed_value(unicode_val, targettype="str", strict=True)
self.assertEqual(type(ret_val), str)
self.assertEqual(ret_val, "foo \xe2\x82\xac foo")
# TEST: Bool
ret_val = get_typed_value(True, targettype="bool", strict=True)
self.assertEqual(ret_val, True)
ret_val = get_typed_value(False, targettype="bool", strict=True)
self.assertEqual(ret_val, False)
with self.assertRaises(BadRequest):
get_typed_value("True", targettype="bool", strict=True)
with self.assertRaises(BadRequest):
get_typed_value(None, targettype="bool", strict=True)
with self.assertRaises(BadRequest):
get_typed_value("", targettype="bool", strict=True)
with self.assertRaises(BadRequest):
get_typed_value(123, targettype="bool", strict=True)
with self.assertRaises(BadRequest):
get_typed_value(0, targettype="bool", strict=True)
ret_val = get_typed_value("True", targettype="bool", strict=False)
self.assertEqual(ret_val, True)
ret_val = get_typed_value("true", targettype="bool", strict=False)
self.assertEqual(ret_val, True)
ret_val = get_typed_value("TRUE", targettype="bool", strict=False)
self.assertEqual(ret_val, True)
ret_val = get_typed_value("1", targettype="bool", strict=False)
self.assertEqual(ret_val, True)
ret_val = get_typed_value(1, targettype="bool", strict=False)
self.assertEqual(ret_val, True)
ret_val = get_typed_value("False", targettype="bool", strict=False)
self.assertEqual(ret_val, False)
ret_val = get_typed_value("FALSE", targettype="bool", strict=False)
self.assertEqual(ret_val, False)
ret_val = get_typed_value("false", targettype="bool", strict=False)
self.assertEqual(ret_val, False)
ret_val = get_typed_value("0", targettype="bool", strict=False)
self.assertEqual(ret_val, False)
ret_val = get_typed_value("", targettype="bool", strict=False)
self.assertEqual(ret_val, False)
ret_val = get_typed_value(None, targettype="bool", strict=False)
self.assertEqual(ret_val, False)
with self.assertRaises(BadRequest):
get_typed_value("F", targettype="bool", strict=False)
with self.assertRaises(BadRequest):
get_typed_value("Falsy", targettype="bool", strict=False)
with self.assertRaises(BadRequest):
get_typed_value("Truey", targettype="bool", strict=False)
with self.assertRaises(BadRequest):
get_typed_value(" True", targettype="bool", strict=False)
# TEST: List
list_val = [1, True, "foo"]
ret_val = get_typed_value(list_val, targettype="list", strict=True)
self.assertEqual(ret_val, list_val)
ret_val = get_typed_value([], targettype="list", strict=True)
self.assertEqual(ret_val, [])
with self.assertRaises(BadRequest):
get_typed_value(None, targettype="list", strict=True)
with self.assertRaises(BadRequest):
get_typed_value("[]", targettype="list", strict=True)
with self.assertRaises(BadRequest):
get_typed_value("", targettype="list", strict=True)
with self.assertRaises(BadRequest):
get_typed_value(tuple(), targettype="list", strict=True)
ret_val = get_typed_value(1, targettype="list", strict=False)
self.assertEqual(ret_val, [1])
ret_val = get_typed_value(tuple(list_val), targettype="list", strict=False)
self.assertEqual(ret_val, list_val)
ret_val = get_typed_value(set(list_val), targettype="list", strict=False)
self.assertEqual(type(ret_val), list)
self.assertEqual(set(ret_val), set(list_val))
ret_val = get_typed_value("1", targettype="list", strict=False)
self.assertEqual(ret_val, ["1"])
ret_val = get_typed_value("a,b,c", targettype="list", strict=False)
self.assertEqual(ret_val, ["a", "b", "c"])
ret_val = get_typed_value("[a,b,c]", targettype="list", strict=False)
self.assertEqual(ret_val, ["a", "b", "c"])
ret_val = get_typed_value("['a','b',3]", targettype="list", strict=False)
self.assertEqual(ret_val, ["a", "b", 3])
ret_val = get_typed_value("[]", targettype="list", strict=False)
self.assertEqual(ret_val, [])
ret_val = get_typed_value(None, targettype="list", strict=False)
self.assertEqual(ret_val, [None])
ret_val = get_typed_value(True, targettype="list", strict=False)
self.assertEqual(ret_val, [True])
# TEST: Simplelist
ret_val = get_typed_value("a,b,c", targettype="simplelist")
self.assertEqual(ret_val, ["a", "b", "c"])
# TEST: Dict
dict_val = {'a': 1, 'b': True}
ret_val = get_typed_value(dict_val, targettype="dict", strict=True)
self.assertEqual(ret_val, dict_val)
ret_val = get_typed_value({}, targettype="dict", strict=True)
self.assertEqual(ret_val, {})
with self.assertRaises(BadRequest):
get_typed_value(None, targettype="dict", strict=True)
with self.assertRaises(BadRequest):
get_typed_value("{}", targettype="dict", strict=True)
with self.assertRaises(BadRequest):
get_typed_value("", targettype="dict", strict=True)
ret_val = get_typed_value("{}", targettype="dict", strict=False)
self.assertEqual(ret_val, {})
ret_val = get_typed_value("{'a': 1, 'b': True}", targettype="dict", strict=False)
self.assertEqual(ret_val, dict_val)
ret_val = get_typed_value("a: 1, b: c, c: True", targettype="dict", strict=False)
self.assertEqual(ret_val, {'a': 1, 'b': 'c', 'c': True})
ret_val = get_typed_value("a.x: 1, a.y: 2.2, b: false", targettype="dict", strict=False)
self.assertEqual(ret_val, {'a': {'x': 1, 'y': 2.2}, 'b': 'false'})
# TEST: None
ret_val = get_typed_value(None, targettype="NoneType", strict=True)
self.assertEqual(ret_val, None)
ret_val = get_typed_value(1, targettype="NoneType", strict=True)
self.assertEqual(ret_val, 1)
ret_val = get_typed_value(True, targettype="NoneType", strict=True)
self.assertEqual(ret_val, True)
ret_val = get_typed_value("foo", targettype="NoneType", strict=True)
self.assertEqual(ret_val, "foo")
ret_val = get_typed_value("None", targettype="NoneType", strict=True)
self.assertEqual(ret_val, "None")
ret_val = get_typed_value("None", targettype="NoneType", strict=False)
self.assertEqual(ret_val, None)
ret_val = get_typed_value("NONE", targettype="NoneType", strict=False)
self.assertEqual(ret_val, None)
ret_val = get_typed_value("none", targettype="NoneType", strict=False)
self.assertEqual(ret_val, None)
ret_val = get_typed_value("Null", targettype="NoneType", strict=False)
self.assertEqual(ret_val, None)
ret_val = get_typed_value("NULL", targettype="NoneType", strict=False)
self.assertEqual(ret_val, None)
ret_val = get_typed_value("null", targettype="NoneType", strict=False)
self.assertEqual(ret_val, None)
ret_val = get_typed_value("", targettype="NoneType", strict=False)
self.assertEqual(ret_val, None)
ret_val = get_typed_value(1, targettype="NoneType", strict=False)
self.assertEqual(ret_val, 1)
ret_val = get_typed_value("1", targettype="NoneType", strict=False)
self.assertEqual(ret_val, 1)
ret_val = get_typed_value(1.1, targettype="NoneType", strict=False)
self.assertEqual(ret_val, 1.1)
ret_val = get_typed_value("1.1", targettype="NoneType", strict=False)
self.assertEqual(ret_val, 1.1)
# TEST: Enum
from interface.objects import SampleEnum
schema_entry = dict(type="int", enum_type="SampleEnum")
ret_val = get_typed_value(SampleEnum.MONDAY, schema_entry=schema_entry, strict=True)
self.assertEqual(ret_val, SampleEnum.MONDAY)
ret_val = get_typed_value("MONDAY", schema_entry=schema_entry, strict=True)
self.assertEqual(ret_val, SampleEnum.MONDAY)
with self.assertRaises(BadRequest):
get_typed_value("Monday", schema_entry=schema_entry, strict=True)
ret_val = get_typed_value("MONDAY", schema_entry=schema_entry, strict=False)
self.assertEqual(ret_val, SampleEnum.MONDAY)
ret_val = get_typed_value("Monday", schema_entry=schema_entry, strict=False)
self.assertEqual(ret_val, SampleEnum.MONDAY)
with self.assertRaises(BadRequest):
get_typed_value("HOLIDAY", schema_entry=schema_entry)
# TEST: Error conditions
with self.assertRaises(BadRequest):
get_typed_value(1)
with self.assertRaises(BadRequest):
get_typed_value(1, targettype="FOO")
| bsd-2-clause | -1,885,126,484,622,097,700 | 48.87234 | 96 | 0.626621 | false |
haiyangd/cockpit_view | test/testinfra.py | 1 | 9279 |
import argparse
import errno
import httplib
import json
import urllib
import os
import shutil
import subprocess
import sys
import tarfile
import tempfile
import urlparse
TOKEN = "~/.config/github-token"
WHITELIST = "~/.config/github-whitelist"
OS = os.environ.get("TEST_OS", "fedora-22")
ARCH = os.environ.get("TEST_ARCH", "x86_64")
TESTING = "Testing in progress"
__all__ = (
'Sink',
'GitHub',
'arg_parser',
'OS',
'ARCH',
'TESTING'
)
def arg_parser():
parser = argparse.ArgumentParser(description='Run Cockpit test(s)')
parser.add_argument('-j', '--jobs', dest="jobs", type=int,
default=os.environ.get("TEST_JOBS", 1), help="Number of concurrent jobs")
parser.add_argument('-v', '--verbose', dest="verbosity", action='store_const',
const=2, help='Verbose output')
parser.add_argument('-t', "--trace", dest='trace', action='store_true',
help='Trace machine boot and commands')
parser.add_argument('-q', '--quiet', dest='verbosity', action='store_const',
const=0, help='Quiet output')
parser.add_argument('--thorough', dest='thorough', action='store',
help='Thorough mode, no skipping known issues')
parser.add_argument('-s', "--sit", dest='sit', action='store_true',
help="Sit and wait after test failure")
parser.add_argument('tests', nargs='*')
parser.set_defaults(verbosity=1)
return parser
class Sink(object):
def __init__(self, host, identifier, status=None):
self.attachments = tempfile.mkdtemp(prefix="attachments.", dir=".")
self.status = status
        # Start the remote sink process over ssh
self.ssh = subprocess.Popen([ "ssh", host, "--", "python", "sink", identifier ], stdin=subprocess.PIPE)
# Send the status line
        if status is None:
            line = "\n"
        else:
            line = json.dumps(status) + "\n"
        self.ssh.stdin.write(line)
# Now dup our own output and errors into the pipeline
sys.stdout.flush()
self.fout = os.dup(1)
os.dup2(self.ssh.stdin.fileno(), 1)
sys.stderr.flush()
self.ferr = os.dup(2)
os.dup2(self.ssh.stdin.fileno(), 2)
def attach(self, filename):
shutil.move(filename, self.attachments)
def flush(self, status=None):
assert self.ssh is not None
# Reset stdout back
sys.stdout.flush()
os.dup2(self.fout, 1)
os.close(self.fout)
self.fout = -1
# Reset stderr back
sys.stderr.flush()
os.dup2(self.ferr, 2)
os.close(self.ferr)
self.ferr = -1
# Splice in the github status
if status is None:
status = self.status
if status is not None:
self.ssh.stdin.write("\n" + json.dumps(status))
# Send a zero character and send the attachments
files = os.listdir(self.attachments)
print >> sys.stderr, "attachments are", files
if len(files):
self.ssh.stdin.write('\x00')
self.ssh.stdin.flush()
with tarfile.open(name="attachments.tgz", mode="w:gz", fileobj=self.ssh.stdin) as tar:
for filename in files:
tar.add(os.path.join(self.attachments, filename), arcname=filename, recursive=True)
shutil.rmtree(self.attachments)
# All done sending output
self.ssh.stdin.close()
# SSH should terminate by itself
ret = self.ssh.wait()
if ret != 0:
raise subprocess.CalledProcessError(ret, "ssh")
self.ssh = None
def dict_is_subset(full, check):
for (key, value) in check.items():
if not key in full or full[key] != value:
return False
return True
class GitHub(object):
def __init__(self, base="/repos/cockpit-project/cockpit/"):
self.base = base
self.conn = None
self.token = None
try:
gt = open(os.path.expanduser(TOKEN), "r")
self.token = gt.read().strip()
gt.close()
except IOError as exc:
if exc.errno == errno.ENOENT:
pass
else:
raise
self.available = self.token and True or False
def context(self):
return "test/" + OS + "/" + ARCH
def qualify(self, resource):
return urlparse.urljoin(self.base, resource)
def request(self, method, resource, data="", headers=None):
if headers is None:
headers = { }
headers["User-Agent"] = "Cockpit Tests"
if self.token:
headers["Authorization"] = "token " + self.token
if not self.conn:
self.conn = httplib.HTTPSConnection("api.github.com", strict=True)
# conn.set_debuglevel(1)
self.conn.request(method, self.qualify(resource), data, headers)
response = self.conn.getresponse()
output = response.read()
if method == "GET" and response.status == 404:
return ""
elif response.status < 200 or response.status >= 300:
sys.stderr.write(output)
raise Exception("GitHub API problem: {0}".format(response.reason or response.status))
return output
def get(self, resource):
output = self.request("GET", resource)
if not output:
return None
return json.loads(output)
def post(self, resource, data):
headers = { "Content-Type": "application/json" }
return json.loads(self.request("POST", resource, json.dumps(data), headers))
def prioritize(self, revision, labels=[], update=None, baseline=10):
last = { }
state = None
statuses = self.get("commits/{0}/statuses".format(revision))
if statuses:
for status in statuses:
if status["context"] == self.context():
state = status["state"]
last = status
break
priority = baseline
# This commit definitively succeeds or fails
if state in [ "success", "failure" ]:
return 0
# This test errored, we try again but low priority
elif state in [ "error" ]:
update = None
priority = 4
if priority > 0:
if "priority" in labels:
priority += 2
if "needsdesign" in labels:
priority -= 2
if "needswork" in labels:
priority -= 3
if "blocked" in labels:
priority -= 1
# Is testing already in progress?
if last.get("description", None) == TESTING:
update = None
priority = 0
if update and priority <= 0:
update = update.copy()
update["description"] = "Manual testing required"
if update and not dict_is_subset(last, update):
self.post("statuses/" + revision, update)
return priority
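    # Worked example of the rules above: a whitelisted pull request enters with
    # baseline 10; a "priority" label lifts that to 12, "needswork" would drop
    # it to 7 and "blocked" to 9, and anything ending up at or below 0 is not
    # queued for testing at all.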
def scan(self, update=False):
pulls = []
# Try to load the whitelist
whitelist = None
try:
wh = open(os.path.expanduser(WHITELIST), "r")
whitelist = [x.strip() for x in wh.read().split("\n") if x.strip()]
except IOError as exc:
if exc.errno == errno.ENOENT:
pass
else:
raise
# The whitelist defaults to the current user
if whitelist is None:
user = self.get("/user")
if user:
whitelist = [ user["login"] ]
results = []
if update:
status = { "state": "pending", "description": "Not yet tested", "context": self.context() }
else:
status = None
master = self.get("git/refs/heads/master")
priority = self.prioritize(master["object"]["sha"], update=status, baseline=9)
if priority > 0:
results.append((priority, "master", master["object"]["sha"], "master"))
# Load all the pull requests
for pull in self.get("pulls"):
baseline = 10
# It needs to be in the whitelist
login = pull["head"]["user"]["login"]
if login not in whitelist:
if status:
status["description"] = "Manual testing required"
baseline = 0
# Pull in the labels for this pull
labels = []
for label in self.get("issues/{0}/labels".format(pull["number"])):
labels.append(label["name"])
number = pull["number"]
revision = pull["head"]["sha"]
priority = self.prioritize(revision, labels, update=status, baseline=baseline)
if priority > 0:
results.append((priority, "pull-%d" % number, revision, "pull/%d/head" % number))
results.sort(key=lambda v: v[0], reverse=True)
return results
if __name__ == '__main__':
github = GitHub("/repos/cockpit-project/cockpit/")
for (priority, name, revision, ref) in github.scan(True):
sys.stdout.write("{0}: {1} ({2})\n".format(name, revision, priority))
| lgpl-2.1 | 2,327,842,785,275,821,000 | 32.021352 | 111 | 0.55243 | false |
martinp/jarvis2 | jarvis/jobs/imap.py | 1 | 1559 | # -*- coding: utf-8 -*-
import imaplib
import re
try:
# urlparse was moved to urllib.parse in Python 3
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from jobs import AbstractJob
class IMAP(AbstractJob):
def __init__(self, conf):
self.interval = conf['interval']
self.email = conf['email']
self.url = urlparse(conf['url'])
self.tls = conf.get('tls', True)
self.starttls = conf.get('starttls', False)
self.folder = conf['folder']
def _parse_count(self, message):
count = re.search(r'\w+ (\d+)', message.decode('utf-8'))
return int(count.group(1)) if count is not None else 0
def _get_count(self):
_, message = self.mail.status(self.folder, '(MESSAGES)')
return self._parse_count(message[0])
def _get_unread_count(self):
_, message = self.mail.status(self.folder, '(UNSEEN)')
return self._parse_count(message[0])
def get(self):
if self.tls:
self.mail = imaplib.IMAP4_SSL(self.url.hostname, self.url.port)
else:
self.mail = imaplib.IMAP4(self.url.hostname, self.url.port)
if self.starttls:
self.mail.starttls()
self.mail.login(self.url.username, self.url.password)
count = self._get_count()
unread = self._get_unread_count()
self.mail.logout()
return {
'email': self.email,
'folder': self.folder,
'count': count,
'unread': unread
}
| mit | -1,302,837,366,633,347,300 | 28.980769 | 75 | 0.584349 | false |
Unofficial-Extend-Project-Mirror/openfoam-extend-Breeder-other-scripting-PyFoam | PyFoam/Infrastructure/Logging.py | 1 | 1801 | # ICE Revision: $Id$
"""Writes Logfiles"""
from PyFoam.ThirdParty.six import print_
try:
import logging
hasLogging=True
except ImportError:
# For Python-versions < 2.3
print_("Warning: old python-version. No logging-support")
hasLogging=False
from PyFoam.Infrastructure.Hardcoded import assertDirectory,logDirectory
from os import path
from platform import uname
from PyFoam import configuration as config
_definedLoggers=[]
def _getLoggingLevel(name):
"""Gets the logging level value from its name"""
level=config().get("Logging","default")
try:
level=config().get("Logging",name)
except:
pass
value=logging.INFO
try:
value=getattr(logging,level)
except AttributeError:
print_("WARNING: Wrong specification of debug level "+level+" for log "+name)
return value
class DummyLogger:
def __init__(self):
pass
def info(self,arg):
pass
def foamLogger(name="general"):
"""
:param name: name of the logfile
:return: a logger that is correctly set up for pyFoam
"""
if not hasLogging:
return DummyLogger()
log=logging.getLogger(name)
if not (name in _definedLoggers):
assertDirectory(logDirectory())
lname=path.join(logDirectory(),name)
# rot=logging.TimedRotatingFileHandler(lname,when="M",interval=2,backupCount=5)
rot=logging.FileHandler(lname)
machine=uname()[1].split(".")[0]
rot.setFormatter(logging.Formatter(fmt="%(asctime)s "+("%15s" % machine)+":%(process)-6d %(levelname)-8s %(message)s - in %(filename)s:%(lineno)d"))
log.addHandler(rot)
log.setLevel(_getLoggingLevel(name))
_definedLoggers.append(name)
return log
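# Minimal usage sketch (the log name is illustrative); records are appended to
# a file of that name in the PyFoam log directory:
#
#     log = foamLogger("myUtility")
#     log.info("case initialised")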
# Should work with Python3 and Python2 | gpl-2.0 | 4,549,308,299,438,506,500 | 25.115942 | 156 | 0.660744 | false |
mlundblad/telepathy-gabble | tests/twisted/caps/hashed-caps.py | 1 | 11926 | # coding=utf-8
"""
Test the verification string introduced in version 1.5 of XEP-0115
This test changes the caps several times:
- Initial presence to be online
- Change presence to handle audio calls, using XEP-0115-v1.3. Check that
'CapabilitiesChanged' *is* fired
- Change presence *not* to handle audio calls, using XEP-0115-v1.5, but with a
*bogus* hash. Check that 'CapabilitiesChanged' is *not* fired
- Change presence *not* to handle audio calls, using XEP-0115-v1.5, with a
*good* hash. Check that 'CapabilitiesChanged' *is* fired
- Change presence to handle audio calls, using XEP-0115-v1.5, with a XEP-0128
dataform. Check that 'CapabilitiesChanged' is fired
This is done for 2 contacts
Then, this test announce 2 contacts with the same hash.
- Gabble must ask only once for the hash and update the caps for both contacts
- When the caps advertised by the first contact does not match, Gabble asks
the second and update only the caps of the second contact
"""
import dbus
from twisted.words.xish import xpath
from gabbletest import (
exec_test, make_result_iq, make_presence, sync_stream, elem,
)
from servicetest import sync_dbus, EventPattern, assertLength
import constants as cs
import ns
from caps_helper import (
compute_caps_hash, make_caps_disco_reply, send_disco_reply,
fake_client_dataforms)
from config import VOIP_ENABLED
if not VOIP_ENABLED:
print "NOTE: built with --disable-voip"
raise SystemExit(77)
caps_changed_flag = False
some_identities = [
'client/pc/fr/le gabble',
'client/pc/en/gabble',
]
jingle_av_features = [
ns.JINGLE_015,
ns.JINGLE_015_AUDIO,
ns.JINGLE_015_VIDEO,
ns.GOOGLE_P2P,
]
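# For reference, a simplified sketch of how a XEP-0115 verification string is
# built from identities and features (sorted, joined with '<', SHA-1 hashed and
# base64-encoded).  Data forms are ignored, so this is illustrative only and
# not a replacement for caps_helper.compute_caps_hash, which the tests below
# actually use.
def example_caps_ver(identities, features):
    import base64
    import hashlib
    s = ''.join(i + '<' for i in sorted(identities))
    s += ''.join(f + '<' for f in sorted(features))
    return base64.b64encode(hashlib.sha1(s).digest())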
def caps_changed_cb(dummy):
# Workaround to bug 9980: do not raise an error but use a flag
# https://bugs.freedesktop.org/show_bug.cgi?id=9980
global caps_changed_flag
caps_changed_flag = True
def test_hash(q, bus, conn, stream, contact, contact_handle, client):
global caps_changed_flag
presence = make_presence(contact, status='hello')
stream.send(presence)
q.expect('dbus-signal', signal='PresencesChanged',
args=[{contact_handle:
(2, u'available', 'hello')}])
# no special capabilities
basic_caps = [(contact_handle, cs.CHANNEL_TYPE_TEXT, 3, 0)]
assert conn.Capabilities.GetCapabilities([contact_handle]) == basic_caps
# send updated presence with Jingle caps info
presence = make_presence(contact, status='hello',
caps={'node': client,
'ver': '0.1',
})
stream.send(presence)
# Gabble looks up our capabilities
event = q.expect('stream-iq', to=contact,
query_ns='http://jabber.org/protocol/disco#info')
query_node = xpath.queryForNodes('/iq/query', event.stanza)[0]
assert query_node.attributes['node'] == \
client + '#' + '0.1'
# send good reply
send_disco_reply(stream, event.stanza, [], jingle_av_features)
# we can now do audio calls
event = q.expect('dbus-signal', signal='CapabilitiesChanged')
caps_diff = event.args[0]
media_diff = [c for c in caps_diff
if c[1] == cs.CHANNEL_TYPE_STREAMED_MEDIA][0]
assert media_diff[5] & cs.MEDIA_CAP_AUDIO, media_diff[5]
caps_changed_flag = False
# Send presence without any capabilities. XEP-0115 §8.4 Caps Optimization
# says “receivers of presence notifications MUST NOT expect an annotation
# on every presence notification they receive”, so the contact should still
# be media-capable afterwards.
stream.send(make_presence(contact, status='very capable'))
q.expect('dbus-signal', signal='PresencesChanged',
args=[{contact_handle: (2, u'available', 'very capable')}])
ye_olde_caps = conn.Capabilities.GetCapabilities([contact_handle])
assertLength(1, [c for c in ye_olde_caps
if c[1] == cs.CHANNEL_TYPE_STREAMED_MEDIA and
c[3] & cs.MEDIA_CAP_AUDIO])
# send bogus presence
caps = {
'node': client,
'ver': 'ceci=nest=pas=un=hash',
'hash': 'sha-1',
}
presence = make_presence(contact, status='hello', caps=caps)
stream.send(presence)
# Gabble looks up our capabilities
event = q.expect('stream-iq', to=contact,
query_ns='http://jabber.org/protocol/disco#info')
query_node = xpath.queryForNodes('/iq/query', event.stanza)[0]
assert query_node.attributes['node'] == \
client + '#' + caps['ver']
# send bogus reply
send_disco_reply(stream, event.stanza, [],
['http://jabber.org/protocol/bogus-feature'])
# don't receive any D-Bus signal
sync_dbus(bus, q, conn)
sync_stream(q, stream)
assert caps_changed_flag == False
# send presence with empty caps
presence = make_presence(contact, status='hello',
caps={'node': client,
'ver': '0.0',
})
stream.send(presence)
# Gabble looks up our capabilities
event = q.expect('stream-iq', to=contact,
query_ns='http://jabber.org/protocol/disco#info')
query_node = xpath.queryForNodes('/iq/query', event.stanza)[0]
assert query_node.attributes['node'] == \
client + '#' + '0.0'
# still don't receive any D-Bus signal
sync_dbus(bus, q, conn)
assert caps_changed_flag == False
# send good reply
result = make_result_iq(stream, event.stanza)
query = result.firstChildElement()
stream.send(result)
# we can now do nothing
event = q.expect('dbus-signal', signal='CapabilitiesChanged')
assert caps_changed_flag == True
caps_changed_flag = False
# send correct presence
ver = compute_caps_hash(some_identities, jingle_av_features, fake_client_dataforms)
caps = {
'node': client,
'ver': ver,
'hash': 'sha-1',
}
presence = make_presence(contact, status='hello', caps=caps)
stream.send(presence)
# Gabble looks up our capabilities
event = q.expect('stream-iq', to=contact,
query_ns='http://jabber.org/protocol/disco#info')
query_node = xpath.queryForNodes('/iq/query', event.stanza)[0]
assert query_node.attributes['node'] == \
client + '#' + caps['ver']
# don't receive any D-Bus signal
sync_dbus(bus, q, conn)
assert caps_changed_flag == False
# send good reply
send_disco_reply(
stream, event.stanza, some_identities, jingle_av_features, fake_client_dataforms)
# we can now do audio calls
    event = q.expect('dbus-signal', signal='CapabilitiesChanged')
assert caps_changed_flag == True
caps_changed_flag = False
def test_two_clients(q, bus, conn, stream, contact1, contact2,
contact_handle1, contact_handle2, client, broken_hash):
global caps_changed_flag
presence = make_presence(contact1, status='hello')
stream.send(presence)
q.expect('dbus-signal', signal='PresencesChanged',
args=[{contact_handle1:
(2, u'available', 'hello')}])
presence = make_presence(contact2, status='hello')
stream.send(presence)
q.expect('dbus-signal', signal='PresencesChanged',
args=[{contact_handle2:
(2, u'available', 'hello')}])
# no special capabilities
basic_caps = [(contact_handle1, cs.CHANNEL_TYPE_TEXT, 3, 0)]
assert conn.Capabilities.GetCapabilities([contact_handle1]) == basic_caps
basic_caps = [(contact_handle2, cs.CHANNEL_TYPE_TEXT, 3, 0)]
assert conn.Capabilities.GetCapabilities([contact_handle2]) == basic_caps
# send updated presence with Jingle caps info
ver = compute_caps_hash(some_identities, jingle_av_features, {})
caps = {
'node': client,
'ver': ver,
'hash': 'sha-1',
}
presence = make_presence(contact1, status='hello', caps=caps)
stream.send(presence)
presence = make_presence(contact2, status='hello', caps=caps)
stream.send(presence)
# Gabble looks up our capabilities
event = q.expect('stream-iq', to=contact1,
query_ns='http://jabber.org/protocol/disco#info')
query_node = xpath.queryForNodes('/iq/query', event.stanza)[0]
assert query_node.attributes['node'] == \
client + '#' + ver
# don't receive any D-Bus signal
sync_dbus(bus, q, conn)
assert caps_changed_flag == False
result = make_caps_disco_reply(
stream, event.stanza, some_identities, jingle_av_features)
if broken_hash:
# make the hash break!
query = result.firstChildElement()
query.addElement('feature')['var'] = 'http://example.com/another-feature'
stream.send(result)
if broken_hash:
# Gabble looks up our capabilities again because the first contact
# failed to provide a valid hash
event = q.expect('stream-iq', to=contact2,
query_ns='http://jabber.org/protocol/disco#info')
query_node = xpath.queryForNodes('/iq/query', event.stanza)[0]
assert query_node.attributes['node'] == \
client + '#' + ver
# don't receive any D-Bus signal
sync_dbus(bus, q, conn)
assert caps_changed_flag == False
# send good reply
send_disco_reply(stream, event.stanza, some_identities, jingle_av_features)
# we can now do audio calls with both contacts
event = q.expect('dbus-signal', signal='CapabilitiesChanged',
args=[[(contact_handle2, cs.CHANNEL_TYPE_STREAMED_MEDIA, 0, 3, 0,
cs.MEDIA_CAP_AUDIO | cs.MEDIA_CAP_VIDEO)]])
if not broken_hash:
# if the first contact failed to provide a good hash, it does not
# deserve its capabilities to be understood by Gabble!
event = q.expect('dbus-signal', signal='CapabilitiesChanged',
args=[[(contact_handle1, cs.CHANNEL_TYPE_STREAMED_MEDIA, 0, 3, 0,
cs.MEDIA_CAP_AUDIO | cs.MEDIA_CAP_VIDEO)]])
caps_changed_flag = False
# don't receive any D-Bus signal
sync_dbus(bus, q, conn)
assert caps_changed_flag == False
def test_39464(q, bus, conn, stream):
"""
Regression test for an issue where a form with no type='' attribute on the
<x/> node would crash Gabble.
"""
client = 'fake:qutim'
hash = 'blahblah'
contact = '[email protected]/foo'
caps = {
'node': client,
'ver': hash,
'hash': 'sha-1',
}
presence = make_presence(contact, status='hello', caps=caps)
stream.send(presence)
# Gabble looks up our capabilities
event = q.expect('stream-iq', to=contact, query_ns=ns.DISCO_INFO)
# Send a reply with a form without a type=''
result = make_result_iq(stream, event.stanza, add_query_node=False)
result.addChild(
elem(ns.DISCO_INFO, 'query', node='%s#%s' % (client, hash))(
# NB. no type='' attribute
elem(ns.X_DATA, 'x')
)
)
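    # For orientation, the reply built above corresponds roughly to the following
    # (an illustrative rendering, not a captured protocol trace):
    #
    #   <iq type='result'>
    #     <query xmlns='http://jabber.org/protocol/disco#info' node='fake:qutim#blahblah'>
    #       <x xmlns='jabber:x:data'/>    <!-- no type='' attribute -->
    #     </query>
    #   </iq>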
stream.send(result)
# We don't really care what Gabble does, as long as it doesn't crash.
sync_stream(q, stream)
def test(q, bus, conn, stream):
# be notified when the signal CapabilitiesChanged is fired
conn_caps_iface = dbus.Interface(conn, cs.CONN_IFACE_CAPS)
conn_caps_iface.connect_to_signal('CapabilitiesChanged', caps_changed_cb)
test_hash(q, bus, conn, stream, '[email protected]/Foo', 2L, 'http://telepathy.freedesktop.org/fake-client')
test_hash(q, bus, conn, stream, '[email protected]/Foo', 3L, 'http://telepathy.freedesktop.org/fake-client2')
test_two_clients(q, bus, conn, stream, '[email protected]/Res',
'[email protected]/Res', 4L, 5L,
'http://telepathy.freedesktop.org/fake-client3', 0)
test_two_clients(q, bus, conn, stream, '[email protected]/Res',
'[email protected]/Res', 6L, 7L,
'http://telepathy.freedesktop.org/fake-client4', 1)
test_39464(q, bus, conn, stream)
if __name__ == '__main__':
exec_test(test)
| lgpl-2.1 | 8,532,957,967,630,063,000 | 34.269231 | 108 | 0.64357 | false |
edx/ecommerce | ecommerce/extensions/refund/tests/test_signals.py | 1 | 4852 |
from mock import patch
from ecommerce.core.constants import SEAT_PRODUCT_CLASS_NAME
from ecommerce.core.models import SegmentClient
from ecommerce.extensions.analytics.utils import ECOM_TRACKING_ID_FMT
from ecommerce.extensions.refund.api import create_refunds
from ecommerce.extensions.refund.tests.mixins import RefundTestMixin
from ecommerce.tests.factories import UserFactory
from ecommerce.tests.testcases import TransactionTestCase
@patch.object(SegmentClient, 'track')
class RefundTrackingTests(RefundTestMixin, TransactionTestCase):
"""Tests verifying the behavior of refund tracking."""
def setUp(self):
super(RefundTrackingTests, self).setUp()
self.user = UserFactory(lms_user_id=6179)
self.refund = create_refunds([self.create_order()], self.course.id)[0]
def assert_refund_event_fired(self, mock_track, refund, tracking_context=None, expected_user_id=None):
        self.assertTrue(mock_track.called)
        (event_user_id, event_name, event_payload), kwargs = mock_track.call_args
self.assertEqual(event_name, 'Order Refunded')
if tracking_context is not None:
expected_context = {
'ip': tracking_context['lms_ip'],
'Google Analytics': {
'clientId': tracking_context['ga_client_id']
},
'page': {
'url': 'https://testserver.fake/'
},
}
else:
expected_context = {
'ip': None,
'Google Analytics': {'clientId': None},
'page': {'url': 'https://testserver.fake/'}
}
if expected_user_id is None:
expected_user_id = refund.user.lms_user_id
self.assertEqual(event_user_id, expected_user_id)
self.assertEqual(kwargs['context'], expected_context)
self.assertEqual(event_payload['orderId'], refund.order.number)
expected_products = [
{
'id': line.order_line.partner_sku,
'quantity': line.quantity,
} for line in refund.lines.all()
]
total = refund.total_credit_excl_tax
first_product = refund.lines.first().order_line.product
product_class = first_product.get_product_class().name
if product_class == SEAT_PRODUCT_CLASS_NAME:
title = first_product.course.name
else:
title = first_product.title
self.assertEqual(event_payload['products'], expected_products)
self.assertEqual(event_payload['total'], total)
self.assertEqual(event_payload['title'], title)
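    # For orientation (an illustrative sketch of the call being asserted on above,
    # not the production call site itself), the tracked event has roughly the shape:
    #
    #   track(
    #       user_id,              # the LMS user id, or an ECOM_TRACKING_ID_FMT fallback
    #       'Order Refunded',
    #       {'orderId': ..., 'products': [...], 'total': ..., 'title': ...},
    #       context={'ip': ..., 'Google Analytics': {'clientId': ...},
    #                'page': {'url': ...}},
    #   )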
def test_successful_refund_tracking(self, mock_track):
"""Verify that a successfully placed refund is tracked when Segment is enabled."""
tracking_context = {'ga_client_id': 'test-client-id', 'lms_user_id': 'test-user-id', 'lms_ip': '127.0.0.1'}
self.refund.user.tracking_context = tracking_context
self.refund.user.save()
self.approve(self.refund)
self.assert_refund_event_fired(mock_track, self.refund, tracking_context)
def test_successful_refund_tracking_without_context(self, mock_track):
"""Verify that a successfully placed refund is tracked, even if no tracking context is available."""
self.approve(self.refund)
self.assert_refund_event_fired(mock_track, self.refund)
def test_refund_tracking_without_lms_user_id(self, mock_track):
"""Verify that a successfully placed refund is tracked, even if no LMS user id is available."""
self.refund.user.lms_user_id = None
self.approve(self.refund)
expected_user_id = ECOM_TRACKING_ID_FMT.format(self.refund.user.id)
self.assert_refund_event_fired(mock_track, self.refund, expected_user_id=expected_user_id)
def test_successful_refund_no_segment_key(self, mock_track):
"""Verify that a successfully placed refund is not tracked when Segment is disabled."""
self.site.siteconfiguration.segment_key = None
self.approve(self.refund)
self.assertFalse(mock_track.called)
def test_successful_refund_tracking_segment_error(self, mock_track):
"""Verify that errors during refund tracking are logged."""
# Approve the refund, forcing an exception to be raised when attempting to emit a corresponding event
with patch('ecommerce.extensions.analytics.utils.logger.exception') as mock_log_exc:
mock_track.side_effect = Exception('boom!')
self.approve(self.refund)
# Verify that an attempt was made to emit a business intelligence event.
self.assertTrue(mock_track.called)
        # Verify that an error message was logged. Note that a Mock has no
        # `called_with` method -- accessing it just returns another (truthy) Mock,
        # so the original assertion always passed; assert on `called` instead.
        self.assertTrue(mock_log_exc.called)
| agpl-3.0 | -6,382,021,938,198,452,000 | 43.513761 | 115 | 0.655812 | false |