repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---|
stringlengths 5-92 | stringlengths 4-232 | stringclasses 19 values | stringlengths 4-7 | stringlengths 721-1.04M | stringclasses 15 values | int64 -9,223,277,421,539,062,000 to 9,223,102,107B | float64 6.51-99.9 | int64 15-997 | float64 0.25-0.97 | bool 1 class
johnmgregoire/NanoCalorimetry | accalinitcode_Sn1.py | 1 | 2899 | import numpy, h5py, pylab
import os  # needed for os.path.join below (unless already re-exported by PnSC_h5io)
from PnSC_h5io import *
from matplotlib.ticker import FuncFormatter
def myexpformat(x, pos):
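    # Try increasingly precise scientific-notation labels and return the
    # shortest one that still round-trips to the exact tick value x;
    # fall back to the most precise form if none matches exactly.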
for ndigs in range(5):
lab=(('%.'+'%d' %ndigs+'e') %x).replace('e+0','e').replace('e+','e').replace('e0','').replace('e-0','e-')
        if float(lab) == x:
return lab
return lab
ExpTickLabels=FuncFormatter(myexpformat)
p='C:/Users/JohnnyG/Documents/PythonCode/Vlassak/NanoCalorimetry/20110714_SnACcal.h5'
#f=h5py.File(p,mode='r')
seg=3
exp='Sn_1kHz'
skip=200
skipe=100
f, hpp=experimenthppaths(p, exp)
daqHz=f[hpp[0]].attrs['daqHz']
f.close()
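# Override the auto-discovered heat-program paths with an explicit,
# ordered selection of the three scans to plot.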
hpp=['/Calorimetry/Sn_1kHz/measurement/HeatProgram/cell29_17.5dc17ac_269ms_1kHz_1_of_1', '/Calorimetry/Sn_1kHz/measurement/HeatProgram/cell7_17.5dc17ac_269ms_1kHz_1_of_1', '/Calorimetry/Sn_1kHz/measurement/HeatProgram/cell7_17.5dc17ac_269ms_1kHzagain_1_of_1']
labs=['1kHz, 10Ohm Res','slow ramp, scan1', 'slow ramp, scan2']
targetf=1.e3
#labs=[hp.rpartition('/')[2] for hp in hpp]
nplots=4
pylab.figure(figsize=(20, 8))
for i, (hp, title) in enumerate(zip(hpp, labs)):
hpsdl=CreateHeatProgSegDictList(p, exp, hp.rpartition('/')[2])
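    # Trim the first `skip` and last `skipe` samples of the segment,
    # presumably to drop transients at the segment edges.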
sampv=hpsdl[seg]['samplevoltage'][0][skip:-1*skipe]
diffv=hpsdl[seg]['samplehighpassacvoltage'][0][skip:-1*skipe]
t=hpsdl[seg]['cycletime'][0][skip:-1*skipe]
pylab.subplot(len(hpp), nplots, nplots*i+1)
sy=sampv
pylab.plot((t*1000.)[:4000], sy[:4000], 'g.', markersize=1)
pylab.gca().yaxis.set_major_formatter(ExpTickLabels)
#pylab.ylim(-620, 620)
pylab.title(title)
pylab.ylabel('sample channel V')
pylab.subplot(len(hpp), nplots, nplots*i+2)
y=diffv
pylab.plot((t*1000.)[:4000], y[:4000], 'r.', markersize=1)
pylab.gca().yaxis.set_major_formatter(ExpTickLabels)
#pylab.ylim(-620, 620)
pylab.title(title)
pylab.ylabel('filtered channel, V')
pylab.subplot(len(hpp), nplots, nplots*i+3)
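    # One-sided magnitude spectrum: keep only the positive-frequency half
    # of the FFT, with the frequency axis scaled by the DAQ sample rate.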
fft=numpy.fft.fft(y)
freq=numpy.fft.fftfreq(len(y))*daqHz
pylab.loglog(freq[:len(freq)//2], numpy.abs(fft[:len(freq)//2]))
pylab.ylabel('filtered channel fft mag.')
pylab.subplot(len(hpp), nplots, nplots*i+4)
pylab.loglog(freq[:len(freq)//2], numpy.abs(fft[:len(freq)//2]))
pylab.xlim(.9*targetf, 4*targetf)
pylab.xticks([targetf, 2.*targetf, 3.*targetf])
pylab.ylabel('filtered channel fft mag.')
pylab.subplot(len(hpp), nplots, nplots*i+1)
pylab.xlabel('time (ms)')
pylab.subplot(len(hpp), nplots, nplots*i+2)
pylab.xlabel('time (ms)')
pylab.subplot(len(hpp), nplots, nplots*i+3)
pylab.xlabel('freq (Hz)')
pylab.subplot(len(hpp), nplots, nplots*i+4)
pylab.xlabel('freq (Hz)')
pylab.suptitle(r'response for 10mAdc+9mAac into 10$\Omega$')
pylab.subplots_adjust(left=.07, right=.97, wspace=.35, hspace=.25)
if True:
pylab.savefig(os.path.join('C:/Users/JohnnyG/Documents/HarvardWork/ACcal/20110714_Sn_analysis', '_'.join(('FFT', exp)))+'.png')
pylab.show()
| bsd-3-clause | -1,861,564,002,955,912,200 | 34.353659 | 259 | 0.673681 | false |
hmln/pds | parte4/q2.py | 1 | 1384 | import matplotlib.pyplot as plt
import numpy as np
from scipy import fftpack
from scipy.signal import kaiserord, firwin, lfilter
import scipy.io.wavfile as wavfile
FREQ = 466.16
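# ~466.16 Hz is B-flat above middle C; this is the interfering tone that is
# injected below and then removed by the FFT notch and the FIR filter.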
fs, data = wavfile.read('teste_de_som.wav')
print("Taxa de amostragem: {} Hz".format(fs))
t = np.arange(0, len(data) / fs, 1 / fs)
new = 30 * data.astype('float64') + 10000 * np.sin(2 * np.pi * FREQ * t)
O = fftpack.fft(data) / len(t)
N = fftpack.fft(new) / len(t)
f = fftpack.fftfreq(len(t), d=1 / fs)
plt.subplot(2, 1, 1)
plt.plot(t, data)
plt.xlabel('time')
plt.ylabel('original')
plt.subplot(2, 1, 2)
plt.plot(t, new)
plt.xlabel('time')
plt.ylabel('new')
plt.show()
plt.subplot(2, 1, 1)
plt.plot(f, np.abs(O))
plt.xlabel('freq')
plt.ylabel('original')
plt.subplot(2, 1, 2)
plt.plot(f, np.abs(N))
plt.xlabel('freq')
plt.ylabel('new')
plt.show()
wavfile.write('noisy.wav', 8000, new)
REC = N
df = fs / len(REC)
wavfile.write('fft_normalized.wav', 8000, np.abs(fftpack.ifft(REC)))
REC[np.abs(np.abs(f) - FREQ) < 30] = 0  # notch +/-FREQ; abs(f) also zeros the negative-frequency bins
wavfile.write('fft_treated.wav', 8000, np.abs(fftpack.ifft(REC)))
# FIR FILTER - KAISER
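# kaiserord turns the ripple (dB) / transition-width spec into a tap count
# and Kaiser-window beta; firwin with two cutoffs and its default
# pass_zero=True then yields a band-stop filter around FREQ.  Dividing the
# taps by 10000 below simply scales the output amplitude down.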
nyq = fs / 2
width = 1 / fs
ripple_db = 11
numtaps, beta = kaiserord(ripple_db, width)
fir = firwin(
numtaps, [(FREQ - 30) / nyq, (FREQ + 30) / nyq], window=('kaiser', beta))
fir_response = lfilter(fir/10000, 1, new)
wavfile.write('fir_filter.wav', 8000, fir_response)
| mit | 8,085,503,915,704,743,000 | 19.969697 | 77 | 0.65896 | false |
nicko7i/vcnc | api-python/velstor/vclc/__main__.py | 1 | 3489 | #!python3.5
# For command aliases prior to 3.2 - https://bugs.python.org/issue25299
#
# https://pythonconquerstheuniverse.wordpress.com/2011/08/29/lambda_tutorial/
from __future__ import print_function
import sys
import re
import json
import requests
import errno
from velstor.restapi import Session
from functools import partial
from velstor.vclc.vclc_parser import vclc_parser
from velstor.vclc.handler import Handler
from velstor.vclc.handler import error_response
from velstor.vclc.vClcException import vClcException
print_error = partial(print, file=sys.stderr)
#
# Yeah, yeah, globals are bad form...
#
quiet = False
def main(args=None):
"""The main routine."""
if args is None:
args = sys.argv[1:]
with Session() as session:
handler = Handler(session)
parser = vclc_parser(handler)
#
try:
global quiet
results = parser.parse_args(args, handler)
quiet = results.quiet
return results.action()
except requests.exceptions.RequestException as e:
#
# Requests raised an exception. Probably couldn't reach the vCNC
# server There is no HTTP code for this error, so we adopt 504,
# which is similar.
#
# Yes, it would have been cooler to have done this with a single
# RE.
#
details = str(e)
            match_host = re.search(r"host='(\S+)'", details)
            match_port = re.search(r"port=(\d+)", details)
match_error = re.search('NewConnectionError', details)
suffix = '.'
#
# If the server happens to match the vCNC server's default value,
# then add the additional suggestion to check configuration.
#
if match_host and match_port and match_error:
host = match_host.group(1)
port = match_port.group(1)
if host == 'vcnc' and port == "6130":
suffix = ''.join([
' Did you mean to set a command line switch',
' or environment variable?'])
return error_response('Could not reach vCNC server at '
+ match_host.group(1)
+ ':'
+ match_port.group(1)
+ suffix,
http_status=504,
error_sym='EHOSTDOWN')
else:
#
# We don't really know what happened. Just dump the raw data
# as the message.
#
return error_response(details)
#
#
except vClcException:
#
# Calling 'vclc' with no arguments isn't trapped as an error by
# argparse.
#
m = parser.format_usage()
m = re.sub('\n[ ]+', ' ', m)
return error_response(m, http_status=400, error_sym='EINVAL')
except SystemExit:
raise
except KeyboardInterrupt:
sys.exit(errno.EINVAL)
except BaseException:
raise
if __name__ == "__main__":
(exit_code, response) = main()
if not quiet:
print(json.dumps(response, sort_keys=True, indent=2))
sys.exit(127 if (exit_code > 127) else exit_code)
| apache-2.0 | -6,190,060,355,032,620,000 | 33.205882 | 78 | 0.524792 | false |
FernanOrtega/DAT210x | Module6/assignment1.py | 1 | 5367 | import matplotlib as mpl
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import time
#
# INFO: Your Parameters.
# You can adjust them after completing the lab
C = 1
kernel = 'linear'
gamma = 'auto'
iterations = 5000 # TODO: Change to 200000 once you get to Question#2
#
# INFO: You can set this to false if you want to
# draw the full square matrix
FAST_DRAW = True
def drawPlots(model, X_train, X_test, y_train, y_test, wintitle='Figure 1'):
# INFO: A convenience function for you
# You can use this to break any higher-dimensional space down
# And view cross sections of it.
# If this line throws an error, use plt.style.use('ggplot') instead
mpl.style.use('ggplot') # Look Pretty
padding = 3
resolution = 0.5
max_2d_score = 0
y_colors = ['#ff0000', '#00ff00', '#0000ff']
my_cmap = mpl.colors.ListedColormap(['#ffaaaa', '#aaffaa', '#aaaaff'])
colors = [y_colors[i] for i in y_train]
num_columns = len(X_train.columns)
fig = plt.figure()
fig.canvas.set_window_title(wintitle)
cnt = 0
for col in range(num_columns):
for row in range(num_columns):
# Easy out
if FAST_DRAW and col > row:
cnt += 1
continue
ax = plt.subplot(num_columns, num_columns, cnt + 1)
plt.xticks(())
plt.yticks(())
# Intersection:
if col == row:
plt.text(0.5, 0.5, X_train.columns[row], verticalalignment='center', horizontalalignment='center', fontsize=12)
cnt += 1
continue
# Only select two features to display, then train the model
X_train_bag = X_train.ix[:, [row,col]]
X_test_bag = X_test.ix[:, [row,col]]
model.fit(X_train_bag, y_train)
# Create a mesh to plot in
x_min, x_max = X_train_bag.ix[:, 0].min() - padding, X_train_bag.ix[:, 0].max() + padding
y_min, y_max = X_train_bag.ix[:, 1].min() - padding, X_train_bag.ix[:, 1].max() + padding
xx, yy = np.meshgrid(np.arange(x_min, x_max, resolution),
np.arange(y_min, y_max, resolution))
# Plot Boundaries
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
# Prepare the contour
Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.contourf(xx, yy, Z, cmap=my_cmap, alpha=0.8)
plt.scatter(X_train_bag.ix[:, 0], X_train_bag.ix[:, 1], c=colors, alpha=0.5)
score = round(model.score(X_test_bag, y_test) * 100, 3)
plt.text(0.5, 0, "Score: {0}".format(score), transform = ax.transAxes, horizontalalignment='center', fontsize=8)
max_2d_score = score if score > max_2d_score else max_2d_score
cnt += 1
print "Max 2D Score: ", max_2d_score
fig.set_tight_layout(True)
def benchmark(model, X_train, X_test, y_train, y_test, wintitle='Figure 1'):
print '\n\n' + wintitle + ' Results'
s = time.time()
for i in range(iterations):
#
# TODO: train the classifier on the training data / labels:
#
model.fit(X_train, y_train)
print "{0} Iterations Training Time: ".format(iterations), time.time() - s
s = time.time()
for i in range(iterations):
#
# TODO: score the classifier on the testing data / labels:
#
# .. your code here ..
score = model.score(X_test, y_test)
print "{0} Iterations Scoring Time: ".format(iterations), time.time() - s
print "High-Dimensionality Score: ", round((score*100), 3)
#
# TODO: Load up the wheat dataset into dataframe 'X'
# Verify you did it properly.
# Indices shouldn't be doubled, nor weird headers...
#
X = pd.read_csv('Datasets/wheat.data', index_col=0)
# INFO: An easy way to show which rows have nans in them
# print X[pd.isnull(X).any(axis=1)]
#
# TODO: Go ahead and drop any row with a nan
#
X = X.dropna(axis=0)
# print X[pd.isnull(X).any(axis=1)]
#
# INFO: # In the future, you might try setting the nan values to the
# mean value of that column, the mean should only be calculated for
# the specific class rather than across all classes, now that you
# have the labels
#
# TODO: Copy the labels out of the dset into variable 'y' then Remove
# them from X. Encode the labels, using the .map() trick we showed
# you in Module 5 -- canadian:0, kama:1, and rosa:2
#
print X['wheat_type'].unique()
y = X['wheat_type'].map({'canadian': 0, 'kama': 1, 'rosa': 2})
X = X.drop(labels=['wheat_type'], axis=1)
#
# TODO: Split your data into test / train sets
# Your test size can be 30% with random_state 7.
# Use variable names: X_train, X_test, y_train, y_test
#
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=7)
#
# TODO: Create an SVC classifier named svc
# Use a linear kernel, and set the C value to C
#
from sklearn.svm import SVC
svc = SVC(kernel=kernel, C=C, gamma=gamma)
#
# TODO: Create an KNeighbors classifier named knn
# Set the neighbor count to 5
#
from sklearn.neighbors import KNeighborsClassifier
knn = KNeighborsClassifier(n_neighbors=5)
benchmark(knn, X_train, X_test, y_train, y_test, 'KNeighbors')
drawPlots(knn, X_train, X_test, y_train, y_test, 'KNeighbors')
benchmark(svc, X_train, X_test, y_train, y_test, 'SVC')
drawPlots(svc, X_train, X_test, y_train, y_test, 'SVC')
plt.show()
#
# BONUS: After submitting your answers, toy around with
# gamma, kernel, and C.
| mit | -277,696,860,954,413,630 | 26.106061 | 119 | 0.646916 | false |
juliancantillo/royal-films | royalfilms/cinemas/migrations/0003_auto_20160305_0344.py | 1 | 1291 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-05 03:44
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
dependencies = [
('cinemas', '0002_auto_20160303_0631'),
]
operations = [
migrations.CreateModel(
name='Show',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, unique=True)),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created at')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated at')),
('time', models.TimeField()),
],
options={
'abstract': False,
},
),
migrations.RemoveField(
model_name='function',
name='time',
),
migrations.AddField(
model_name='show',
name='function',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cinemas.Function'),
),
]
| mit | 6,041,118,326,002,203,000 | 32.102564 | 114 | 0.565453 | false |
christophercrouzet/hienoi | demos/orbit.py | 1 | 3478 | #!/usr/bin/env python
"""Particles orbiting around the origin.
Features:
- user attributes: particles are initialized within a radius from the
origin and are, at each simulation step, updated to orbit around the origin.
- NumPy: operations are done directly on the particle data for increased
performances.
"""
import math
import os
import sys
import numpy
_HERE = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.abspath(os.path.join(_HERE, os.pardir)))
import hienoi.application
from hienoi import Vector2f, Vector4f
# Mass of the central object around which particles are orbiting.
_CENTRAL_MASS = 50.0
# Number of particles.
_COUNT = 1000
# Minimum radius of the disc used to distribute the particles.
_MIN_RADIUS = 2.0
# Maximum radius of the disc used to distribute the particles.
_MAX_RADIUS = 30.0
# Mass of each particle.
_MASS = 2.0
# Mass variance for each particle.
_MASS_VARIANCE = 1.0
# Size of a particle, relative to its mass.
_SIZE = 0.2
# Squared distance to the origin where particles are drawn in the 'far color'.
_FAR_SQUARED_DISTANCE = 500.0
# Color to use for far particles.
_FAR_COLOR = Vector4f(0.0, 1.0, 1.0, 1.0)
# Color to use for near particles.
_NEAR_COLOR = Vector4f(1.0, 0.0, 0.0, 1.0)
def initialize_particle_simulation(sim):
"""Callback to initialize the particle simulation state.
Parameters
----------
sim : hienoi.dynamics.ParticleSimulation
Particle simulation.
"""
numpy.random.seed(_COUNT + 611)
# Add a few particles at random positions within a given radius and with
# initial velocities suitable for elliptical orbiting.
particles = sim.add_particles(_COUNT)
data = particles.data
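    # Scatter the particles over an annulus using polar coordinates; note
    # that a uniform radius gives a density that thins out toward the rim.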
r = numpy.random.uniform(low=_MIN_RADIUS, high=_MAX_RADIUS, size=_COUNT)
t = numpy.random.uniform(high=2.0 * numpy.pi, size=_COUNT)
data['position'][:, 0] = r * numpy.cos(t)
data['position'][:, 1] = r * numpy.sin(t)
data['mass'] = numpy.random.uniform(low=_MASS - _MASS_VARIANCE,
high=_MASS + _MASS_VARIANCE,
size=_COUNT)
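    # Launch each particle perpendicular to its radius vector; together with
    # the central force applied each step, this produces the elliptical
    # orbits mentioned in the docstring.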
speeds = numpy.sqrt(data['mass'] / r)
data['velocity'][:, 0] = data['position'][:, 1] * speeds
data['velocity'][:, 1] = -data['position'][:, 0] * speeds
data['size'] = data['mass'] * _SIZE / _MASS
def update_particle_simulation(sim):
"""Callback to update the particle simulation state.
Parameters
----------
sim : hienoi.dynamics.ParticleSimulation
Particle simulation.
"""
data = sim.particles.data
squared_distances = numpy.sum(data['position'][numpy.newaxis, :] ** 2,
axis=-1)
squared_distances = squared_distances.reshape(-1, 1)
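    # Attract every particle toward the origin; since -position / r**2 has
    # magnitude 1 / r, this is effectively an inverse-distance force law.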
data['force'] -= (data['position']
* _CENTRAL_MASS
* data['mass'][:, numpy.newaxis]
/ squared_distances)
data['color'] = (_FAR_COLOR - _NEAR_COLOR)
data['color'] *= squared_distances / _FAR_SQUARED_DISTANCE
data['color'] += _NEAR_COLOR
def run():
"""Run the application."""
return hienoi.application.run(
        gui={
'window_title': 'orbit',
'show_grid': False,
},
particle_simulation={
'initialize_callback': initialize_particle_simulation,
'postsolve_callback': update_particle_simulation,
})
if __name__ == '__main__':
sys.exit(run())
| mit | -1,000,051,599,808,256,300 | 28.726496 | 78 | 0.625359 | false |
Ratfink/asciiplayback | src/asciiplaybackgtk.py | 1 | 2940 | #!/usr/bin/env python
import os.path
import sys
from gi.repository import Gtk, Gio, Gdk, GObject
from asciiplayback import *
from asciimation import *
from gtkasciiplayer import *
class ASCIIPlaybackGtk(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self, title="ASCIIPlayback")
self.set_default_size(0, 0)
if len(sys.argv) > 1:
self.filename = sys.argv[1]
self.player = ASCIIPlayback(ASCIImation(filename=self.filename))
else:
self.filename = ""
blank_asciimation = ASCIImation(font_family='monospace', size=[19, 3])
blank_asciimation.frames.append(Frame(text='\n No file loaded! \n'))
self.player = ASCIIPlayback(asciimation=blank_asciimation)
self.hb = Gtk.HeaderBar()
self.update_headerbar()
button = Gtk.Button(image=Gtk.Image.new_from_gicon(Gio.ThemedIcon(
name="document-open-symbolic"),
Gtk.IconSize.BUTTON))
button.connect("clicked", self.do_open)
self.hb.pack_start(button)
box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
self.video = GtkASCIIPlayer(self.player)
box.pack_start(self.video, True, True, 0)
ab = Gtk.ActionBar()
self.controls = GtkASCIIControls(self.player)
ab.set_center_widget(self.controls)
box.pack_end(ab, False, False, 0)
self.add(box)
def do_open(self, button):
dialog = Gtk.FileChooserDialog("Open", self,
Gtk.FileChooserAction.OPEN,
(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
Gtk.STOCK_OPEN, Gtk.ResponseType.OK))
self.add_filters(dialog)
response = dialog.run()
if response == Gtk.ResponseType.OK:
self.filename = dialog.get_filename()
self.player = ASCIIPlayback(ASCIImation(filename=self.filename))
self.video.player = self.player
self.controls.player = self.player
self.update_headerbar()
elif response == Gtk.ResponseType.CANCEL:
pass
dialog.destroy()
def add_filters(self, dialog):
filter_json = Gtk.FileFilter()
filter_json.set_name("JSON files")
filter_json.add_mime_type("application/json")
dialog.add_filter(filter_json)
filter_any = Gtk.FileFilter()
filter_any.set_name("All files")
filter_any.add_pattern("*")
dialog.add_filter(filter_any)
def update_headerbar(self):
self.hb.props.show_close_button = True
self.hb.props.title = "ASCIIPlayback"
self.hb.props.subtitle = os.path.basename(self.filename)
self.hb.props.has_subtitle = True
self.set_titlebar(self.hb)
def run():
win = ASCIIPlaybackGtk()
win.connect("delete-event", Gtk.main_quit)
win.show_all()
Gtk.main()
if __name__ == '__main__':
run()
| mit | -1,430,004,682,715,594,200 | 30.956522 | 82 | 0.608163 | false |
iworm/taobao-image-splitter | taobao-image-splitter.py | 1 | 7554 | #!/usr/bin/python3
# coding=utf-8
"""
The MIT License (MIT)
Copyright (c) 2015 iworm [email protected]
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import glob
import os
import random
import sys
from PIL import Image
class TaobaoImageSplitter(object):
root_path = ''
watermark_img = ''
def __init__(self, root_path):
self.root_path = root_path
watermark_path = os.path.join(root_path, 'watermark.png')
self.watermark_img = self._get_watermark_image(watermark_path)
def _get_watermark_image(self, watermark_path):
watermark_img = Image.open(watermark_path)
watermark_img = watermark_img.convert('RGBA')
return watermark_img
def calc_resized_size(self, original_size, width):
original_width = original_size[0]
original_height = original_size[1]
ratio = original_width / original_height
        new_height = int(width / ratio)
return (width, new_height)
def calc_watermark_position(self, size):
width = int(size[0]*0.5)
height = int(size[1]*0.5)
return (width, height)
def resize_to_specified_width(self, imgs, min_width, watermark):
resized_imgs = []
for img in imgs:
new_size = self.calc_resized_size(img.size, min_width)
watermark_layer = Image.new('RGBA', new_size)
watermark_position = self.calc_watermark_position(new_size)
watermark_layer.paste(watermark, watermark_position)
resized = img.resize(new_size, Image.ANTIALIAS)
resized = Image.composite(watermark_layer, resized, watermark_layer)
resized_imgs = resized_imgs + [resized]
return resized_imgs
def get_all_images(self, path):
all_imgs = []
for filename in glob.glob(path):
all_imgs = all_imgs + [Image.open(filename)]
return all_imgs
def get_min_width(self, imgs):
min_width = 800
for img in imgs:
if(img.size[0] < min_width):
min_width = img.size[0]
return min_width
def get_total_height(self, imgs):
total_height = 0
for img in imgs:
total_height += img.size[1]
return total_height
def create_new_image_and_paste(self, size, imgs_to_paste):
final_img = Image.new('RGB', size)
current_height = 0
for img in imgs_to_paste:
final_img.paste(img, (0, current_height))
current_height += img.size[1]
return final_img
def get_random_height(self):
min_crop_height = 300
max_crop_height = 500
return random.randint(min_crop_height, max_crop_height)
def split_image(self, img):
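        # Cut the tall composite into horizontal strips of random height
        # (300-500 px); a remainder shorter than 300 px is merged into the
        # final strip so no sliver is produced.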
cropped_height = 0
cropped_imgs = []
height = self.get_random_height()
while(cropped_height < img.size[1]):
if(img.size[1] - cropped_height - height < 300):
height = img.size[1] - cropped_height
cropped = img.crop(
(0, cropped_height, img.size[0], cropped_height + height))
cropped_imgs = cropped_imgs + [cropped]
cropped_height += height
height = self.get_random_height()
return cropped_imgs
def delete_all_files(self, path):
for filename in os.listdir(path):
current_file = os.path.join(path, filename)
if os.path.isfile(current_file):
os.remove(current_file)
def get_main_output_path(self, path):
output_path = os.path.join(path, 'main')
if os.path.exists(output_path):
self.delete_all_files(output_path)
else:
os.makedirs(output_path)
return output_path
def get_mobile_output_path(self, path):
output_path = os.path.join(path, 'mobile')
if os.path.exists(output_path):
self.delete_all_files(output_path)
else:
os.makedirs(output_path)
return output_path
def get_splitted_output_path(self, path):
output_path = os.path.join(path, 'splitted')
if os.path.exists(output_path):
self.delete_all_files(output_path)
else:
os.makedirs(output_path)
return output_path
def save(self, cropped_imgs, output_path):
index = 0
for img in cropped_imgs:
index += 1
output_file_name = os.path.join(output_path, str(index) + '.jpg')
img.save(output_file_name)
def process(self):
for directory in os.listdir(self.root_path):
current_path = os.path.join(self.root_path, directory)
if not os.path.isdir(current_path):
continue
src_img_path = os.path.join(current_path, 'src')
all_imgs = self.get_all_images(os.path.join(src_img_path, '*.jpg'))
min_width = self.get_min_width(all_imgs)
resized_imgs = self.resize_to_specified_width(
all_imgs, min_width, self.watermark_img)
self.save(resized_imgs, self.get_mobile_output_path(current_path))
total_height = self.get_total_height(resized_imgs)
final_img = self.create_new_image_and_paste(
(min_width, total_height), resized_imgs)
splitted_img = self.split_image(final_img)
self.save(splitted_img, self.get_splitted_output_path(current_path))
main_src_img_path = os.path.join(current_path, 'main_src')
all_main_imgs = self.get_all_images(os.path.join(main_src_img_path, '*.jpg'))
min_main_width = self.get_min_width(all_main_imgs)
resized_main_imgs = self.resize_to_specified_width(
all_main_imgs, min_main_width, self.watermark_img)
self.save(resized_main_imgs, self.get_main_output_path(current_path))
def print_usage(script_name):
print('''Usage:
*nix System:
python3 {0} '/Users/username/path/'
Windows System:
python3 {0} 'd:\\images\\'
'''.format(script_name))
if __name__ == '__main__':
if len(sys.argv) != 2:
print_usage(sys.argv[0])
quit()
if not os.path.isdir(sys.argv[1]):
print_usage(sys.argv[0])
quit()
a = TaobaoImageSplitter(sys.argv[1])
a.process()
| mit | -2,058,913,852,656,968,000 | 29.344398 | 89 | 0.595181 | false |
truthcoin/truthcoin-cpp | qa/rpc-tests/util.py | 1 | 12390 | # Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
# Add python-truthcoinrpc to module search path:
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-truthcoinrpc"))
from decimal import Decimal, ROUND_DOWN
import json
import random
import shutil
import subprocess
import time
import re
from truthcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
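# Offset the P2P/RPC ports by the process id so parallel test runs on the
# same machine do not collide.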
def p2p_port(n):
return 11000 + n + os.getpid()%999
def rpc_port(n):
return 12000 + n + os.getpid()%999
def check_json_precision():
"""Make sure json library being used does not lose precision converting CSH values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def sync_blocks(rpc_connections):
"""
Wait until everybody has the same block count
"""
while True:
counts = [ x.getblockcount() for x in rpc_connections ]
if counts == [ counts[0] ]*len(counts):
break
time.sleep(1)
def sync_mempools(rpc_connections):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while True:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match+1
if num_match == len(rpc_connections):
break
time.sleep(1)
truthcoind_processes = {}
def initialize_datadir(dirname, n):
datadir = os.path.join(dirname, "node"+str(n))
if not os.path.isdir(datadir):
os.makedirs(datadir)
with open(os.path.join(datadir, "truthcoin.conf"), 'w') as f:
f.write("regtest=1\n");
f.write("rpcuser=rt\n");
f.write("rpcpassword=rt\n");
f.write("port="+str(p2p_port(n))+"\n");
f.write("rpcport="+str(rpc_port(n))+"\n");
return datadir
def initialize_chain(test_dir):
"""
Create (or copy from cache) a 200-block-long chain and
4 wallets.
truthcoind and truthcoin-cli must be in search path.
"""
if not os.path.isdir(os.path.join("cache", "node0")):
devnull = open("/dev/null", "w+")
# Create cache directories, run truthcoinds:
for i in range(4):
datadir=initialize_datadir("cache", i)
args = [ os.getenv("TRUTHCOIND", "truthcoind"), "-keypool=1", "-datadir="+datadir, "-discover=0" ]
if i > 0:
args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
truthcoind_processes[i] = subprocess.Popen(args)
subprocess.check_call([ os.getenv("TRUTHCOINCLI", "truthcoin-cli"), "-datadir="+datadir,
"-rpcwait", "getblockcount"], stdout=devnull)
devnull.close()
rpcs = []
for i in range(4):
try:
url = "http://rt:[email protected]:%d"%(rpc_port(i),)
rpcs.append(AuthServiceProxy(url))
except:
sys.stderr.write("Error connecting to "+url+"\n")
sys.exit(1)
# Create a 200-block-long chain; each of the 4 nodes
# gets 25 mature blocks and 25 immature.
# blocks are created with timestamps 10 minutes apart, starting
# at 1 Jan 2014
block_time = 1388534400
for i in range(2):
for peer in range(4):
for j in range(25):
set_node_times(rpcs, block_time)
rpcs[peer].setgenerate(True, 1)
block_time += 10*60
# Must sync before next peer starts generating blocks
sync_blocks(rpcs)
# Shut them down, and clean up cache directories:
stop_nodes(rpcs)
wait_truthcoinds()
for i in range(4):
os.remove(log_filename("cache", i, "debug.log"))
os.remove(log_filename("cache", i, "db.log"))
os.remove(log_filename("cache", i, "peers.dat"))
os.remove(log_filename("cache", i, "fee_estimates.dat"))
for i in range(4):
from_dir = os.path.join("cache", "node"+str(i))
to_dir = os.path.join(test_dir, "node"+str(i))
shutil.copytree(from_dir, to_dir)
initialize_datadir(test_dir, i) # Overwrite port/rpcport in truthcoin.conf
def initialize_chain_clean(test_dir, num_nodes):
"""
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization.
"""
for i in range(num_nodes):
datadir=initialize_datadir(test_dir, i)
def _rpchost_to_args(rpchost):
'''Convert optional IP:port spec to rpcconnect/rpcport args'''
if rpchost is None:
return []
    match = re.match(r'(\[[0-9a-fA-f:]+\]|[^:]+)(?::([0-9]+))?$', rpchost)
if not match:
raise ValueError('Invalid RPC host spec ' + rpchost)
rpcconnect = match.group(1)
rpcport = match.group(2)
if rpcconnect.startswith('['): # remove IPv6 [...] wrapping
rpcconnect = rpcconnect[1:-1]
rv = ['-rpcconnect=' + rpcconnect]
if rpcport:
rv += ['-rpcport=' + rpcport]
return rv
def start_node(i, dirname, extra_args=None, rpchost=None):
"""
Start a truthcoind and return RPC connection to it
"""
datadir = os.path.join(dirname, "node"+str(i))
args = [ os.getenv("TRUTHCOIND", "truthcoind"), "-datadir="+datadir, "-keypool=1", "-discover=0", "-rest" ]
if extra_args is not None: args.extend(extra_args)
truthcoind_processes[i] = subprocess.Popen(args)
devnull = open("/dev/null", "w+")
subprocess.check_call([ os.getenv("TRUTHCOINCLI", "truthcoin-cli"), "-datadir="+datadir] +
_rpchost_to_args(rpchost) +
["-rpcwait", "getblockcount"], stdout=devnull)
devnull.close()
url = "http://rt:rt@%s:%d" % (rpchost or '127.0.0.1', rpc_port(i))
proxy = AuthServiceProxy(url)
proxy.url = url # store URL on proxy for info
return proxy
def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None):
"""
Start multiple truthcoinds, return RPC connections to them
"""
if extra_args is None: extra_args = [ None for i in range(num_nodes) ]
return [ start_node(i, dirname, extra_args[i], rpchost) for i in range(num_nodes) ]
def log_filename(dirname, n_node, logname):
return os.path.join(dirname, "node"+str(n_node), "regtest", logname)
def stop_node(node, i):
node.stop()
truthcoind_processes[i].wait()
del truthcoind_processes[i]
def stop_nodes(nodes):
for node in nodes:
node.stop()
del nodes[:] # Emptying array closes connections as a side effect
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
def wait_truthcoinds():
# Wait for all truthcoinds to cleanly exit
for truthcoind in truthcoind_processes.values():
truthcoind.wait()
truthcoind_processes.clear()
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:"+str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
connect_nodes(nodes[b], a)
def find_output(node, txid, amount):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert(confirmations_required >=0)
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } )
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out+fee
change = amount_in - amount
if change > amount*2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
def send_zeropri_transaction(from_node, to_node, amount, fee):
"""
Create&broadcast a zero-priority transaction.
Returns (txid, hex-encoded-txdata)
Ensures transaction is zero-priority by first creating a send-to-self,
then using it's output
"""
# Create a send-to-self with confirmed inputs:
self_address = from_node.getnewaddress()
(total_in, inputs) = gather_inputs(from_node, amount+fee*2)
outputs = make_change(from_node, total_in, amount+fee, fee)
outputs[self_address] = float(amount+fee)
self_rawtx = from_node.createrawtransaction(inputs, outputs)
self_signresult = from_node.signrawtransaction(self_rawtx)
self_txid = from_node.sendrawtransaction(self_signresult["hex"], True)
vout = find_output(from_node, self_txid, amount+fee)
# Now immediately spend the output to create a 1-input, 1-output
# zero-priority transaction:
inputs = [ { "txid" : self_txid, "vout" : vout } ]
outputs = { to_node.getnewaddress() : float(amount) }
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"])
def random_zeropri_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random zero-priority transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(txid, txhex) = send_zeropri_transaction(from_node, to_node, amount, fee)
return (txid, txhex, fee)
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount+fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"], fee)
def assert_equal(thing1, thing2):
if thing1 != thing2:
raise AssertionError("%s != %s"%(str(thing1),str(thing2)))
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s"%(str(thing1),str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
try:
fun(*args, **kwds)
except exc:
pass
except Exception as e:
raise AssertionError("Unexpected exception raised: "+type(e).__name__)
else:
raise AssertionError("No exception raised")
| mit | 4,969,878,162,674,825,000 | 35.122449 | 111 | 0.631961 | false |
nemesiscodex/JukyOS-sugar | extensions/cpsection/power/view.py | 1 | 4563 | # Copyright (C) 2008, OLPC
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import gtk
from gettext import gettext as _
from sugar.graphics import style
from jarabe.controlpanel.sectionview import SectionView
from jarabe.controlpanel.inlinealert import InlineAlert
class Power(SectionView):
def __init__(self, model, alerts):
SectionView.__init__(self)
self._model = model
self.restart_alerts = alerts
self._automatic_pm_valid = True
self._automatic_pm_change_handler = None
self.set_border_width(style.DEFAULT_SPACING * 2)
self.set_spacing(style.DEFAULT_SPACING)
group = gtk.SizeGroup(gtk.SIZE_GROUP_HORIZONTAL)
self._automatic_pm_alert_box = gtk.HBox(spacing=style.DEFAULT_SPACING)
separator_pm = gtk.HSeparator()
self.pack_start(separator_pm, expand=False)
separator_pm.show()
label_pm = gtk.Label(_('Power management'))
label_pm.set_alignment(0, 0)
self.pack_start(label_pm, expand=False)
label_pm.show()
box_pm = gtk.VBox()
box_pm.set_border_width(style.DEFAULT_SPACING * 2)
box_pm.set_spacing(style.DEFAULT_SPACING)
box_automatic_pm = gtk.HBox(spacing=style.DEFAULT_SPACING)
label_automatic_pm = gtk.Label(
_('Automatic power management (increases battery life)'))
label_automatic_pm.set_alignment(0, 0.5)
self._automatic_button = gtk.CheckButton()
self._automatic_button.set_alignment(0, 0)
box_automatic_pm.pack_start(self._automatic_button, expand=False)
box_automatic_pm.pack_start(label_automatic_pm, expand=False)
self._automatic_button.show()
label_automatic_pm.show()
group.add_widget(label_automatic_pm)
box_pm.pack_start(box_automatic_pm, expand=False)
box_automatic_pm.show()
self._automatic_pm_alert = InlineAlert()
label_automatic_pm_error = gtk.Label()
group.add_widget(label_automatic_pm_error)
self._automatic_pm_alert_box.pack_start(label_automatic_pm_error,
expand=False)
label_automatic_pm_error.show()
self._automatic_pm_alert_box.pack_start(self._automatic_pm_alert,
expand=False)
box_pm.pack_end(self._automatic_pm_alert_box, expand=False)
self._automatic_pm_alert_box.show()
if 'automatic_pm' in self.restart_alerts:
self._automatic_pm_alert.props.msg = self.restart_msg
self._automatic_pm_alert.show()
self.pack_start(box_pm, expand=False)
box_pm.show()
self.setup()
def setup(self):
try:
automatic_state = self._model.get_automatic_pm()
except Exception, detail:
self._automatic_pm_alert.props.msg = detail
self._automatic_pm_alert.show()
else:
self._automatic_button.set_active(automatic_state)
self._automatic_pm_valid = True
self.needs_restart = False
self._automatic_pm_change_handler = self._automatic_button.connect( \
'toggled', self.__automatic_pm_toggled_cb)
def undo(self):
self._automatic_button.disconnect(self._automatic_pm_change_handler)
self._model.undo()
self._automatic_pm_alert.hide()
def _validate(self):
if self._automatic_pm_valid:
self.props.is_valid = True
else:
self.props.is_valid = False
def __automatic_pm_toggled_cb(self, widget, data=None):
state = widget.get_active()
try:
self._model.set_automatic_pm(state)
except Exception, detail:
print detail
self._automatic_pm_alert.props.msg = detail
else:
self._automatic_pm_valid = True
self._validate()
return False
| gpl-2.0 | 4,088,517,647,482,016,300 | 36.401639 | 78 | 0.636862 | false |
Ichimonji10/robottelo | tests/foreman/ui/test_adusergroup.py | 1 | 21965 | """Test class for Active Directory Feature
@Requirement: Adusergroup
@CaseAutomation: Automated
@CaseLevel: Acceptance
@CaseComponent: UI
@TestType: Functional
@CaseImportance: High
@Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo.config import settings
from robottelo.constants import (
LDAP_SERVER_TYPE, LDAP_ATTR, PERMISSIONS, ANY_CONTEXT)
from robottelo.decorators import (
run_in_one_thread,
stubbed,
skip_if_bug_open,
skip_if_not_set,
tier1,
tier2,
)
from robottelo.test import UITestCase
from robottelo.ui.factory import (
make_role, make_usergroup, make_loc, make_org, set_context)
from robottelo.ui.locators import common_locators, locators, menu_locators
from robottelo.ui.session import Session
from selenium.webdriver.common.action_chains import ActionChains
@run_in_one_thread
class ActiveDirectoryUserGroupTestCase(UITestCase):
"""Implements Active Directory feature tests in UI."""
@classmethod
@skip_if_not_set('ldap')
def setUpClass(cls): # noqa
super(ActiveDirectoryUserGroupTestCase, cls).setUpClass()
cls.ldap_user_name = settings.ldap.username
cls.ldap_user_passwd = settings.ldap.password
cls.base_dn = settings.ldap.basedn
cls.group_base_dn = settings.ldap.grpbasedn
cls.ldap_hostname = settings.ldap.hostname
cls.usergroup_name = gen_string('alpha')
authsource_attrs = entities.AuthSourceLDAP(
onthefly_register=True,
account=cls.ldap_user_name,
account_password=cls.ldap_user_passwd,
base_dn=cls.base_dn,
groups_base=cls.group_base_dn,
attr_firstname=LDAP_ATTR['firstname'],
attr_lastname=LDAP_ATTR['surname'],
attr_login=LDAP_ATTR['login_ad'],
server_type=LDAP_SERVER_TYPE['API']['ad'],
attr_mail=LDAP_ATTR['mail'],
name=gen_string('alpha'),
host=cls.ldap_hostname,
tls=False,
port='389',
).create()
cls.ldap_server_name = authsource_attrs.name
def tearDown(self):
with Session(self.browser) as session:
set_context(session, org=ANY_CONTEXT['org'])
if self.user.search(self.ldap_user_name):
self.user.delete(self.ldap_user_name)
if self.usergroup.search(self.usergroup_name):
self.usergroup.delete(self.usergroup_name, True)
super(ActiveDirectoryUserGroupTestCase, self).tearDown()
def check_external_user(self):
"""Check whether external user is active and reachable. That operation
also add that user into application system for internal configuration
procedures
"""
strategy, value = locators['login.loggedin']
with Session(
self.browser, self.ldap_user_name, self.ldap_user_passwd):
self.assertIsNotNone(self.login.wait_until_element(
(strategy, value % self.ldap_user_name)
))
@tier1
def test_positive_add_admin_role(self):
"""Associate Admin role to User Group. [belonging to external AD User
Group.]
@id: c3371810-1ddc-4a2c-b7e1-3b4d5db3a755
@Steps:
1. Create an UserGroup.
2. Assign admin role to UserGroup.
3. Create and associate an External AD UserGroup.
@Assert: Whether a User belonging to User Group is able to access some
of the pages.
"""
self.check_external_user()
with Session(self.browser) as session:
make_usergroup(
session,
name=self.usergroup_name,
roles=['admin'],
ext_usergrp='foobargroup',
ext_authsourceid='LDAP-' + self.ldap_server_name,
)
self.assertIsNotNone(self.usergroup.search(self.usergroup_name))
set_context(session, org=ANY_CONTEXT['org'])
self.user.update(
username=self.ldap_user_name,
authorized_by='LDAP-' + self.ldap_server_name,
new_password=self.ldap_user_passwd,
password_confirmation=self.ldap_user_passwd,
)
with Session(
self.browser,
self.ldap_user_name,
self.ldap_user_passwd,
):
session.nav.go_to_users()
session.nav.go_to_roles()
session.nav.go_to_content_views()
@tier2
def test_positive_add_foreman_role(self):
"""Associate foreman roles to User Group.
[belonging to external AD User Group.]
@id: c11fbf85-e144-4576-99e3-1ba111479f0f
@Steps:
1. Create an UserGroup.
2. Assign some foreman roles to UserGroup.
3. Create and associate an External AD UserGroup.
@Assert: Whether a User belonging to User Group is able to access
foreman entities as per roles.
@CaseLevel: Integration
"""
self.check_external_user()
strategy, value = locators['login.loggedin']
foreman_role = gen_string('alpha')
location_name = gen_string('alpha')
with Session(self.browser) as session:
make_role(session, name=foreman_role)
self.role.update(
foreman_role,
add_permission=True,
permission_list=PERMISSIONS['Location'],
resource_type='Location',
)
make_usergroup(
session,
name=self.usergroup_name,
roles=[foreman_role],
ext_usergrp='foobargroup',
ext_authsourceid="LDAP-" + self.ldap_server_name,
)
self.assertIsNotNone(self.usergroup.search(self.usergroup_name))
set_context(session, org=ANY_CONTEXT['org'])
self.user.update(
username=self.ldap_user_name,
authorized_by='LDAP-' + self.ldap_server_name,
new_password=self.ldap_user_passwd,
password_confirmation=self.ldap_user_passwd,
)
with Session(
self.browser,
self.ldap_user_name,
self.ldap_user_passwd,
) as session:
self.assertIsNotNone(self.login.wait_until_element(
(strategy, value % self.ldap_user_name)
))
make_loc(session, name=location_name)
self.assertIsNotNone(self.location.search(location_name))
@tier2
def test_positive_add_katello_role(self):
"""Associate katello roles to User Group.
[belonging to external AD User Group.]
@id: aa5e3bf4-cb42-43a4-93ea-a2eea54b847a
@Steps:
1. Create an UserGroup.
2. Assign some foreman roles to UserGroup.
3. Create and associate an External AD UserGroup.
@Assert: Whether a User belonging to User Group is able to access
katello entities as per roles.
@CaseLevel: Integration
"""
self.check_external_user()
katello_role = gen_string('alpha')
org_name = gen_string('alpha')
with Session(self.browser) as session:
make_role(session, name=katello_role)
self.role.update(
katello_role,
add_permission=True,
permission_list=PERMISSIONS['Organization'],
resource_type='Organization',
)
make_usergroup(
session,
name=self.usergroup_name,
roles=[katello_role],
ext_usergrp='foobargroup',
ext_authsourceid='LDAP-' + self.ldap_server_name,
)
self.assertIsNotNone(self.usergroup.search(self.usergroup_name))
set_context(session, org=ANY_CONTEXT['org'])
self.user.update(
username=self.ldap_user_name,
authorized_by='LDAP-' + self.ldap_server_name,
new_password=self.ldap_user_passwd,
password_confirmation=self.ldap_user_passwd,
)
with Session(
self.browser,
self.ldap_user_name,
self.ldap_user_passwd
) as session:
make_org(session, org_name=org_name)
self.assertIsNotNone(self.org.search(org_name))
@tier1
def test_positive_create_external(self):
"""Create External AD User Group as per AD group
@id: b5e64316-55b9-4480-8701-308e91be9344
@Steps:
1. Create an UserGroup.
2. Assign some roles to UserGroup.
3. Create an External AD UserGroup as per the UserGroup name in AD
@Assert: Whether creation of External AD User Group is possible.
"""
with Session(self.browser) as session:
make_usergroup(
session,
name=self.usergroup_name,
roles=['admin'],
ext_usergrp='foobargroup',
ext_authsourceid='LDAP-' + self.ldap_server_name,
)
self.assertIsNotNone(self.usergroup.search(self.usergroup_name))
@tier1
def test_negative_create_external_with_same_name(self):
"""Attempt to create two User Groups with same External AD User Group
name
@id: 8f2cde96-644a-4729-880a-65a22c7e7262
@Steps:
1. Create an UserGroup.
2. Assign External AD UserGroup as per the UserGroup name in AD.
3. Repeat steps 1) and 2), but provide the same external UserGroup name
@Assert: Creation of User Group should not be possible with same
External AD User Group name.
"""
new_usergroup_name = gen_string('alpha')
with Session(self.browser) as session:
make_usergroup(
session,
name=self.usergroup_name,
roles=['admin'],
ext_usergrp='foobargroup',
ext_authsourceid='LDAP-' + self.ldap_server_name,
)
self.assertIsNotNone(self.usergroup.search(self.usergroup_name))
make_usergroup(
session,
name=new_usergroup_name,
roles=['admin'],
ext_usergrp='foobargroup',
ext_authsourceid='LDAP-' + self.ldap_server_name,
)
self.assertIsNone(self.usergroup.search(new_usergroup_name))
@tier1
def test_negative_create_external_with_invalid_name(self):
"""Create External AD User Group with random name
@id: 2fd12301-9a35-49f1-9723-2b74551414c2
@Steps:
1. Create an UserGroup.
2. Assign some roles to UserGroup.
3. Create an External AD UserGroup with any random name.
@Assert: Creation of External AD User Group should not be possible with
random name.
"""
with Session(self.browser) as session:
make_usergroup(
session,
name=self.usergroup_name,
roles=['admin'],
ext_usergrp=gen_string('alpha'),
ext_authsourceid='LDAP-' + self.ldap_server_name,
)
self.assertIsNotNone(self.usergroup.wait_until_element(
common_locators['haserror']
))
self.assertIsNone(self.usergroup.search(self.usergroup_name))
@stubbed()
@tier2
def test_positive_delete_external(self):
"""Delete External AD User Group
@id: 364e9ddc-4ab7-46a9-b52c-8159aab7f811
@Steps:
1. Create an UserGroup.
2. Assign some roles to UserGroup.
3. Create an External AD UserGroup as per the UserGroup name in AD.
4. Delete the External AD UserGroup.
Note:- Deletion as of sat6.1 is possible only via CLI and not via UI.
@Assert: Deletion of External AD User Group should be possible and the
user should not be able to perform the roles that were assigned to it
at the UserGroup level.
@caseautomation: notautomated
@CaseLevel: Integration
"""
@skip_if_bug_open('bugzilla', '1221971')
@tier2
def test_positive_update_external_roles(self):
"""Added AD UserGroup roles get pushed down to user
@id: f3ca1aae-5461-4af3-a508-82679bb6afed
@setup: assign additional roles to the UserGroup
@steps:
1. Create an UserGroup.
2. Assign some roles to UserGroup.
3. Create an External AD UserGroup as per the UserGroup name in AD.
4. Login to sat6 with the AD user.
5. Assign additional roles to the UserGroup.
6. Login to sat6 with LDAP user that is part of aforementioned
UserGroup.
@assert: User has access to all NEW functional areas that are assigned
to aforementioned UserGroup.
@CaseLevel: Integration
"""
self.check_external_user()
foreman_role = gen_string('alpha')
katello_role = gen_string('alpha')
org_name = gen_string('alpha')
loc_name = gen_string('alpha')
with Session(self.browser) as session:
make_role(session, name=foreman_role)
self.role.update(
foreman_role,
add_permission=True,
permission_list=PERMISSIONS['Location'],
resource_type='Location',
)
make_usergroup(
session,
name=self.usergroup_name,
roles=[foreman_role],
ext_usergrp='foobargroup',
ext_authsourceid='LDAP-' + self.ldap_server_name,
)
self.assertIsNotNone(self.usergroup.search(self.usergroup_name))
set_context(session, org=ANY_CONTEXT['org'])
self.user.update(
username=self.ldap_user_name,
authorized_by='LDAP-' + self.ldap_server_name,
new_password=self.ldap_user_passwd,
password_confirmation=self.ldap_user_passwd,
)
with Session(
self.browser,
self.ldap_user_name,
self.ldap_user_passwd,
) as session:
make_loc(session, name=loc_name)
self.assertIsNotNone(self.location.search(loc_name))
with Session(self.browser) as session:
make_role(session, name=katello_role)
self.role.update(
katello_role,
add_permission=True,
permission_list=PERMISSIONS['Organization'],
resource_type='Organization',
)
self.usergroup.update(
self.usergroup_name,
new_roles=[katello_role],
entity_select=True,
)
self.usergroup.refresh_ext_group(
self.usergroup_name, 'foobargroup')
self.assertIsNotNone(self.usergroup.wait_until_element(
common_locators['notif.success']))
with Session(
self.browser,
self.ldap_user_name,
self.ldap_user_passwd,
) as session:
make_org(session, org_name=org_name)
self.assertIsNotNone(self.org.search(org_name))
@tier2
def test_positive_delete_external_roles(self):
"""Deleted AD UserGroup roles get pushed down to user
@id: 479bc8fe-f6a3-4c89-8c7e-3d997315383f
@setup: delete roles from an AD UserGroup
@steps:
1. Create an UserGroup.
2. Assign some roles to UserGroup.
3. Create an External AD UserGroup as per the UserGroup name in AD.
4. Login to sat6 with the AD user.
5. Unassign some of the existing roles of the UserGroup.
6. Login to sat6 with LDAP user that is part of aforementioned
UserGroup.
@assert: User no longer has access to all deleted functional areas
that were assigned to aforementioned UserGroup.
@CaseLevel: Integration
"""
self.check_external_user()
foreman_role = gen_string('alpha')
with Session(self.browser) as session:
make_role(session, name=foreman_role)
self.role.update(
foreman_role,
add_permission=True,
permission_list=PERMISSIONS['Location'],
resource_type='Location',
)
make_usergroup(
session,
name=self.usergroup_name,
roles=[foreman_role],
ext_usergrp='foobargroup',
ext_authsourceid='LDAP-' + self.ldap_server_name,
)
self.assertIsNotNone(self.usergroup.search(self.usergroup_name))
set_context(session, org=ANY_CONTEXT['org'])
self.user.update(
username=self.ldap_user_name,
authorized_by='LDAP-' + self.ldap_server_name,
new_password=self.ldap_user_passwd,
password_confirmation=self.ldap_user_passwd,
)
with Session(
self.browser, self.ldap_user_name, self.ldap_user_passwd
) as session:
session.nav.go_to_loc()
with Session(self.browser):
self.usergroup.update(
self.usergroup_name, roles=[foreman_role], entity_select=False)
with Session(
self.browser,
self.ldap_user_name,
self.ldap_user_passwd,
) as session:
ActionChains(
self.browser
).move_to_element(session.nav.wait_until_element(
menu_locators['menu.any_context']
)).perform()
self.assertIsNone(session.nav.wait_until_element(
menu_locators['loc.manage_loc']
))
@tier2
def test_positive_update_external_user_roles(self):
"""Assure that user has roles/can access feature areas for
additional roles assigned outside any roles assigned by his group
@id: a487f7d6-22f2-4e42-b34f-8d984f721c83
@setup: Assign roles to UserGroup and configure external
UserGroup subsequently assign specified roles to the user(s).
roles that are not part of the larger UserGroup
@steps:
1. Create an UserGroup.
2. Assign some roles to UserGroup.
3. Create an External AD UserGroup as per the UserGroup name in AD.
4. Assign some more roles to a User(which is part of external AD
UserGroup) at the User level.
5. Login to sat6 with the above AD user and attempt to access areas
assigned specifically to user.
@assert: User can access not only those feature areas in his
UserGroup but those additional feature areas / roles assigned
specifically to user
@CaseLevel: Integration
"""
self.check_external_user()
foreman_role = gen_string('alpha')
katello_role = gen_string('alpha')
org_name = gen_string('alpha')
loc_name = gen_string('alpha')
with Session(self.browser) as session:
make_role(session, name=foreman_role)
self.role.update(
foreman_role,
add_permission=True,
permission_list=PERMISSIONS['Location'],
resource_type='Location',
)
make_usergroup(
session,
name=self.usergroup_name,
roles=[foreman_role],
ext_usergrp='foobargroup',
ext_authsourceid='LDAP-' + self.ldap_server_name,
)
self.assertIsNotNone(self.usergroup.search(self.usergroup_name))
set_context(session, org=ANY_CONTEXT['org'])
self.user.update(
username=self.ldap_user_name,
authorized_by='LDAP-' + self.ldap_server_name,
new_password=self.ldap_user_passwd,
password_confirmation=self.ldap_user_passwd,
)
with Session(
self.browser,
self.ldap_user_name,
self.ldap_user_passwd,
) as session:
make_loc(session, name=loc_name)
self.assertIsNotNone(self.location.search(loc_name))
with Session(self.browser) as session:
make_role(session, name=katello_role)
self.role.update(
katello_role,
add_permission=True,
permission_list=PERMISSIONS['Organization'],
resource_type='Organization',
)
set_context(session, org=ANY_CONTEXT['org'])
self.user.update(
self.ldap_user_name,
new_roles=[katello_role],
select=True,
)
with Session(
self.browser,
self.ldap_user_name,
self.ldap_user_passwd,
) as session:
make_org(session, org_name=org_name)
self.assertIsNotNone(self.org.search(org_name))
@stubbed()
@tier2
def test_positive_add_external_user(self):
"""New user added to UserGroup at AD side inherits roles in Sat6
@id: da41d197-85d5-4405-98ec-30c1d69f4c93
@setup: UserGroup with specified roles.
@steps:
1. Create an UserGroup.
2. Assign some roles to UserGroup.
3. Create an External AD UserGroup as per the UserGroup name in AD.
4. On AD server side, assign a new user to a UserGroup.
5. Login to sat6 with the above new AD user and attempt to access the
functional areas assigned to the user.
@assert: User can access feature areas as defined by roles in the
UserGroup of which he is a part.
@caseautomation: notautomated
@CaseLevel: Integration
"""
| gpl-3.0 | -5,938,062,585,294,894,000 | 34.949264 | 79 | 0.580833 | false |
ESOedX/edx-platform | lms/djangoapps/discussion/django_comment_client/base/tests.py | 1 | 90626 | # pylint: skip-file
# -*- coding: utf-8 -*-
"""Tests for django comment client views."""
from __future__ import absolute_import
import json
import logging
from contextlib import contextmanager
import ddt
import mock
import six
from django.contrib.auth.models import User
from django.core.management import call_command
from django.test.client import RequestFactory
from django.urls import reverse
from eventtracking.processors.exceptions import EventEmissionExit
from mock import ANY, Mock, patch
from opaque_keys.edx.keys import CourseKey
from six import text_type
from six.moves import range
from common.test.utils import MockSignalHandlerMixin, disable_signal
from course_modes.models import CourseMode
from course_modes.tests.factories import CourseModeFactory
from lms.djangoapps.discussion.django_comment_client.base import views
from lms.djangoapps.discussion.django_comment_client.tests.group_id import (
CohortedTopicGroupIdTestMixin,
GroupIdAssertionMixin,
NonCohortedTopicGroupIdTestMixin
)
from lms.djangoapps.discussion.django_comment_client.tests.unicode import UnicodeTestMixin
from lms.djangoapps.discussion.django_comment_client.tests.utils import CohortedTestCase, ForumsEnableMixin
from lms.djangoapps.teams.tests.factories import CourseTeamFactory, CourseTeamMembershipFactory
from openedx.core.djangoapps.course_groups.cohorts import set_course_cohorted
from openedx.core.djangoapps.course_groups.tests.helpers import CohortFactory
from openedx.core.djangoapps.django_comment_common.comment_client import Thread
from openedx.core.djangoapps.django_comment_common.models import (
FORUM_ROLE_STUDENT,
CourseDiscussionSettings,
Role,
assign_role
)
from openedx.core.djangoapps.django_comment_common.utils import (
ThreadContext,
seed_permissions_roles,
set_course_discussion_settings
)
from openedx.core.djangoapps.waffle_utils.testutils import WAFFLE_TABLES
from student.roles import CourseStaffRole, UserBasedRole
from student.tests.factories import CourseAccessRoleFactory, CourseEnrollmentFactory, UserFactory
from track.middleware import TrackMiddleware
from track.views import segmentio
from track.views.tests.base import SEGMENTIO_TEST_USER_ID, SegmentIOTrackingTestCaseBase
from util.testing import UrlResetMixin
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase, SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls
from .event_transformers import ForumThreadViewedEventTransformer
log = logging.getLogger(__name__)
CS_PREFIX = "http://localhost:4567/api/v1"
QUERY_COUNT_TABLE_BLACKLIST = WAFFLE_TABLES
# pylint: disable=missing-docstring
class MockRequestSetupMixin(object):
def _create_response_mock(self, data):
return Mock(
text=json.dumps(data),
json=Mock(return_value=data),
status_code=200
)
def _set_mock_request_data(self, mock_request, data):
mock_request.return_value = self._create_response_mock(data)
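# Illustrative sketch (not used by the tests themselves): the shape of the
# object MockRequestSetupMixin installs on the patched `requests.request`.
# The payload values below are hypothetical and exist only for demonstration.
def _demo_mocked_comment_service_response():
    """Build and sanity-check a response like _create_response_mock produces."""
    data = {"closed": False, "commentable_id": "dummy"}
    response = Mock(
        text=json.dumps(data),         # raw body, as `requests` would expose it
        json=Mock(return_value=data),  # parsed payload read by the comment client
        status_code=200,               # the comments service always "succeeds" here
    )
    assert response.json() == data
    return response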
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
class CreateThreadGroupIdTestCase(
MockRequestSetupMixin,
CohortedTestCase,
CohortedTopicGroupIdTestMixin,
NonCohortedTopicGroupIdTestMixin
):
cs_endpoint = "/threads"
def call_view(self, mock_request, commentable_id, user, group_id, pass_group_id=True):
self._set_mock_request_data(mock_request, {})
request_data = {"body": "body", "title": "title", "thread_type": "discussion"}
if pass_group_id:
request_data["group_id"] = group_id
request = RequestFactory().post("dummy_url", request_data)
request.user = user
request.view_name = "create_thread"
return views.create_thread(
request,
course_id=six.text_type(self.course.id),
commentable_id=commentable_id
)
def test_group_info_in_response(self, mock_request):
response = self.call_view(
mock_request,
"cohorted_topic",
self.student,
None
)
self._assert_json_response_contains_group_info(response)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
@disable_signal(views, 'thread_edited')
@disable_signal(views, 'thread_voted')
@disable_signal(views, 'thread_deleted')
class ThreadActionGroupIdTestCase(
MockRequestSetupMixin,
CohortedTestCase,
GroupIdAssertionMixin
):
def call_view(
self,
view_name,
mock_request,
user=None,
post_params=None,
view_args=None
):
self._set_mock_request_data(
mock_request,
{
"user_id": str(self.student.id),
"group_id": self.student_cohort.id,
"closed": False,
"type": "thread",
"commentable_id": "non_team_dummy_id"
}
)
request = RequestFactory().post("dummy_url", post_params or {})
request.user = user or self.student
request.view_name = view_name
return getattr(views, view_name)(
request,
course_id=six.text_type(self.course.id),
thread_id="dummy",
**(view_args or {})
)
def test_update(self, mock_request):
response = self.call_view(
"update_thread",
mock_request,
post_params={"body": "body", "title": "title"}
)
self._assert_json_response_contains_group_info(response)
def test_delete(self, mock_request):
response = self.call_view("delete_thread", mock_request)
self._assert_json_response_contains_group_info(response)
def test_vote(self, mock_request):
response = self.call_view(
"vote_for_thread",
mock_request,
view_args={"value": "up"}
)
self._assert_json_response_contains_group_info(response)
response = self.call_view("undo_vote_for_thread", mock_request)
self._assert_json_response_contains_group_info(response)
def test_flag(self, mock_request):
response = self.call_view("flag_abuse_for_thread", mock_request)
self._assert_json_response_contains_group_info(response)
response = self.call_view("un_flag_abuse_for_thread", mock_request)
self._assert_json_response_contains_group_info(response)
def test_pin(self, mock_request):
response = self.call_view(
"pin_thread",
mock_request,
user=self.moderator
)
self._assert_json_response_contains_group_info(response)
response = self.call_view(
"un_pin_thread",
mock_request,
user=self.moderator
)
self._assert_json_response_contains_group_info(response)
def test_openclose(self, mock_request):
response = self.call_view(
"openclose_thread",
mock_request,
user=self.moderator
)
self._assert_json_response_contains_group_info(
response,
lambda d: d['content']
)
class ViewsTestCaseMixin(object):
def set_up_course(self, module_count=0):
"""
Creates a course, optionally with module_count discussion modules, and
a user with appropriate permissions.
"""
# create a course
self.course = CourseFactory.create(
org='MITx', course='999',
discussion_topics={"Some Topic": {"id": "some_topic"}},
display_name='Robot Super Course',
)
self.course_id = self.course.id
# add some discussion modules
for i in range(module_count):
ItemFactory.create(
parent_location=self.course.location,
category='discussion',
discussion_id='id_module_{}'.format(i),
discussion_category=u'Category {}'.format(i),
discussion_target=u'Discussion {}'.format(i)
)
# seed the forums permissions and roles
call_command('seed_permissions_roles', six.text_type(self.course_id))
# Patch the comment client user save method so it does not try
# to create a new cc user when creating a django user
with patch('student.models.cc.User.save'):
uname = 'student'
email = '[email protected]'
self.password = 'test'
# Create the user and make them active so we can log them in.
self.student = User.objects.create_user(uname, email, self.password)
self.student.is_active = True
self.student.save()
# Add a discussion moderator
self.moderator = UserFactory.create(password=self.password)
# Enroll the student in the course
CourseEnrollmentFactory(user=self.student,
course_id=self.course_id)
# Enroll the moderator and give them the appropriate roles
CourseEnrollmentFactory(user=self.moderator, course_id=self.course.id)
self.moderator.roles.add(Role.objects.get(name="Moderator", course_id=self.course.id))
assert self.client.login(username='student', password=self.password)
def _setup_mock_request(self, mock_request, include_depth=False):
"""
Ensure that mock_request returns the data necessary to make views
function correctly
"""
data = {
"user_id": str(self.student.id),
"closed": False,
"commentable_id": "non_team_dummy_id"
}
if include_depth:
data["depth"] = 0
self._set_mock_request_data(mock_request, data)
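    # Note: "depth" is only included for create_sub_comment (see
    # _test_request_error, which passes include_depth for that view); the
    # mocked parent comment needs a depth value there, and the other views
    # simply ignore the key.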
def create_thread_helper(self, mock_request, extra_request_data=None, extra_response_data=None):
"""
Issues a request to create a thread and verifies the result.
"""
self._set_mock_request_data(mock_request, {
"thread_type": "discussion",
"title": "Hello",
"body": "this is a post",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": False,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [],
"type": "thread",
"group_id": None,
"pinned": False,
"endorsed": False,
"unread_comments_count": 0,
"read": False,
"comments_count": 0,
})
thread = {
"thread_type": "discussion",
"body": ["this is a post"],
"anonymous_to_peers": ["false"],
"auto_subscribe": ["false"],
"anonymous": ["false"],
"title": ["Hello"],
}
if extra_request_data:
thread.update(extra_request_data)
url = reverse('create_thread', kwargs={'commentable_id': 'i4x-MITx-999-course-Robot_Super_Course',
'course_id': six.text_type(self.course_id)})
response = self.client.post(url, data=thread)
assert mock_request.called
expected_data = {
'thread_type': 'discussion',
'body': u'this is a post',
'context': ThreadContext.COURSE,
            'anonymous_to_peers': False,
            'user_id': 1,
'title': u'Hello',
'commentable_id': u'i4x-MITx-999-course-Robot_Super_Course',
'anonymous': False,
'course_id': six.text_type(self.course_id),
}
if extra_response_data:
expected_data.update(extra_response_data)
mock_request.assert_called_with(
'post',
'{prefix}/i4x-MITx-999-course-Robot_Super_Course/threads'.format(prefix=CS_PREFIX),
data=expected_data,
params={'request_id': ANY},
headers=ANY,
timeout=5
)
assert response.status_code == 200
def update_thread_helper(self, mock_request):
"""
Issues a request to update a thread and verifies the result.
"""
self._setup_mock_request(mock_request)
# Mock out saving in order to test that content is correctly
# updated. Otherwise, the call to thread.save() receives the
# same mocked request data that the original call to retrieve
# the thread did, overwriting any changes.
with patch.object(Thread, 'save'):
response = self.client.post(
reverse("update_thread", kwargs={
"thread_id": "dummy",
"course_id": six.text_type(self.course_id)
}),
data={"body": "foo", "title": "foo", "commentable_id": "some_topic"}
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(data['body'], 'foo')
self.assertEqual(data['title'], 'foo')
self.assertEqual(data['commentable_id'], 'some_topic')
@ddt.ddt
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
@disable_signal(views, 'thread_created')
@disable_signal(views, 'thread_edited')
class ViewsQueryCountTestCase(
ForumsEnableMixin,
UrlResetMixin,
ModuleStoreTestCase,
MockRequestSetupMixin,
ViewsTestCaseMixin
):
CREATE_USER = False
ENABLED_CACHES = ['default', 'mongo_metadata_inheritance', 'loc_cache']
ENABLED_SIGNALS = ['course_published']
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(ViewsQueryCountTestCase, self).setUp()
def count_queries(func): # pylint: disable=no-self-argument
"""
Decorates test methods to count mongo and SQL calls for a
particular modulestore.
"""
def inner(self, default_store, module_count, mongo_calls, sql_queries, *args, **kwargs):
with modulestore().default_store(default_store):
self.set_up_course(module_count=module_count)
self.clear_caches()
with self.assertNumQueries(sql_queries, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
with check_mongo_calls(mongo_calls):
func(self, *args, **kwargs)
return inner
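    # How the pieces fit together: ddt unpacks a tuple such as
    # (ModuleStoreEnum.Type.mongo, 3, 4, 41) into (default_store, module_count,
    # mongo_calls, sql_queries); count_queries consumes those four, builds the
    # course, asserts the mongo/SQL query budgets, and only then delegates to
    # the wrapped test method with any remaining arguments.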
@ddt.data(
(ModuleStoreEnum.Type.mongo, 3, 4, 41),
(ModuleStoreEnum.Type.split, 3, 13, 41),
)
@ddt.unpack
@count_queries
def test_create_thread(self, mock_request):
self.create_thread_helper(mock_request)
@ddt.data(
(ModuleStoreEnum.Type.mongo, 3, 3, 37),
(ModuleStoreEnum.Type.split, 3, 10, 37),
)
@ddt.unpack
@count_queries
def test_update_thread(self, mock_request):
self.update_thread_helper(mock_request)
@ddt.ddt
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
class ViewsTestCase(
ForumsEnableMixin,
UrlResetMixin,
SharedModuleStoreTestCase,
MockRequestSetupMixin,
ViewsTestCaseMixin,
MockSignalHandlerMixin
):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(ViewsTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create(
org='MITx', course='999',
discussion_topics={"Some Topic": {"id": "some_topic"}},
display_name='Robot Super Course',
)
@classmethod
def setUpTestData(cls):
super(ViewsTestCase, cls).setUpTestData()
cls.course_id = cls.course.id
# seed the forums permissions and roles
call_command('seed_permissions_roles', six.text_type(cls.course_id))
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
# Patching the ENABLE_DISCUSSION_SERVICE value affects the contents of urls.py,
        # so we need to call super().setUp(), which reloads urls.py (because
# of the UrlResetMixin)
super(ViewsTestCase, self).setUp()
# Patch the comment client user save method so it does not try
# to create a new cc user when creating a django user
with patch('student.models.cc.User.save'):
uname = 'student'
email = '[email protected]'
self.password = 'test'
# Create the user and make them active so we can log them in.
self.student = User.objects.create_user(uname, email, self.password)
self.student.is_active = True
self.student.save()
# Add a discussion moderator
self.moderator = UserFactory.create(password=self.password)
# Enroll the student in the course
CourseEnrollmentFactory(user=self.student,
course_id=self.course_id)
# Enroll the moderator and give them the appropriate roles
CourseEnrollmentFactory(user=self.moderator, course_id=self.course.id)
self.moderator.roles.add(Role.objects.get(name="Moderator", course_id=self.course.id))
assert self.client.login(username='student', password=self.password)
@contextmanager
def assert_discussion_signals(self, signal, user=None):
if user is None:
user = self.student
with self.assert_signal_sent(views, signal, sender=None, user=user, exclude_args=('post',)):
yield
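    # Usage sketch: wrapping a call in this context manager turns a missing
    # (or wrongly-shaped) discussion signal into a test failure, e.g.
    #     with self.assert_discussion_signals('thread_created'):
    #         self.create_thread_helper(mock_request)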
def test_create_thread(self, mock_request):
with self.assert_discussion_signals('thread_created'):
self.create_thread_helper(mock_request)
def test_create_thread_standalone(self, mock_request):
team = CourseTeamFactory.create(
name="A Team",
course_id=self.course_id,
topic_id='topic_id',
discussion_topic_id="i4x-MITx-999-course-Robot_Super_Course"
)
# Add the student to the team so they can post to the commentable.
team.add_user(self.student)
# create_thread_helper verifies that extra data are passed through to the comments service
self.create_thread_helper(mock_request, extra_response_data={'context': ThreadContext.STANDALONE})
@ddt.data(
('follow_thread', 'thread_followed'),
('unfollow_thread', 'thread_unfollowed'),
)
@ddt.unpack
def test_follow_unfollow_thread_signals(self, view_name, signal, mock_request):
self.create_thread_helper(mock_request)
with self.assert_discussion_signals(signal):
response = self.client.post(
reverse(
view_name,
kwargs={"course_id": six.text_type(self.course_id), "thread_id": 'i4x-MITx-999-course-Robot_Super_Course'}
)
)
self.assertEqual(response.status_code, 200)
def test_delete_thread(self, mock_request):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
test_thread_id = "test_thread_id"
request = RequestFactory().post("dummy_url", {"id": test_thread_id})
request.user = self.student
request.view_name = "delete_thread"
with self.assert_discussion_signals('thread_deleted'):
response = views.delete_thread(
request,
course_id=six.text_type(self.course.id),
thread_id=test_thread_id
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
def test_delete_comment(self, mock_request):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
test_comment_id = "test_comment_id"
request = RequestFactory().post("dummy_url", {"id": test_comment_id})
request.user = self.student
request.view_name = "delete_comment"
with self.assert_discussion_signals('comment_deleted'):
response = views.delete_comment(
request,
course_id=six.text_type(self.course.id),
comment_id=test_comment_id
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
args = mock_request.call_args[0]
self.assertEqual(args[0], "delete")
self.assertTrue(args[1].endswith("/{}".format(test_comment_id)))
def _test_request_error(self, view_name, view_kwargs, data, mock_request):
"""
Submit a request against the given view with the given data and ensure
that the result is a 400 error and that no data was posted using
mock_request
"""
self._setup_mock_request(mock_request, include_depth=(view_name == "create_sub_comment"))
response = self.client.post(reverse(view_name, kwargs=view_kwargs), data=data)
self.assertEqual(response.status_code, 400)
for call in mock_request.call_args_list:
self.assertEqual(call[0][0].lower(), "get")
def test_create_thread_no_title(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": "foo"},
mock_request
)
def test_create_thread_empty_title(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": "foo", "title": " "},
mock_request
)
def test_create_thread_no_body(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": six.text_type(self.course_id)},
{"title": "foo"},
mock_request
)
def test_create_thread_empty_body(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": " ", "title": "foo"},
mock_request
)
def test_update_thread_no_title(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": "foo"},
mock_request
)
def test_update_thread_empty_title(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": "foo", "title": " "},
mock_request
)
def test_update_thread_no_body(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{"title": "foo"},
mock_request
)
def test_update_thread_empty_body(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": " ", "title": "foo"},
mock_request
)
def test_update_thread_course_topic(self, mock_request):
with self.assert_discussion_signals('thread_edited'):
self.update_thread_helper(mock_request)
@patch(
'lms.djangoapps.discussion.django_comment_client.utils.get_discussion_categories_ids',
return_value=["test_commentable"],
)
def test_update_thread_wrong_commentable_id(self, mock_get_discussion_id_map, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": "foo", "title": "foo", "commentable_id": "wrong_commentable"},
mock_request
)
def test_create_comment(self, mock_request):
self._setup_mock_request(mock_request)
with self.assert_discussion_signals('comment_created'):
response = self.client.post(
reverse(
"create_comment",
kwargs={"course_id": six.text_type(self.course_id), "thread_id": "dummy"}
),
data={"body": "body"}
)
self.assertEqual(response.status_code, 200)
def test_create_comment_no_body(self, mock_request):
self._test_request_error(
"create_comment",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{},
mock_request
)
def test_create_comment_empty_body(self, mock_request):
self._test_request_error(
"create_comment",
{"thread_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": " "},
mock_request
)
def test_create_sub_comment_no_body(self, mock_request):
self._test_request_error(
"create_sub_comment",
{"comment_id": "dummy", "course_id": six.text_type(self.course_id)},
{},
mock_request
)
def test_create_sub_comment_empty_body(self, mock_request):
self._test_request_error(
"create_sub_comment",
{"comment_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": " "},
mock_request
)
def test_update_comment_no_body(self, mock_request):
self._test_request_error(
"update_comment",
{"comment_id": "dummy", "course_id": six.text_type(self.course_id)},
{},
mock_request
)
def test_update_comment_empty_body(self, mock_request):
self._test_request_error(
"update_comment",
{"comment_id": "dummy", "course_id": six.text_type(self.course_id)},
{"body": " "},
mock_request
)
def test_update_comment_basic(self, mock_request):
self._setup_mock_request(mock_request)
comment_id = "test_comment_id"
updated_body = "updated body"
with self.assert_discussion_signals('comment_edited'):
response = self.client.post(
reverse(
"update_comment",
kwargs={"course_id": six.text_type(self.course_id), "comment_id": comment_id}
),
data={"body": updated_body}
)
self.assertEqual(response.status_code, 200)
mock_request.assert_called_with(
"put",
"{prefix}/comments/{comment_id}".format(prefix=CS_PREFIX, comment_id=comment_id),
headers=ANY,
params=ANY,
timeout=ANY,
data={"body": updated_body}
)
def test_flag_thread_open(self, mock_request):
self.flag_thread(mock_request, False)
def test_flag_thread_close(self, mock_request):
self.flag_thread(mock_request, True)
def flag_thread(self, mock_request, is_closed):
self._set_mock_request_data(mock_request, {
"title": "Hello",
"body": "this is a post",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1", "username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [1],
"type": "thread",
"group_id": None,
"pinned": False,
"endorsed": False,
"unread_comments_count": 0,
"read": False,
"comments_count": 0,
})
url = reverse('flag_abuse_for_thread', kwargs={
'thread_id': '518d4237b023791dca00000d',
'course_id': six.text_type(self.course_id)
})
response = self.client.post(url)
assert mock_request.called
call_list = [
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY, 'with_responses': False},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/threads/518d4237b023791dca00000d/abuse_flag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY, 'with_responses': False},
'headers': ANY,
'timeout': 5
}
)
]
assert mock_request.call_args_list == call_list
assert response.status_code == 200
def test_un_flag_thread_open(self, mock_request):
self.un_flag_thread(mock_request, False)
def test_un_flag_thread_close(self, mock_request):
self.un_flag_thread(mock_request, True)
def un_flag_thread(self, mock_request, is_closed):
self._set_mock_request_data(mock_request, {
"title": "Hello",
"body": "this is a post",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [],
"type": "thread",
"group_id": None,
"pinned": False,
"endorsed": False,
"unread_comments_count": 0,
"read": False,
"comments_count": 0
})
url = reverse('un_flag_abuse_for_thread', kwargs={
'thread_id': '518d4237b023791dca00000d',
'course_id': six.text_type(self.course_id)
})
response = self.client.post(url)
assert mock_request.called
call_list = [
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY, 'with_responses': False},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/threads/518d4237b023791dca00000d/abuse_unflag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY, 'with_responses': False},
'headers': ANY,
'timeout': 5
}
)
]
assert mock_request.call_args_list == call_list
assert response.status_code == 200
def test_flag_comment_open(self, mock_request):
self.flag_comment(mock_request, False)
def test_flag_comment_close(self, mock_request):
self.flag_comment(mock_request, True)
def flag_comment(self, mock_request, is_closed):
self._set_mock_request_data(mock_request, {
"body": "this is a comment",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [1],
"type": "comment",
"endorsed": False
})
url = reverse('flag_abuse_for_comment', kwargs={
'comment_id': '518d4237b023791dca00000d',
'course_id': six.text_type(self.course_id)
})
response = self.client.post(url)
assert mock_request.called
call_list = [
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/comments/518d4237b023791dca00000d/abuse_flag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
)
]
assert mock_request.call_args_list == call_list
assert response.status_code == 200
def test_un_flag_comment_open(self, mock_request):
self.un_flag_comment(mock_request, False)
def test_un_flag_comment_close(self, mock_request):
self.un_flag_comment(mock_request, True)
def un_flag_comment(self, mock_request, is_closed):
self._set_mock_request_data(mock_request, {
"body": "this is a comment",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [],
"type": "comment",
"endorsed": False
})
url = reverse('un_flag_abuse_for_comment', kwargs={
'comment_id': '518d4237b023791dca00000d',
'course_id': six.text_type(self.course_id)
})
response = self.client.post(url)
assert mock_request.called
call_list = [
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/comments/518d4237b023791dca00000d/abuse_unflag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
)
]
assert mock_request.call_args_list == call_list
assert response.status_code == 200
@ddt.data(
('upvote_thread', 'thread_id', 'thread_voted'),
('upvote_comment', 'comment_id', 'comment_voted'),
('downvote_thread', 'thread_id', 'thread_voted'),
('downvote_comment', 'comment_id', 'comment_voted')
)
@ddt.unpack
def test_voting(self, view_name, item_id, signal, mock_request):
self._setup_mock_request(mock_request)
with self.assert_discussion_signals(signal):
response = self.client.post(
reverse(
view_name,
kwargs={item_id: 'dummy', 'course_id': six.text_type(self.course_id)}
)
)
self.assertEqual(response.status_code, 200)
def test_endorse_comment(self, mock_request):
self._setup_mock_request(mock_request)
self.client.login(username=self.moderator.username, password=self.password)
with self.assert_discussion_signals('comment_endorsed', user=self.moderator):
response = self.client.post(
reverse(
'endorse_comment',
kwargs={'comment_id': 'dummy', 'course_id': six.text_type(self.course_id)}
)
)
self.assertEqual(response.status_code, 200)
@patch("openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request", autospec=True)
@disable_signal(views, 'comment_endorsed')
class ViewPermissionsTestCase(ForumsEnableMixin, UrlResetMixin, SharedModuleStoreTestCase, MockRequestSetupMixin):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(ViewPermissionsTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(ViewPermissionsTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.password = "test password"
cls.student = UserFactory.create(password=cls.password)
cls.moderator = UserFactory.create(password=cls.password)
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
CourseEnrollmentFactory(user=cls.moderator, course_id=cls.course.id)
cls.moderator.roles.add(Role.objects.get(name="Moderator", course_id=cls.course.id))
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(ViewPermissionsTestCase, self).setUp()
def test_pin_thread_as_student(self, mock_request):
self._set_mock_request_data(mock_request, {})
self.client.login(username=self.student.username, password=self.password)
response = self.client.post(
reverse("pin_thread", kwargs={"course_id": six.text_type(self.course.id), "thread_id": "dummy"})
)
self.assertEqual(response.status_code, 401)
def test_pin_thread_as_moderator(self, mock_request):
self._set_mock_request_data(mock_request, {})
self.client.login(username=self.moderator.username, password=self.password)
response = self.client.post(
reverse("pin_thread", kwargs={"course_id": six.text_type(self.course.id), "thread_id": "dummy"})
)
self.assertEqual(response.status_code, 200)
def test_un_pin_thread_as_student(self, mock_request):
self._set_mock_request_data(mock_request, {})
self.client.login(username=self.student.username, password=self.password)
response = self.client.post(
reverse("un_pin_thread", kwargs={"course_id": six.text_type(self.course.id), "thread_id": "dummy"})
)
self.assertEqual(response.status_code, 401)
def test_un_pin_thread_as_moderator(self, mock_request):
self._set_mock_request_data(mock_request, {})
self.client.login(username=self.moderator.username, password=self.password)
response = self.client.post(
reverse("un_pin_thread", kwargs={"course_id": six.text_type(self.course.id), "thread_id": "dummy"})
)
self.assertEqual(response.status_code, 200)
def _set_mock_request_thread_and_comment(self, mock_request, thread_data, comment_data):
def handle_request(*args, **kwargs):
url = args[1]
if "/threads/" in url:
return self._create_response_mock(thread_data)
elif "/comments/" in url:
return self._create_response_mock(comment_data)
else:
                raise ValueError("Bad url to mock request: {}".format(url))
mock_request.side_effect = handle_request
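    # handle_request above is a small URL-based dispatcher: any comments-service
    # call whose URL contains "/threads/" gets the thread payload, "/comments/"
    # gets the comment payload, and anything else is treated as a programming
    # error in the test itself.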
def test_endorse_response_as_staff(self, mock_request):
self._set_mock_request_thread_and_comment(
mock_request,
{"type": "thread", "thread_type": "question", "user_id": str(self.student.id)},
{"type": "comment", "thread_id": "dummy"}
)
self.client.login(username=self.moderator.username, password=self.password)
response = self.client.post(
reverse("endorse_comment", kwargs={"course_id": six.text_type(self.course.id), "comment_id": "dummy"})
)
self.assertEqual(response.status_code, 200)
def test_endorse_response_as_student(self, mock_request):
self._set_mock_request_thread_and_comment(
mock_request,
{"type": "thread", "thread_type": "question", "user_id": str(self.moderator.id)},
{"type": "comment", "thread_id": "dummy"}
)
self.client.login(username=self.student.username, password=self.password)
response = self.client.post(
reverse("endorse_comment", kwargs={"course_id": six.text_type(self.course.id), "comment_id": "dummy"})
)
self.assertEqual(response.status_code, 401)
def test_endorse_response_as_student_question_author(self, mock_request):
self._set_mock_request_thread_and_comment(
mock_request,
{"type": "thread", "thread_type": "question", "user_id": str(self.student.id)},
{"type": "comment", "thread_id": "dummy"}
)
self.client.login(username=self.student.username, password=self.password)
response = self.client.post(
reverse("endorse_comment", kwargs={"course_id": six.text_type(self.course.id), "comment_id": "dummy"})
)
self.assertEqual(response.status_code, 200)
class CreateThreadUnicodeTestCase(
ForumsEnableMixin,
SharedModuleStoreTestCase,
UnicodeTestMixin,
    MockRequestSetupMixin
):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(CreateThreadUnicodeTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(CreateThreadUnicodeTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
    def _test_unicode_data(self, text, mock_request):
"""
Test to make sure unicode data in a thread doesn't break it.
"""
self._set_mock_request_data(mock_request, {})
request = RequestFactory().post("dummy_url", {"thread_type": "discussion", "body": text, "title": text})
request.user = self.student
request.view_name = "create_thread"
response = views.create_thread(
            # The commentable ID contains a username; the Unicode char below ensures it works fine
request, course_id=six.text_type(self.course.id), commentable_id=u"non_tåem_dummy_id"
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
self.assertEqual(mock_request.call_args[1]["data"]["title"], text)
@disable_signal(views, 'thread_edited')
class UpdateThreadUnicodeTestCase(
ForumsEnableMixin,
SharedModuleStoreTestCase,
UnicodeTestMixin,
MockRequestSetupMixin
):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(UpdateThreadUnicodeTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(UpdateThreadUnicodeTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
@patch(
'lms.djangoapps.discussion.django_comment_client.utils.get_discussion_categories_ids',
return_value=["test_commentable"],
)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def _test_unicode_data(self, text, mock_request, mock_get_discussion_id_map):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
request = RequestFactory().post("dummy_url", {"body": text, "title": text, "thread_type": "question", "commentable_id": "test_commentable"})
request.user = self.student
request.view_name = "update_thread"
response = views.update_thread(request, course_id=six.text_type(self.course.id), thread_id="dummy_thread_id")
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
self.assertEqual(mock_request.call_args[1]["data"]["title"], text)
self.assertEqual(mock_request.call_args[1]["data"]["thread_type"], "question")
self.assertEqual(mock_request.call_args[1]["data"]["commentable_id"], "test_commentable")
@disable_signal(views, 'comment_created')
class CreateCommentUnicodeTestCase(
ForumsEnableMixin,
SharedModuleStoreTestCase,
UnicodeTestMixin,
MockRequestSetupMixin
):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(CreateCommentUnicodeTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(CreateCommentUnicodeTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def _test_unicode_data(self, text, mock_request):
commentable_id = "non_team_dummy_id"
self._set_mock_request_data(mock_request, {
"closed": False,
"commentable_id": commentable_id
})
        # We have to get clever here because Thread uses custom setters and
        # getters, so patch() won't work on commentable_id; instead we set the
        # class attribute directly and clean it up in the finally block.
try:
Thread.commentable_id = commentable_id
request = RequestFactory().post("dummy_url", {"body": text})
request.user = self.student
request.view_name = "create_comment"
response = views.create_comment(
request, course_id=six.text_type(self.course.id), thread_id="dummy_thread_id"
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
finally:
del Thread.commentable_id
@disable_signal(views, 'comment_edited')
class UpdateCommentUnicodeTestCase(
ForumsEnableMixin,
SharedModuleStoreTestCase,
UnicodeTestMixin,
MockRequestSetupMixin
):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(UpdateCommentUnicodeTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(UpdateCommentUnicodeTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def _test_unicode_data(self, text, mock_request):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
request = RequestFactory().post("dummy_url", {"body": text})
request.user = self.student
request.view_name = "update_comment"
response = views.update_comment(request, course_id=six.text_type(self.course.id), comment_id="dummy_comment_id")
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
@disable_signal(views, 'comment_created')
class CreateSubCommentUnicodeTestCase(
ForumsEnableMixin,
SharedModuleStoreTestCase,
UnicodeTestMixin,
MockRequestSetupMixin
):
"""
Make sure comments under a response can handle unicode.
"""
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(CreateSubCommentUnicodeTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(CreateSubCommentUnicodeTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def _test_unicode_data(self, text, mock_request):
"""
Create a comment with unicode in it.
"""
self._set_mock_request_data(mock_request, {
"closed": False,
"depth": 1,
"thread_id": "test_thread",
"commentable_id": "non_team_dummy_id"
})
request = RequestFactory().post("dummy_url", {"body": text})
request.user = self.student
request.view_name = "create_sub_comment"
Thread.commentable_id = "test_commentable"
try:
response = views.create_sub_comment(
request, course_id=six.text_type(self.course.id), comment_id="dummy_comment_id"
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
finally:
del Thread.commentable_id
@ddt.ddt
@patch("openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request", autospec=True)
@disable_signal(views, 'thread_voted')
@disable_signal(views, 'thread_edited')
@disable_signal(views, 'comment_created')
@disable_signal(views, 'comment_voted')
@disable_signal(views, 'comment_deleted')
class TeamsPermissionsTestCase(ForumsEnableMixin, UrlResetMixin, SharedModuleStoreTestCase, MockRequestSetupMixin):
# Most of the test points use the same ddt data.
# args: user, commentable_id, status_code
ddt_permissions_args = [
# Student in team can do operations on threads/comments within the team commentable.
('student_in_team', 'team_commentable_id', 200),
# Non-team commentables can be edited by any student.
('student_in_team', 'course_commentable_id', 200),
# Student not in team cannot do operations within the team commentable.
('student_not_in_team', 'team_commentable_id', 401),
# Non-team commentables can be edited by any student.
('student_not_in_team', 'course_commentable_id', 200),
        # Moderators can always operate on threads within a team, regardless of team membership.
('moderator', 'team_commentable_id', 200),
# Group moderators have regular student privileges for creating a thread and commenting
('group_moderator', 'course_commentable_id', 200)
]
def change_divided_discussion_settings(self, scheme):
"""
        Change divided discussion settings for the current course and, when
        dividing by cohorts, also enable course cohorts.
"""
        enable_cohorts = scheme is CourseDiscussionSettings.COHORT
set_course_discussion_settings(
self.course.id,
enable_cohorts=enable_cohorts,
divided_discussions=[],
always_divide_inline_discussions=True,
division_scheme=scheme,
)
set_course_cohorted(self.course.id, enable_cohorts)
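    # Usage sketch: tests call, for example,
    #     self.change_divided_discussion_settings(CourseDiscussionSettings.COHORT)
    # to divide discussions by cohort (which also enables course cohorts), or
    # pass CourseDiscussionSettings.NONE to leave the course undivided.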
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(TeamsPermissionsTestCase, cls).setUpClassAndTestData():
teams_configuration = {
'topics': [{'id': "topic_id", 'name': 'Solar Power', 'description': 'Solar power is hot'}]
}
cls.course = CourseFactory.create(teams_configuration=teams_configuration)
@classmethod
def setUpTestData(cls):
super(TeamsPermissionsTestCase, cls).setUpTestData()
cls.course = CourseFactory.create()
cls.password = "test password"
seed_permissions_roles(cls.course.id)
# Create enrollment tracks
CourseModeFactory.create(
course_id=cls.course.id,
mode_slug=CourseMode.VERIFIED
)
CourseModeFactory.create(
course_id=cls.course.id,
mode_slug=CourseMode.AUDIT
)
        # Create 6 users:
# student in team (in the team, audit)
# student not in team (not in the team, audit)
# cohorted (in the cohort, audit)
# verified (not in the cohort, verified)
# moderator (in the cohort, audit, moderator permissions)
# group moderator (in the cohort, verified, group moderator permissions)
def create_users_and_enroll(coursemode):
student = UserFactory.create(password=cls.password)
CourseEnrollmentFactory(
course_id=cls.course.id,
user=student,
mode=coursemode
)
return student
cls.student_in_team, cls.student_not_in_team, cls.moderator, cls.cohorted = (
[create_users_and_enroll(CourseMode.AUDIT) for _ in range(4)])
cls.verified, cls.group_moderator = [create_users_and_enroll(CourseMode.VERIFIED) for _ in range(2)]
# Give moderator and group moderator permissions
cls.moderator.roles.add(Role.objects.get(name="Moderator", course_id=cls.course.id))
assign_role(cls.course.id, cls.group_moderator, 'Group Moderator')
# Create a team
cls.team_commentable_id = "team_discussion_id"
cls.team = CourseTeamFactory.create(
name=u'The Only Team',
course_id=cls.course.id,
topic_id='topic_id',
discussion_topic_id=cls.team_commentable_id
)
CourseTeamMembershipFactory.create(team=cls.team, user=cls.student_in_team)
# Dummy commentable ID not linked to a team
cls.course_commentable_id = "course_level_commentable"
# Create cohort and add students to it
CohortFactory(
course_id=cls.course.id,
name='Test Cohort',
users=[cls.group_moderator, cls.cohorted]
)
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(TeamsPermissionsTestCase, self).setUp()
def _setup_mock(self, user, mock_request, data):
user = getattr(self, user)
self._set_mock_request_data(mock_request, data)
self.client.login(username=user.username, password=self.password)
@ddt.data(
# student_in_team will be able to update his own post, regardless of team membership
('student_in_team', 'student_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
('student_in_team', 'student_in_team', 'course_commentable_id', 200, CourseDiscussionSettings.NONE),
# students can only update their own posts
('student_in_team', 'moderator', 'team_commentable_id', 401, CourseDiscussionSettings.NONE),
# Even though student_not_in_team is not in the team, he can still modify posts he created while in the team.
('student_not_in_team', 'student_not_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
# Moderators can change their own posts and other people's posts.
('moderator', 'moderator', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
('moderator', 'student_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
# Group moderator can do operations on commentables within their group if the course is divided
('group_moderator', 'verified', 'course_commentable_id', 200, CourseDiscussionSettings.ENROLLMENT_TRACK),
('group_moderator', 'cohorted', 'course_commentable_id', 200, CourseDiscussionSettings.COHORT),
# Group moderators cannot do operations on commentables outside of their group
('group_moderator', 'verified', 'course_commentable_id', 401, CourseDiscussionSettings.COHORT),
('group_moderator', 'cohorted', 'course_commentable_id', 401, CourseDiscussionSettings.ENROLLMENT_TRACK),
# Group moderators cannot do operations when the course is not divided
('group_moderator', 'verified', 'course_commentable_id', 401, CourseDiscussionSettings.NONE),
('group_moderator', 'cohorted', 'course_commentable_id', 401, CourseDiscussionSettings.NONE)
)
@ddt.unpack
def test_update_thread(self, user, thread_author, commentable_id, status_code, division_scheme, mock_request):
"""
Verify that update_thread is limited to thread authors and privileged users (team membership does not matter).
"""
self.change_divided_discussion_settings(division_scheme)
commentable_id = getattr(self, commentable_id)
        # thread_author is the user marked as the author of the thread being updated.
thread_author = getattr(self, thread_author)
self._setup_mock(
user, mock_request, # user is the person making the request.
{
"user_id": str(thread_author.id),
"closed": False, "commentable_id": commentable_id,
"context": "standalone",
"username": thread_author.username,
"course_id": six.text_type(self.course.id)
}
)
response = self.client.post(
reverse(
"update_thread",
kwargs={
"course_id": six.text_type(self.course.id),
"thread_id": "dummy"
}
),
data={"body": "foo", "title": "foo", "commentable_id": commentable_id}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(
# Students can delete their own posts
('student_in_team', 'student_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
# Moderators can delete any post
('moderator', 'student_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
# Others cannot delete posts
('student_in_team', 'moderator', 'team_commentable_id', 401, CourseDiscussionSettings.NONE),
('student_not_in_team', 'student_in_team', 'team_commentable_id', 401, CourseDiscussionSettings.NONE),
# Group moderator can do operations on commentables within their group if the course is divided
('group_moderator', 'verified', 'team_commentable_id', 200, CourseDiscussionSettings.ENROLLMENT_TRACK),
('group_moderator', 'cohorted', 'team_commentable_id', 200, CourseDiscussionSettings.COHORT),
# Group moderators cannot do operations on commentables outside of their group
('group_moderator', 'verified', 'team_commentable_id', 401, CourseDiscussionSettings.COHORT),
('group_moderator', 'cohorted', 'team_commentable_id', 401, CourseDiscussionSettings.ENROLLMENT_TRACK),
# Group moderators cannot do operations when the course is not divided
('group_moderator', 'verified', 'team_commentable_id', 401, CourseDiscussionSettings.NONE),
('group_moderator', 'cohorted', 'team_commentable_id', 401, CourseDiscussionSettings.NONE)
)
@ddt.unpack
def test_delete_comment(self, user, comment_author, commentable_id, status_code, division_scheme, mock_request):
commentable_id = getattr(self, commentable_id)
comment_author = getattr(self, comment_author)
self.change_divided_discussion_settings(division_scheme)
self._setup_mock(user, mock_request, {
"closed": False,
"commentable_id": commentable_id,
"user_id": str(comment_author.id),
"username": comment_author.username,
"course_id": six.text_type(self.course.id)
})
response = self.client.post(
reverse(
"delete_comment",
kwargs={
"course_id": six.text_type(self.course.id),
"comment_id": "dummy"
}
),
data={"body": "foo", "title": "foo"}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_create_comment(self, user, commentable_id, status_code, mock_request):
"""
Verify that create_comment is limited to members of the team or users with 'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
self._setup_mock(user, mock_request, {"closed": False, "commentable_id": commentable_id})
response = self.client.post(
reverse(
"create_comment",
kwargs={
"course_id": six.text_type(self.course.id),
"thread_id": "dummy"
}
),
data={"body": "foo", "title": "foo"}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_create_sub_comment(self, user, commentable_id, status_code, mock_request):
"""
        Verify that create_sub_comment is limited to members of the team or users with 'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
self._setup_mock(
user, mock_request,
{"closed": False, "commentable_id": commentable_id, "thread_id": "dummy_thread"},
)
response = self.client.post(
reverse(
"create_sub_comment",
kwargs={
"course_id": six.text_type(self.course.id),
"comment_id": "dummy_comment"
}
),
data={"body": "foo", "title": "foo"}
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_comment_actions(self, user, commentable_id, status_code, mock_request):
"""
Verify that voting and flagging of comments is limited to members of the team or users with
'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
self._setup_mock(
user, mock_request,
{"closed": False, "commentable_id": commentable_id, "thread_id": "dummy_thread"},
)
for action in ["upvote_comment", "downvote_comment", "un_flag_abuse_for_comment", "flag_abuse_for_comment"]:
response = self.client.post(
reverse(
action,
kwargs={"course_id": six.text_type(self.course.id), "comment_id": "dummy_comment"}
)
)
self.assertEqual(response.status_code, status_code)
@ddt.data(*ddt_permissions_args)
@ddt.unpack
def test_threads_actions(self, user, commentable_id, status_code, mock_request):
"""
Verify that voting, flagging, and following of threads is limited to members of the team or users with
'edit_content' permission.
"""
commentable_id = getattr(self, commentable_id)
self._setup_mock(
user, mock_request,
{"closed": False, "commentable_id": commentable_id},
)
for action in ["upvote_thread", "downvote_thread", "un_flag_abuse_for_thread", "flag_abuse_for_thread",
"follow_thread", "unfollow_thread"]:
response = self.client.post(
reverse(
action,
kwargs={"course_id": six.text_type(self.course.id), "thread_id": "dummy_thread"}
)
)
self.assertEqual(response.status_code, status_code)
TEAM_COMMENTABLE_ID = 'test-team-discussion'
@disable_signal(views, 'comment_created')
@ddt.ddt
class ForumEventTestCase(ForumsEnableMixin, SharedModuleStoreTestCase, MockRequestSetupMixin):
"""
Forum actions are expected to launch analytics events. Test these here.
"""
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(ForumEventTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(ForumEventTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
cls.student.roles.add(Role.objects.get(name="Student", course_id=cls.course.id))
CourseAccessRoleFactory(course_id=cls.course.id, user=cls.student, role='Wizard')
@patch('eventtracking.tracker.emit')
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def test_response_event(self, mock_request, mock_emit):
"""
Check to make sure an event is fired when a user responds to a thread.
"""
self._set_mock_request_data(mock_request, {
"closed": False,
"commentable_id": 'test_commentable_id',
'thread_id': 'test_thread_id',
})
request = RequestFactory().post("dummy_url", {"body": "Test comment", 'auto_subscribe': True})
request.user = self.student
request.view_name = "create_comment"
views.create_comment(request, course_id=six.text_type(self.course.id), thread_id='test_thread_id')
event_name, event = mock_emit.call_args[0]
self.assertEqual(event_name, 'edx.forum.response.created')
self.assertEqual(event['body'], "Test comment")
self.assertEqual(event['commentable_id'], 'test_commentable_id')
self.assertEqual(event['user_forums_roles'], ['Student'])
self.assertEqual(event['user_course_roles'], ['Wizard'])
self.assertEqual(event['discussion']['id'], 'test_thread_id')
self.assertEqual(event['options']['followed'], True)
@patch('eventtracking.tracker.emit')
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def test_comment_event(self, mock_request, mock_emit):
"""
Ensure an event is fired when someone comments on a response.
"""
self._set_mock_request_data(mock_request, {
"closed": False,
"depth": 1,
"thread_id": "test_thread_id",
"commentable_id": "test_commentable_id",
"parent_id": "test_response_id"
})
request = RequestFactory().post("dummy_url", {"body": "Another comment"})
request.user = self.student
request.view_name = "create_sub_comment"
views.create_sub_comment(request, course_id=six.text_type(self.course.id), comment_id="dummy_comment_id")
event_name, event = mock_emit.call_args[0]
self.assertEqual(event_name, "edx.forum.comment.created")
self.assertEqual(event['body'], 'Another comment')
self.assertEqual(event['discussion']['id'], 'test_thread_id')
self.assertEqual(event['response']['id'], 'test_response_id')
self.assertEqual(event['user_forums_roles'], ['Student'])
self.assertEqual(event['user_course_roles'], ['Wizard'])
self.assertEqual(event['options']['followed'], False)
@patch('eventtracking.tracker.emit')
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
@ddt.data((
'create_thread',
'edx.forum.thread.created', {
'thread_type': 'discussion',
'body': 'Test text',
'title': 'Test',
'auto_subscribe': True
},
{'commentable_id': TEAM_COMMENTABLE_ID}
), (
'create_comment',
'edx.forum.response.created',
{'body': 'Test comment', 'auto_subscribe': True},
{'thread_id': 'test_thread_id'}
), (
'create_sub_comment',
'edx.forum.comment.created',
{'body': 'Another comment'},
{'comment_id': 'dummy_comment_id'}
))
@ddt.unpack
def test_team_events(self, view_name, event_name, view_data, view_kwargs, mock_request, mock_emit):
user = self.student
team = CourseTeamFactory.create(discussion_topic_id=TEAM_COMMENTABLE_ID)
CourseTeamMembershipFactory.create(team=team, user=user)
self._set_mock_request_data(mock_request, {
'closed': False,
'commentable_id': TEAM_COMMENTABLE_ID,
'thread_id': 'test_thread_id',
})
request = RequestFactory().post('dummy_url', view_data)
request.user = user
request.view_name = view_name
getattr(views, view_name)(request, course_id=six.text_type(self.course.id), **view_kwargs)
name, event = mock_emit.call_args[0]
self.assertEqual(name, event_name)
self.assertEqual(event['team_id'], team.team_id)
@ddt.data(
('vote_for_thread', 'thread_id', 'thread'),
('undo_vote_for_thread', 'thread_id', 'thread'),
('vote_for_comment', 'comment_id', 'response'),
('undo_vote_for_comment', 'comment_id', 'response'),
)
@ddt.unpack
@patch('eventtracking.tracker.emit')
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def test_thread_voted_event(self, view_name, obj_id_name, obj_type, mock_request, mock_emit):
undo = view_name.startswith('undo')
self._set_mock_request_data(mock_request, {
'closed': False,
'commentable_id': 'test_commentable_id',
'username': 'gumprecht',
})
request = RequestFactory().post('dummy_url', {})
request.user = self.student
request.view_name = view_name
view_function = getattr(views, view_name)
kwargs = dict(course_id=six.text_type(self.course.id))
kwargs[obj_id_name] = obj_id_name
if not undo:
kwargs.update(value='up')
view_function(request, **kwargs)
self.assertTrue(mock_emit.called)
event_name, event = mock_emit.call_args[0]
self.assertEqual(event_name, 'edx.forum.{}.voted'.format(obj_type))
self.assertEqual(event['target_username'], 'gumprecht')
self.assertEqual(event['undo_vote'], undo)
self.assertEqual(event['vote_value'], 'up')
class UsersEndpointTestCase(ForumsEnableMixin, SharedModuleStoreTestCase, MockRequestSetupMixin):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(UsersEndpointTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create()
@classmethod
def setUpTestData(cls):
super(UsersEndpointTestCase, cls).setUpTestData()
seed_permissions_roles(cls.course.id)
cls.student = UserFactory.create()
cls.enrollment = CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
cls.other_user = UserFactory.create(username="other")
CourseEnrollmentFactory(user=cls.other_user, course_id=cls.course.id)
def set_post_counts(self, mock_request, threads_count=1, comments_count=1):
"""
sets up a mock response from the comments service for getting post counts for our other_user
"""
self._set_mock_request_data(mock_request, {
"threads_count": threads_count,
"comments_count": comments_count,
})
def make_request(self, method='get', course_id=None, **kwargs):
course_id = course_id or self.course.id
request = getattr(RequestFactory(), method)("dummy_url", kwargs)
request.user = self.student
request.view_name = "users"
return views.users(request, course_id=text_type(course_id))
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def test_finds_exact_match(self, mock_request):
self.set_post_counts(mock_request)
response = self.make_request(username="other")
self.assertEqual(response.status_code, 200)
self.assertEqual(
json.loads(response.content.decode('utf-8'))["users"],
[{"id": self.other_user.id, "username": self.other_user.username}]
)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def test_finds_no_match(self, mock_request):
self.set_post_counts(mock_request)
response = self.make_request(username="othor")
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content.decode('utf-8'))["users"], [])
def test_requires_GET(self):
response = self.make_request(method='post', username="other")
self.assertEqual(response.status_code, 405)
def test_requires_username_param(self):
response = self.make_request()
self.assertEqual(response.status_code, 400)
content = json.loads(response.content.decode('utf-8'))
self.assertIn("errors", content)
self.assertNotIn("users", content)
def test_course_does_not_exist(self):
course_id = CourseKey.from_string("does/not/exist")
response = self.make_request(course_id=course_id, username="other")
self.assertEqual(response.status_code, 404)
content = json.loads(response.content.decode('utf-8'))
self.assertIn("errors", content)
self.assertNotIn("users", content)
def test_requires_requestor_enrolled_in_course(self):
# unenroll self.student from the course.
self.enrollment.delete()
response = self.make_request(username="other")
self.assertEqual(response.status_code, 404)
content = json.loads(response.content.decode('utf-8'))
self.assertIn("errors", content)
self.assertNotIn("users", content)
@patch('openedx.core.djangoapps.django_comment_common.comment_client.utils.requests.request', autospec=True)
def test_requires_matched_user_has_forum_content(self, mock_request):
self.set_post_counts(mock_request, 0, 0)
response = self.make_request(username="other")
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content.decode('utf-8'))["users"], [])
@ddt.ddt
class SegmentIOForumThreadViewedEventTestCase(SegmentIOTrackingTestCaseBase):
def _raise_navigation_event(self, label, include_name):
middleware = TrackMiddleware()
kwargs = {'label': label}
if include_name:
kwargs['name'] = 'edx.bi.app.navigation.screen'
else:
kwargs['exclude_name'] = True
request = self.create_request(
data=self.create_segmentio_event_json(**kwargs),
content_type='application/json',
)
User.objects.create(pk=SEGMENTIO_TEST_USER_ID, username=str(mock.sentinel.username))
middleware.process_request(request)
try:
response = segmentio.segmentio_event(request)
self.assertEqual(response.status_code, 200)
finally:
middleware.process_response(request, None)
@ddt.data(True, False)
def test_thread_viewed(self, include_name):
"""
Tests that a SegmentIO thread viewed event is accepted and transformed.
Only tests that the transformation happens at all; does not
comprehensively test that it happens correctly.
ForumThreadViewedEventTransformerTestCase tests for correctness.
"""
self._raise_navigation_event('Forum: View Thread', include_name)
event = self.get_event()
self.assertEqual(event['name'], 'edx.forum.thread.viewed')
self.assertEqual(event['event_type'], event['name'])
@ddt.data(True, False)
def test_non_thread_viewed(self, include_name):
"""
Tests that other BI events are thrown out.
"""
self._raise_navigation_event('Forum: Create Thread', include_name)
self.assert_no_events_emitted()
def _get_transformed_event(input_event):
transformer = ForumThreadViewedEventTransformer(**input_event)
transformer.transform()
return transformer
def _create_event(
label='Forum: View Thread',
include_context=True,
inner_context=None,
username=None,
course_id=None,
**event_data
):
result = {'name': 'edx.bi.app.navigation.screen'}
if include_context:
result['context'] = {'label': label}
if course_id:
result['context']['course_id'] = str(course_id)
if username:
result['username'] = username
if event_data:
result['event'] = event_data
if inner_context:
if not event_data:
result['event'] = {}
result['event']['context'] = inner_context
return result
def _create_and_transform_event(**kwargs):
event = _create_event(**kwargs)
return event, _get_transformed_event(event)
@ddt.ddt
class ForumThreadViewedEventTransformerTestCase(ForumsEnableMixin, UrlResetMixin, ModuleStoreTestCase):
"""
Test that the ForumThreadViewedEventTransformer transforms events correctly
and without raising exceptions.
Because the events passed through the transformer can come from external
sources (e.g., a mobile app), we carefully test a myriad of cases, including
those with incomplete and malformed events.
"""
CATEGORY_ID = 'i4x-edx-discussion-id'
CATEGORY_NAME = 'Discussion 1'
PARENT_CATEGORY_NAME = 'Chapter 1'
TEAM_CATEGORY_ID = 'i4x-edx-team-discussion-id'
TEAM_CATEGORY_NAME = 'Team Chat'
TEAM_PARENT_CATEGORY_NAME = PARENT_CATEGORY_NAME
DUMMY_CATEGORY_ID = 'i4x-edx-dummy-commentable-id'
DUMMY_THREAD_ID = 'dummy_thread_id'
@mock.patch.dict("student.models.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(ForumThreadViewedEventTransformerTestCase, self).setUp()
self.courses_by_store = {
ModuleStoreEnum.Type.mongo: CourseFactory.create(
org='TestX',
course='TR-101',
run='Event_Transform_Test',
default_store=ModuleStoreEnum.Type.mongo,
),
ModuleStoreEnum.Type.split: CourseFactory.create(
org='TestX',
course='TR-101S',
run='Event_Transform_Test_Split',
default_store=ModuleStoreEnum.Type.split,
),
}
self.course = self.courses_by_store['mongo']
self.student = UserFactory.create()
self.staff = UserFactory.create(is_staff=True)
UserBasedRole(user=self.staff, role=CourseStaffRole.ROLE).add_course(self.course.id)
CourseEnrollmentFactory.create(user=self.student, course_id=self.course.id)
self.category = ItemFactory.create(
parent_location=self.course.location,
category='discussion',
discussion_id=self.CATEGORY_ID,
discussion_category=self.PARENT_CATEGORY_NAME,
discussion_target=self.CATEGORY_NAME,
)
self.team_category = ItemFactory.create(
parent_location=self.course.location,
category='discussion',
discussion_id=self.TEAM_CATEGORY_ID,
discussion_category=self.TEAM_PARENT_CATEGORY_NAME,
discussion_target=self.TEAM_CATEGORY_NAME,
)
self.team = CourseTeamFactory.create(
name='Team 1',
course_id=self.course.id,
topic_id='arbitrary-topic-id',
discussion_topic_id=self.team_category.discussion_id,
)
def test_missing_context(self):
event = _create_event(include_context=False)
with self.assertRaises(EventEmissionExit):
_get_transformed_event(event)
def test_no_data(self):
event, event_trans = _create_and_transform_event()
event['name'] = 'edx.forum.thread.viewed'
event['event_type'] = event['name']
event['event'] = {}
self.assertDictEqual(event_trans, event)
def test_inner_context(self):
_, event_trans = _create_and_transform_event(inner_context={})
self.assertNotIn('context', event_trans['event'])
def test_non_thread_view(self):
event = _create_event(
label='Forum: Create Thread',
course_id=self.course.id,
topic_id=self.DUMMY_CATEGORY_ID,
thread_id=self.DUMMY_THREAD_ID,
)
with self.assertRaises(EventEmissionExit):
_get_transformed_event(event)
def test_bad_field_types(self):
event, event_trans = _create_and_transform_event(
course_id={},
topic_id=3,
thread_id=object(),
action=3.14,
)
event['name'] = 'edx.forum.thread.viewed'
event['event_type'] = event['name']
self.assertDictEqual(event_trans, event)
def test_bad_course_id(self):
event, event_trans = _create_and_transform_event(course_id='non-existent-course-id')
event_data = event_trans['event']
self.assertNotIn('category_id', event_data)
self.assertNotIn('category_name', event_data)
self.assertNotIn('url', event_data)
self.assertNotIn('user_forums_roles', event_data)
self.assertNotIn('user_course_roles', event_data)
def test_bad_username(self):
event, event_trans = _create_and_transform_event(username='non-existent-username')
event_data = event_trans['event']
self.assertNotIn('category_id', event_data)
self.assertNotIn('category_name', event_data)
self.assertNotIn('user_forums_roles', event_data)
self.assertNotIn('user_course_roles', event_data)
def test_bad_url(self):
event, event_trans = _create_and_transform_event(
course_id=self.course.id,
topic_id='malformed/commentable/id',
thread_id='malformed/thread/id',
)
self.assertNotIn('url', event_trans['event'])
def test_renamed_fields(self):
AUTHOR = 'joe-the-plumber'
event, event_trans = _create_and_transform_event(
course_id=self.course.id,
topic_id=self.DUMMY_CATEGORY_ID,
thread_id=self.DUMMY_THREAD_ID,
author=AUTHOR,
)
self.assertEqual(event_trans['event']['commentable_id'], self.DUMMY_CATEGORY_ID)
self.assertEqual(event_trans['event']['id'], self.DUMMY_THREAD_ID)
self.assertEqual(event_trans['event']['target_username'], AUTHOR)
def test_titles(self):
# No title
_, event_1_trans = _create_and_transform_event()
self.assertNotIn('title', event_1_trans['event'])
self.assertNotIn('title_truncated', event_1_trans['event'])
# Short title
_, event_2_trans = _create_and_transform_event(
action='!',
)
self.assertIn('title', event_2_trans['event'])
self.assertIn('title_truncated', event_2_trans['event'])
self.assertFalse(event_2_trans['event']['title_truncated'])
# Long title
_, event_3_trans = _create_and_transform_event(
action=('covfefe' * 200),
)
self.assertIn('title', event_3_trans['event'])
self.assertIn('title_truncated', event_3_trans['event'])
self.assertTrue(event_3_trans['event']['title_truncated'])
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_urls(self, store):
course = self.courses_by_store[store]
commentable_id = self.DUMMY_CATEGORY_ID
thread_id = self.DUMMY_THREAD_ID
_, event_trans = _create_and_transform_event(
course_id=course.id,
topic_id=commentable_id,
thread_id=thread_id,
)
expected_path = '/courses/{0}/discussion/forum/{1}/threads/{2}'.format(
course.id, commentable_id, thread_id
)
self.assertTrue(event_trans['event'].get('url').endswith(expected_path))
def test_categories(self):
# Bad category
_, event_trans_1 = _create_and_transform_event(
username=self.student.username,
course_id=self.course.id,
topic_id='non-existent-category-id',
)
self.assertNotIn('category_id', event_trans_1['event'])
self.assertNotIn('category_name', event_trans_1['event'])
# Good category
_, event_trans_2 = _create_and_transform_event(
username=self.student.username,
course_id=self.course.id,
topic_id=self.category.discussion_id,
)
self.assertEqual(event_trans_2['event'].get('category_id'), self.category.discussion_id)
full_category_name = u'{0} / {1}'.format(self.category.discussion_category, self.category.discussion_target)
self.assertEqual(event_trans_2['event'].get('category_name'), full_category_name)
def test_roles(self):
# No user
_, event_trans_1 = _create_and_transform_event(
course_id=self.course.id,
)
self.assertNotIn('user_forums_roles', event_trans_1['event'])
self.assertNotIn('user_course_roles', event_trans_1['event'])
# Student user
_, event_trans_2 = _create_and_transform_event(
course_id=self.course.id,
username=self.student.username,
)
self.assertEqual(event_trans_2['event'].get('user_forums_roles'), [FORUM_ROLE_STUDENT])
self.assertEqual(event_trans_2['event'].get('user_course_roles'), [])
# Course staff user
_, event_trans_3 = _create_and_transform_event(
course_id=self.course.id,
username=self.staff.username,
)
self.assertEqual(event_trans_3['event'].get('user_forums_roles'), [])
self.assertEqual(event_trans_3['event'].get('user_course_roles'), [CourseStaffRole.ROLE])
def test_teams(self):
# No category
_, event_trans_1 = _create_and_transform_event(
course_id=self.course.id,
)
self.assertNotIn('team_id', event_trans_1)
# Non-team category
_, event_trans_2 = _create_and_transform_event(
course_id=self.course.id,
topic_id=self.CATEGORY_ID,
)
self.assertNotIn('team_id', event_trans_2)
# Team category
_, event_trans_3 = _create_and_transform_event(
course_id=self.course.id,
topic_id=self.TEAM_CATEGORY_ID,
)
self.assertEqual(event_trans_3['event'].get('team_id'), self.team.team_id)
| agpl-3.0 | 6,711,102,984,654,132,000 | 38.870216 | 148 | 0.599713 | false |
agdsn/pycroft | pycroft/model/address.py | 1 | 3378 | # -*- coding: utf-8 -*-
# Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from typing import List
from sqlalchemy import Column, String, UniqueConstraint
from pycroft.model import ddl
from pycroft.model.base import IntegerIdModel
DEFAULT_CITY = "Dresden"
DEFAULT_COUNTRY = "Germany"
class Address(IntegerIdModel):
"""A known address.
Addresses differ from most other entities such as users or rooms in the following ways:
- Their identity is provided by their value, i.e. if two addresses have equal values,
      they should be identical
- Their existence is justified solely by the reference of another object.
At no point in time should there be any unreferenced address records in the db.
- They should be immutable: This implies that editing e.g. the street of a user's address
should not change the street of the corresponding room's address.
This implies that addresses are *stateless*, i.e. have no life cycle.
Establishing these consistencies requires triggers.
"""
street = Column(String(), nullable=False)
number = Column(String(), nullable=False)
addition = Column(String(), nullable=False, server_default="")
# Sometimes, zipcodes can contain things like dashes, so rather take String().
# we could probably impose some format by a check but that would be over engineering
zip_code = Column(String(), nullable=False)
city = Column(String(), nullable=False, server_default=DEFAULT_CITY)
state = Column(String(), nullable=False, server_default="")
country = Column(String(), nullable=False, server_default=DEFAULT_COUNTRY)
__table_args__ = (
UniqueConstraint('street', 'number', 'addition', 'zip_code', 'city', 'state', 'country'),
)
def __str__(self):
return f"{self:short}"
def __format__(self, spec="short"):
"""Return the address items separated by the format specifier"""
city = self.city.upper() if self.country and self.country != DEFAULT_COUNTRY else self.city
items: List[str] = [f"{self.street} {self.number} // {self.addition}" if self.addition
else f"{self.street} {self.number}", f"{self.zip_code} {city}"]
if self.state:
state = self.state.upper() if self.country and self.country != DEFAULT_COUNTRY else self.state
items.append(f"{state}")
if self.country and self.country != DEFAULT_COUNTRY:
items.append(f"{self.country.upper()}")
glue = ", " if spec == "short" else "\n" if spec == "long" else spec
return glue.join(items)
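# --- Illustrative usage sketch (added for clarity; not part of the original
# module). Field values are invented, and city/country are passed explicitly
# because the server-side defaults only apply once a row is persisted:
#
#     addr = Address(street="Wundtstrasse", number="5", addition="",
#                    zip_code="01217", city="Dresden", state="", country="Germany")
#     format(addr, "short")  # -> 'Wundtstrasse 5, 01217 Dresden'
#     format(addr, "long")   # -> 'Wundtstrasse 5\n01217 Dresden'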
manager = ddl.DDLManager()
address_remove_orphans = ddl.Function(
'address_remove_orphans', [], 'trigger',
""" BEGIN
delete from address
where not exists (select 1 from room where room.address_id = address.id)
and not exists (select 1 from "user" where "user".address_id = address.id);
RETURN NULL;
END;""",
volatility='volatile', strict=True, language='plpgsql'
)
manager.add_function(Address.__table__, address_remove_orphans)
# User trigger for the respective backref added in `user.py`
# Room trigger for the respective backref added in `facilities.py`
manager.register()
| apache-2.0 | -3,732,029,067,401,948,700 | 41.759494 | 106 | 0.68206 | false |
Kruehlio/MUSEspec | utils/starlight.py | 1 | 15375 | # -*- coding: utf-8 -*-
""" Spectrum class for running starlight on spectra. Particularly for
MUSE cubes
"""
import matplotlib
matplotlib.use('Agg')
import os
import numpy as np
import scipy as sp
import shutil
import time
import platform
import matplotlib.pyplot as plt
import logging
from ..MUSEio.museio import asciiout, cubeout
logfmt = '%(levelname)s [%(asctime)s]: %(message)s'
datefmt= '%Y-%m-%d %H:%M:%S'
formatter = logging.Formatter(fmt=logfmt,datefmt=datefmt)
logger = logging.getLogger('__main__')
logging.root.setLevel(logging.DEBUG)
ch = logging.StreamHandler() #console handler
ch.setFormatter(formatter)
logger.handlers = []
logger.addHandler(ch)
SL_BASE_ALL = os.path.join(os.path.dirname(__file__), "../etc/Base.BC03.S")
SL_BASE_FEW = os.path.join(os.path.dirname(__file__), "../etc/Base.BC03.N")
SL_BASE_BB = os.path.join(os.path.dirname(__file__), "../etc/Base.BC03.15lh")
SL_CONFIG = os.path.join(os.path.dirname(__file__), "../etc/MUSE_SLv01.config")
SL_MASK = os.path.join(os.path.dirname(__file__), "../etc/Masks.EmLines.SDSS.gm")
SL_BASES = os.path.join(os.path.dirname(__file__), "../etc/bases")
if platform.platform().startswith('Linux'):
SL_EXE = os.path.join(os.path.dirname(__file__), "../etc/starlight")
else:
SL_EXE = os.path.join(os.path.dirname(__file__), "../etc/starlight_mac")
class StarLight:
""" StarLight class for fitting """
def __init__(self, filen, verbose=0, minwl=None, maxwl=None,
run=1, bases='FEW', inst='MUSE', red='CAL'):
self.specfile = filen
        if minwl is None:
self.minwl=3330
else:
self.minwl=minwl
        if maxwl is None:
self.maxwl=9400
else:
self.maxwl=maxwl
self.cwd = os.getcwd()
root, ext = os.path.splitext(filen)
self.output = os.path.join(root+'_sl_out'+ext)
self.sllog = root+'_sl_log'+ext
self.seed = np.random.randint(1E6, 9E6)
self.inst = inst
self.red = red
basewdir = os.path.join(self.cwd, 'bases')
if not os.path.isdir(basewdir):
os.makedirs(basewdir)
if bases == 'FEW':
shutil.copy(SL_BASE_FEW, self.cwd)
self.bases = SL_BASE_FEW
elif bases == 'ALL':
shutil.copy(SL_BASE_ALL, self.cwd)
self.bases = SL_BASE_ALL
elif bases == 'BB':
shutil.copy(SL_BASE_BB, self.cwd)
self.bases = SL_BASE_BB
shutil.copy(SL_CONFIG, self.cwd)
f = open(self.bases)
basescps = [g for g in f.readlines() if not g.startswith('#')]
f.close()
for basescp in basescps:
baseraw = os.path.join(SL_BASES, basescp.split()[0])
if os.path.isfile(baseraw):
shutil.copy(baseraw, basewdir)
if not os.path.isfile(SL_EXE):
print ('ERROR: STARLIGHT executable not found')
raise SystemExit
if run == 1:
self._makeGrid()
self._runGrid()
def _makeGrid(self, name='muse_grid.in'):
headkey = ['[Number of fits to run]',
'[base_dir]', '[obs_dir]', '[mask_dir]', '[out_dir]',
'[seed]', '[llow_SN]', '[lupp_SN]', '[Olsyn_ini]',
'[Olsyn_fin]', '[Odlsyn]', '[fscale_chi2]', '[FIT/FXK]',
'[IsErrSpecAvailable]', '[IsFlagSpecAvailable]']
speckey = ['spectrum', 'config', 'bases', 'masks', 'red', 'v0_start',
'vd_start', 'output']
header = {'[Number of fits to run]': '1',
'[base_dir]': self.cwd+'/bases/',
'[obs_dir]' :self.cwd+'/',
'[mask_dir]' : os.path.split(SL_MASK)[0]+'/',
'[out_dir]': self.cwd+'/',
'[seed]': self.seed,
'[llow_SN]': 5200,
'[lupp_SN]': 5400,
'[Olsyn_ini]': self.minwl,
'[Olsyn_fin]': self.maxwl,
'[Odlsyn]':1.0,
'[fscale_chi2]':1.0,
'[FIT/FXK]': 'FIT',
'[IsErrSpecAvailable]':'1',
'[IsFlagSpecAvailable]':'1'}
specline = {'spectrum': self.specfile,
'config': os.path.split(SL_CONFIG)[-1],
'bases': os.path.split(self.bases)[-1],
'masks': os.path.split(SL_MASK)[-1],
'red' : self.red,
'v0_start': 0,
'vd_start': 50,
'output': self.output}
f = open(name, 'w')
for head in headkey:
f.write('%s %s\n' %(header[head], head))
for spec in speckey:
f.write('%s ' %(specline[spec]))
f.write('\n')
self.grid = name
def _runGrid(self, cleanup=True):
t1 = time.time()
slarg = [SL_EXE, '<', self.grid, '>', self.sllog]
os.system(' '.join(slarg))
# Cleanup
if cleanup == True:
shutil.rmtree('bases')
os.remove(os.path.join(self.cwd, os.path.split(self.bases)[-1]))
os.remove(os.path.join(self.cwd, os.path.split(SL_CONFIG)[-1]))
return time.time()-t1
def modOut(self, plot=0, minwl=3860, maxwl=4470,
rm=True):
starwl, starfit = np.array([]), np.array([])
        datawl, data, gas, stars = (np.array([]) for _ in range(4))
success, run, norm, v0, vd, av = 0, 0, 1, -1, -1, -1
try:
f = open(self.output)
output = f.readlines()
f.close()
if rm == True:
os.remove(self.sllog)
slpath = os.path.join(self.cwd, 'sl_fits')
if not os.path.isdir(slpath):
os.makedirs(slpath)
slout = os.path.join(slpath, self.output)
if os.path.isfile(slout):
os.remove(slout)
shutil.move(self.output, os.path.join(self.cwd, 'sl_fits'))
run = 1
except IOError:
pass
if run == 1:
for out in output:
outsplit = out.split()
if outsplit[1:] == ['[fobs_norm', '(in', 'input', 'units)]']:
norm = float(outsplit[0])
success = 1
if outsplit[1:] == ['Run', 'aborted:(']:
break
if len(outsplit) == 4:
try:
outsplit = [float(a) for a in outsplit]
if float(outsplit[0]) >= self.minwl:
starfit = np.append(starfit, outsplit[2])
starwl = np.append(starwl, outsplit[0])
if outsplit[3] != -2:
data = np.append(data, outsplit[1])
gas = np.append(gas, outsplit[1]-outsplit[2] )
stars = np.append(stars, outsplit[2])
datawl = np.append(datawl, outsplit[0])
except ValueError:
pass
if len(outsplit) == 3:
if outsplit[1] == '[v0_min':
v0 = float(outsplit[0])
if outsplit[1] == '[vd_min':
vd = float(outsplit[0])
if outsplit[1] == '[AV_min':
av = float(outsplit[0])
if plot == 1:
sel0 = (datawl > minwl) * (datawl < maxwl)
sel1 = (datawl > 3860) * (datawl < 4630)
sel2 = (datawl > 4730) * (datawl < 5230)
sel3 = (datawl > 6420) * (datawl < 7020)
fig1 = plt.figure(figsize = (5,8.4))
fig1.subplots_adjust(bottom=0.10, top=0.99, left=0.15, right=0.98)
ax1 = fig1.add_subplot(3, 1, 1)
ax2 = fig1.add_subplot(3, 1, 2)
ax3 = fig1.add_subplot(3, 1, 3)
for ax in [ax1, ax2, ax3]:
ax.plot(datawl, 0*datawl, '--', color ='grey')
ax.plot(datawl, norm*gas, '-', color ='black')
ax.plot(datawl, norm*data, '-', color ='firebrick', lw=2)
ax.plot(starwl, norm*starfit, '-', color ='green')
ax.set_ylabel(r'$F_{\lambda}\,\rm{(10^{-17}\,erg\,s^{-1}\,cm^{-2}\, \AA^{-1})}$',
fontsize=16)
ax3.set_xlabel(r'Restframe wavelength $(\AA)$', fontsize=16)
ax1.set_xlim(3860, 4630)
ax3.set_xlim(6420, 6780)
ax2.set_xlim(4750, 5230)
ax1.set_ylim(norm*np.min(gas[sel1]), norm*np.max(data[sel1])*1.05)
ax2.set_ylim(norm*np.min(gas[sel2]), norm*np.max(data[sel2])*1.05)
ax3.set_ylim(norm*np.min(gas[sel3]), norm*np.max(data[sel3])*1.05)
fig1.savefig('%s_starlight.pdf' %(self.inst))
plt.close(fig1)
fig2 = plt.figure(figsize = (8,5))
fig2.subplots_adjust(bottom=0.14, top=0.99, left=0.12, right=0.98)
ax = fig2.add_subplot(1, 1, 1)
ax.plot(datawl, 0*datawl, '--', color ='grey')
ax.plot(datawl, norm*gas, '-', color ='black')
ax.plot(datawl, norm*data, '-', color ='firebrick', lw=2)
ax.plot(starwl, norm*starfit, '-', color ='green')
ax.set_ylabel(r'$F_{\lambda}\,\rm{(10^{-17}\,erg\,s^{-1}\,cm^{-2}\, \AA^{-1})}$',
fontsize=16)
ax.set_xlabel(r'Restframe wavelength $(\AA)$', fontsize=16)
ax.set_xlim(np.min(datawl[sel0]), np.max(datawl[sel0]))
ax.set_ylim(norm*np.min(gas[sel0]), norm*np.max(data[sel0])*1.05)
fig2.savefig('%s_starlight_all.pdf' %(self.inst))
plt.close(fig2)
return datawl, data, stars, norm, success, v0, vd, av
def runStar(s3d, ascii, starres = None, minwl=None, maxwl=None,
plot=0, verbose=1, rm=True, bases='ALL'):
""" Convinience function to run starlight on an ascii file returning its
spectral fit and bring it into original rest-frame wavelength scale again
Parameters
----------
ascii : str
Filename of spectrum in Format WL SPEC ERR FLAG
Returns
----------
    data : np.array (array of zeros if starlight not successful)
        Original data (resampled twice, to check for accuracy)
    star : np.array (array of zeros if starlight not successful)
        Starlight fit
    success : int
        Flag whether starlight was executed successfully
"""
if verbose == 1:
logger.info('Starting starlight')
    if starres is None:
starres = '%s_star_res.txt' %(s3d.inst)
if os.path.isfile(starres):
os.remove(starres)
t1 = time.time()
sl = StarLight(filen=ascii, bases=bases, minwl=minwl, maxwl=maxwl)
datawl, data, stars, norm, success, v0, vd, av =\
sl.modOut(plot=plot, rm=rm, minwl=minwl, maxwl=maxwl)
zerospec = np.zeros(s3d.wave.shape)
if success == 1:
if verbose == 1:
logger.info('Running starlight took %.2f s' %(time.time() - t1))
s = sp.interpolate.InterpolatedUnivariateSpline(datawl*(1+s3d.z),
data*1E3*norm/(1+s3d.z))
t = sp.interpolate.InterpolatedUnivariateSpline(datawl*(1+s3d.z),
stars*1E3*norm/(1+s3d.z))
return s(s3d.wave), t(s3d.wave), success, v0, vd, av
else:
if verbose ==1:
logger.info('Starlight failed in %.2f s' %(time.time() - t1))
return zerospec, zerospec, success, v0, vd, av
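# --- Illustrative call pattern (added for clarity; not part of the original
# module). `s3d` stands for a spectrum-cube object exposing `wave`, `z` and
# `inst` as used above; the ASCII file name is invented:
#
#     data, stars, ok, v0, vd, av = runStar(s3d, 'spec_rest.txt', bases='FEW')
#     if ok:
#         gas = data - stars   # continuum-subtracted (emission-line) spectrum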
def subStars(s3d, x, y, size=0, verbose=1,
inst='MUSE', bases='ALL', starres=None):
""" Convinience function to subtract a starlight fit based on a single
spectrum from many spaxels
Parameters
----------
x : integer
x-Index of region center
y : integer
y-Index of region center
size : integer
Size of square around center (x,y +/- size)
"""
    if starres is None:
starres = '%s_x%i_y%i_star_res.txt' %(s3d.inst, x, y)
if os.path.isfile(starres):
os.remove(starres)
wl, spec, err = s3d.extrSpec(x=x, y=y, size=size, verbose=0)
ascii = asciiout(s3d=s3d, wl=wl, spec=spec, err=err, frame='rest',
resample = 1, name='%s_%s_%s' %(x, y, size), fmt='txt')
data, stars, success, v0, vd, av = runStar(s3d, ascii, bases=bases, verbose=0)
f = open(starres, 'a')
f.write('%i\t%i\t%.1f\t%.1f\t%.3f\n' %(x, y, v0, vd, av))
f.close()
os.remove(ascii)
miny, maxy = max(0, y-size), min(s3d.leny-1, y+size+1)
minx, maxx = max(0, x-size), min(s3d.lenx-1, x+size+1)
xindizes = np.arange(minx, maxx, 1)
yindizes = np.arange(miny, maxy, 1)
zerospec = np.zeros(s3d.wave.shape)
if success == 1:
# rs = data/spec
# logger.info('Resampling accuracy %.3f +/- %.3f' \
# %(np.nanmedian(rs), np.nanstd(rs[1:-1])))
for xindx in xindizes:
for yindx in yindizes:
wl, spec, err = s3d.extrSpec(x=xindx, y=yindx, verbose=verbose)
# Renormalize to actual spectrum
substars = np.nanmedian(spec/data)*stars
# Overwrite starcube with fitted values
s3d.starcube[:, yindx, xindx] = substars
else:
for xindx in xindizes:
for yindx in yindizes:
                # No success
s3d.starcube[:, yindx, xindx] = zerospec
return
def subAllStars(s3d, dx=2, nc=None, x1=None, x2=None, y1=None, y2=None,
bases = 'FEW'):
"""
    Convenience function to subtract starlight fits on the full cube. Can work
with subcubes defined by x1, x2, y1, y2. Resamples by a factor of 2*dx+1.
"""
logger.info("Starting starlight on full cube with %i cores" %s3d.ncores)
logger.info("This might take a bit")
t1 = time.time()
    if x1 is not None and x2 is not None:
logger.info("X-range: %i to %i" %(x1, x2))
xindizes = np.arange(x1, x2, 2*dx+1)
else:
xindizes = np.arange(dx, s3d.lenx, 2*dx+1)
    if y1 is not None and y2 is not None:
logger.info("Y-range: %i to %i" %(y1, y2))
yindizes = np.arange(y1, y2, 2*dx+1)
else:
yindizes = np.arange(dx, s3d.leny, 2*dx+1)
starres = '%s_x%i_%i_y%i_%i_star_res.txt' \
%(s3d.inst, xindizes[0], xindizes[-1], yindizes[0], xindizes[-1])
if os.path.isfile(starres):
os.remove(starres)
for xindx in xindizes:
for yindx in yindizes:
subStars(s3d, xindx, yindx, dx,
bases=bases, verbose=0, starres=starres)
cubeout(s3d, s3d.starcube, err=s3d.erro, name='star')
cubeout(s3d, s3d.data-s3d.starcube, err=s3d.erro, name='gas')
logger.info("This took %.2f h" %((time.time()-t1)/3600.))
| mit | 2,258,833,824,051,038,000 | 36.317961 | 101 | 0.499382 | false |
Alecto3-D/testable-greeter | nameko/test/web/test_server.py | 1 | 5499 | import socket
import pytest
from eventlet import wsgi
from mock import patch
from werkzeug.contrib.fixers import ProxyFix
from nameko.exceptions import ConfigurationError
from nameko.web.handlers import HttpRequestHandler, http
from nameko.web.server import (
BaseHTTPServer, HttpOnlyProtocol, WebServer, parse_address)
class ExampleService(object):
name = "exampleservice"
@http('GET', '/')
def do_index(self, request):
return ''
@http('GET', '/large')
def do_large(self, request):
# more than a buffer's worth
return 'x' * (10**6)
def test_broken_pipe(
container_factory, web_config, web_config_port, web_session
):
container = container_factory(ExampleService, web_config)
container.start()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', web_config_port))
s.sendall(b'GET /large \r\n\r\n')
s.recv(10)
s.close() # break connection while there is still more data coming
# server should still work
assert web_session.get('/').text == ''
def test_other_socket_error(
container_factory, web_config, web_config_port, web_session
):
container = container_factory(ExampleService, web_config)
container.start()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', web_config_port))
with patch.object(BaseHTTPServer.BaseHTTPRequestHandler, 'finish') as fin:
fin.side_effect = socket.error('boom')
s.sendall(b'GET / \r\n\r\n')
s.recv(10)
s.close()
# takes down container
with pytest.raises(socket.error) as exc:
container.wait()
assert 'boom' in str(exc)
def test_client_disconnect_os_error(
container_factory, web_config, web_config_port, web_session
):
""" Regression for https://github.com/nameko/nameko/issues/368
"""
container = container_factory(ExampleService, web_config)
container.start()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', web_config_port))
with patch.object(HttpOnlyProtocol, 'handle_one_request') as handle:
handle.side_effect = OSError('raw readinto() returned invalid length')
s.sendall(b'GET / \r\n\r\n')
s.recv(10)
s.close()
# server should still work
assert web_session.get('/').text == ''
def test_other_os_error(
container_factory, web_config, web_config_port, web_session
):
""" Regression for https://github.com/nameko/nameko/issues/368
"""
container = container_factory(ExampleService, web_config)
container.start()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', web_config_port))
with patch.object(BaseHTTPServer.BaseHTTPRequestHandler, 'finish') as fin:
fin.side_effect = OSError('boom')
s.sendall(b'GET / \r\n\r\n')
s.recv(10)
s.close()
# takes down container
with pytest.raises(OSError) as exc:
container.wait()
assert 'boom' in str(exc)
@pytest.mark.parametrize(['source', 'result'], [
('8000', ('', 8000)),
('foo:8000', ('foo', 8000)),
('foo', None),
])
def test_parse_address(source, result):
if result is None:
with pytest.raises(ConfigurationError) as exc:
parse_address(source)
assert 'Misconfigured bind address' in str(exc)
assert '`foo`' in str(exc)
else:
assert parse_address(source) == result
def test_adding_middleware_with_get_wsgi_app(container_factory, web_config):
class CustomWebServer(WebServer):
def get_wsgi_app(self):
# get the original WSGI app that processes http requests
app = super(CustomWebServer, self).get_wsgi_app()
# apply the ProxyFix middleware as an example
return ProxyFix(app, num_proxies=1)
class CustomHttpRequestHandler(HttpRequestHandler):
server = CustomWebServer()
http = CustomHttpRequestHandler.decorator
class CustomServerExampleService(object):
name = 'customserverservice'
@http('GET', '/')
def do_index(self, request):
return '' # pragma: no cover
container = container_factory(CustomServerExampleService, web_config)
with patch.object(CustomWebServer, 'get_wsgi_server') as get_wsgi_server:
container.start()
wsgi_app = get_wsgi_server.call_args[0][1]
assert isinstance(wsgi_app, ProxyFix)
def test_custom_wsgi_server_is_used(
container_factory, web_config, web_config_port, web_session
):
def custom_wsgi_app(environ, start_response):
start_response('200 OK', [])
return 'Override'
class CustomWebServer(WebServer):
def get_wsgi_server(
self, sock, wsgi_app, protocol=HttpOnlyProtocol, debug=False
):
return wsgi.Server(
sock,
sock.getsockname(),
custom_wsgi_app,
protocol=protocol,
debug=debug
)
class CustomHttpRequestHandler(HttpRequestHandler):
server = CustomWebServer()
http = CustomHttpRequestHandler.decorator
class CustomServerExampleService(object):
name = 'customserverservice'
@http('GET', '/')
def do_index(self, request):
return '' # pragma: no cover
container = container_factory(CustomServerExampleService, web_config)
container.start()
assert web_session.get('/').text == 'Override'
| mit | 2,576,094,329,098,093,000 | 28.406417 | 78 | 0.646299 | false |
asm-products/sim | sim/settings.py | 1 | 3046 | """
Django settings for sim project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#nf4&)bxyk9ybkd&$f=!#a&g9-+hexue%6^=!s9!m=8&u-!i1%'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = DEBUG
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
ALLOWED_HOSTS = []
ADMINS = (
    ('Nicolas Joseph', '[email protected]'),
)
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'djangular',
'rest_framework',
'public',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'sim.urls'
WSGI_APPLICATION = 'sim.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'simdb',
'USER': 'sim',
'PASSWORD': 'devsim',
'HOST': '192.168.33.10',
'PORT': '5432'
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Paris'
USE_I18N = True
USE_L10N = True
USE_TZ = True
APPEND_SLASH = True  # Appends a trailing slash to URLs that lack one
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATICFILES_DIRS = (
("css", os.path.join(BASE_DIR, "stylesheets")),
("js", os.path.join(BASE_DIR, "js")),
("img", os.path.join(BASE_DIR, "img")),
("bw", os.path.join(BASE_DIR, "bower_components")),
)
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "generatedFiles")
REST_FRAMEWORK = {
# Use hyperlinked styles by default.
# Only used if the `serializer_class` attribute is not set on a view.
'DEFAULT_MODEL_SERIALIZER_CLASS':
'rest_framework.serializers.HyperlinkedModelSerializer',
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
]
}
| bsd-2-clause | 1,686,597,243,547,752,400 | 25.034188 | 73 | 0.69107 | false |
dan-stone/canal | canal/tests/test_from_json.py | 1 | 3665 | import numpy as np
import canal as canal
from .util import NumpyTestCase
class FromJSONTestCase(NumpyTestCase):
class Measurement(canal.Measurement):
int_field = canal.IntegerField()
alternate_db_name = canal.IntegerField(db_name="something_else")
float_field = canal.FloatField()
bool_field = canal.BooleanField()
string_field = canal.StringField()
tag_1 = canal.Tag()
tag_2 = canal.Tag()
def test_from_json_iso_time(self):
test_data = 5*[
[
"2015-01-29T21:55:43.702900257Z",
1,
2,
1.2,
True,
"some content",
"1",
"2"
],
[
"2015-01-29T21:55:43.702900345Z",
2,
3,
2.3,
False,
"some other content",
"1",
"2"
]
]
json_data = dict(
results=[dict(
series=[dict(
name="Measurement",
columns=[
"time",
"int_field",
"something_else",
"float_field",
"bool_field",
"string_field",
"tag_1",
"tag_2"
],
values=test_data
)]
)]
)
test_series = self.Measurement.from_json(json_data)
self.assertndArrayEqual(
test_series.time,
np.array(
5*[
"2015-01-29T21:55:43.702900257Z",
"2015-01-29T21:55:43.702900345Z"
],
dtype='datetime64'
)
)
self.assertndArrayEqual(
test_series.int_field,
np.array(5*[1, 2])
)
self.assertndArrayEqual(
test_series.alternate_db_name,
np.array(5*[2, 3])
)
self.assertndArrayEqual(
test_series.float_field,
np.array(5*[1.2, 2.3])
)
self.assertndArrayEqual(
test_series.bool_field,
np.array(5*[True, False])
)
self.assertndArrayEqual(
test_series.string_field,
np.array(5*["some content", "some other content"])
)
self.assertndArrayEqual(
test_series.tag_1,
np.array(10*["1"])
)
self.assertndArrayEqual(
test_series.tag_2,
np.array(10*["2"])
)
def test_from_json_bad_input(self):
with self.assertRaises(ValueError):
list(self.Measurement.from_json({"bad": "input"}))
def test_empty_json(self):
content = dict()
with self.assertRaises(ValueError):
self.Measurement.from_json(content)
def test_from_json_wrong_measurement(self):
test_json = dict(
results=[dict(
series=[dict(
name="SomeOtherMeasurement",
columns=[
"time",
"int_field",
"float_field",
"bool_field",
"string_field",
"tag_1",
"tag_2"
],
values=[]
)]
)]
)
with self.assertRaises(ValueError):
self.Measurement.from_json(test_json)
| mit | 4,122,715,446,854,051,300 | 27.192308 | 72 | 0.411187 | false |
vaal-/il2_stats | src/stats/online.py | 1 | 2169 | from copy import deepcopy
import logging
from mission_report import parse_mission_log_line
from mission_report.constants import COUNTRIES_COALITION_DEFAULT, COALITION_ALIAS
from stats.models import PlayerOnline, Profile
logger = logging.getLogger('online')
_countries = deepcopy(COUNTRIES_COALITION_DEFAULT)
def update_online(m_report_files, online_timestamp):
for file_path in m_report_files:
if file_path.stat().st_mtime > online_timestamp:
online_timestamp = file_path.stat().st_mtime
with file_path.open() as f:
for line in f:
                # ignore "bad" lines that have no AType marker
if 'AType' not in line:
logger.warning('ignored bad string: [{}]'.format(line))
continue
try:
data = parse_mission_log_line.parse(line)
except parse_mission_log_line.UnexpectedATypeWarning:
logger.warning('unexpected atype: [{}]'.format(line))
continue
atype_id = data.pop('atype_id')
if atype_id == 10:
try:
profile = Profile.objects.get(uuid=data['account_id'])
except Profile.DoesNotExist:
profile = None
PlayerOnline.objects.update_or_create(uuid=data['account_id'], defaults={
'nickname': data['name'],
'coalition': _countries[data['country_id']],
'profile': profile,
})
elif atype_id == 21:
PlayerOnline.objects.filter(uuid=data['account_id']).delete()
elif atype_id == 0:
for country, coalition in data['countries'].items():
_countries[country] = COALITION_ALIAS[coalition]
return online_timestamp
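# --- Illustrative polling pattern (added for clarity; not part of the original
# module). The directory and glob pattern are invented; `update_online` expects
# path objects providing `stat()` and `open()`, as used above:
#
#     from pathlib import Path
#     ts = 0
#     reports = sorted(Path('/srv/il2/MissionReports').glob('missionReport*.txt'))
#     ts = update_online(reports, ts)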
def cleanup_online():
PlayerOnline.objects.all().delete()
| mit | -3,861,800,965,770,720,000 | 38.45283 | 97 | 0.506996 | false |
Samsung/ADBI | idk/cachereader/debuginfo.py | 1 | 7463 | import sqlite3
import os.path
from .cfa import CallFrameAddress
from .files import Files
from .framepointers import Framepointers
from .function import Functions
from .insnset import InsnSet
from .lines import Lines
from .location import Locations
from .types import Types
from .variables import Variables
from .symbols import Symbols
from cachebuilder import DebugInfo as DebugInfoWriter
class DebugInfo:
def __init__(self, path, conn):
self.conn = conn
self.path = path
self.cfa = CallFrameAddress(self)
self.files = Files(self)
self.framepointers = Framepointers(self)
self.functions = Functions(self)
self.insnset = InsnSet(self)
self.lines = Lines(self)
self.locations = Locations(self)
self.types = Types(self)
self.variables = Variables(self)
self.symbols = Symbols(self)
@classmethod
def loadcached(cls, path, dbpath=None):
'''Load a new cache for the given file.'''
dbpath = dbpath or path + '.ac'
def get_file_time(path):
'''Get the modification time of the given file.'''
try:
return os.path.getmtime(path)
except OSError:
return 0
if not os.path.isfile(path):
raise IOError('Binary file does not exist: %s.' % path)
if not os.path.isfile(dbpath):
raise ValueError('No cache file exists for %s.' % path)
if get_file_time(dbpath) < get_file_time(path):
raise ValueError('Cache older than binary.')
return cls(path, sqlite3.connect(dbpath))
@classmethod
def load(cls, path, dbpath=None, store=True):
'''Load or create a debug cache for the given file.'''
try:
return cls.loadcached(path, dbpath)
except ValueError:
with open(path, 'rb') as elf:
writer = DebugInfoWriter(elf)
if store:
writer.store(dbpath)
return cls(path, writer.cache)
def get_CFA_expression(self, addr):
return self.cfa[addr]
def close(self):
self.conn.close()
def query_db(self, query, *args):
'''Query the database and yield rows as tuples or single objects.'''
for e in self.conn.execute(query, tuple(args)):
if len(e) == 1:
yield e[0]
else:
yield e
def query_db_one(self, query, *args):
'''Query the database and return one matching row as tuple or single object.'''
for e in self.conn.execute(query, tuple(args)):
if len(e) == 1:
return e[0]
else:
return e
break
return None
def iter_traceable_lines(self, filename):
'''Yield line-address pairs of traceable lines in the given file.'''
query = '''select locations.line, lines.addr
from locations join lines
on locations.id == lines.loc
where file=(select id from files where path=?)'''
return self.query_db(query, filename)
def func2addr(self, filename, fn):
'''Get function entry address.'''
if filename:
filename = self.files.expand(filename)
query = '''select lo from functions join locations
on locations.id == functions.loc
where functions.name = ?
and locations.file = (select id from files where path = ?)'''
ret = self.query_db(query, fn, filename)
else:
ret = self.query_db('select lo from functions where name = ?', fn)
ret = set(ret)
if len(ret) == 1:
return ret.pop()
elif ret:
raise ValueError('ambiguous function name %s. Found at: %s' % (fn, ', '.join([hex(addr) for addr in ret])))
else:
raise ValueError('no such function: %s.' % fn)
return ret.pop()
def sym2addr(self, name, symbol_type=None):
'''Get symbol entry address'''
if symbol_type:
ret = self.query_db('select value from symbols where name = ? and type = ?', name, symbol_type)
else:
ret = self.query_db('select value from symbols where name = ?', name)
ret = set(ret)
if len(ret) == 1:
return ret.pop()
elif ret:
raise ValueError('multiple symbols with name %s. addresses: %s' % (name, ', '.join([hex(value) for value in ret])))
else:
raise ValueError('no such symbol: %s.' % name)
return ret.pop()
def line2addr(self, path, line):
path = self.files.expand(path)
query = '''select lines.addr from lines join locations
on lines.loc == locations.id
where locations.line = ?
and locations.file = (select id from files where path = ?)'''
ret = self.query_db(query, line, path)
ret = list(ret)
if len(ret) == 1:
return ret.pop()
elif ret:
raise ValueError('location ambiguous: %s:%i.' % (self.files.simplify(path), line))
else:
raise ValueError('location invalid or not traceable: %s:%i.' % (self.files.simplify(path), line))
def get_addr(self, spec, use_symbols=False):
spec = spec.strip()
if spec.startswith('*'):
return int(spec[1:], 0)
colon_idx = spec.rfind(':')
if colon_idx == -1:
offset = 0
offset_idx = spec.rfind('+')
if offset_idx > -1:
offset = int(spec[offset_idx + 1:], 16)
spec = spec[:offset_idx]
# function
func = spec.strip()
if use_symbols:
return self.sym2addr(func, 'STT_FUNC') + offset
else:
return self.func2addr(None, func) + offset
else:
filename = spec[:colon_idx]
linefunc = spec[colon_idx + 1:]
try:
line = int(linefunc)
except ValueError:
func = linefunc.strip()
return self.func2addr(filename, func)
return self.line2addr(filename, line)
def get_datatype(self, spec):
pass
def iter_vars(self, address):
return (x[0] for x in self.conn.execute('select name from addr2vars where lo <= ? < hi', (address,)))
def iter_locals(self, address):
idx = self.addr2func_id(address)
        if not idx:
            return iter(())
return (x[0] for x in self.conn.execute('select distinct name from vars2func join vars on vars2func.var = vars.id where func = ?', (idx,)))
    def addr2sym_id(self, address):
        ret = self.conn.execute('select id from symbols where value <= ? and ? < value + size', (address, address)).fetchone()
        if ret:
            return ret[0]
def addr2func(self, address):
        ret = self.conn.execute('select func from addr2func where lo <= ? < hi', (address,)).fetchone()
if ret:
return ret[0]
def addr2func_id(self, address):
ret = self.conn.execute('select id from addr2func where lo <= ? < hi', (address,)).fetchone()
if ret:
return ret[0]
def get_func_range(self, address):
ret = self.conn.execute('select lo, hi from addr2func where lo <= ? < hi', (address,)).fetchone()
return ret
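# --- Illustrative usage sketch (added for clarity; not part of the original
# module). Paths and locations are invented; `load` builds '<binary>.ac' on a
# cache miss and reuses it afterwards:
#
#     di = DebugInfo.load('/path/to/libfoo.so')
#     entry = di.get_addr('main')            # function entry address
#     by_line = di.get_addr('src/foo.c:42')  # traceable source line
#     literal = di.get_addr('*0x1f00')       # raw address, taken verbatim
#     di.close()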
| apache-2.0 | -4,911,396,537,872,181,000 | 32.022124 | 147 | 0.554201 | false |
knowledgecommonsdc/kcdc3 | kcdc3/apps/pinata/migrations/0008_auto__add_field_page_template__chg_field_page_status.py | 1 | 5497 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Page.template'
db.add_column('pinata_page', 'template',
self.gf('django.db.models.fields.CharField')(default='basic.html', max_length=48),
keep_default=False)
# Changing field 'Page.status'
db.alter_column('pinata_page', 'status', self.gf('django.db.models.fields.CharField')(max_length=9))
def backwards(self, orm):
# Deleting field 'Page.template'
db.delete_column('pinata_page', 'template')
# Changing field 'Page.status'
db.alter_column('pinata_page', 'status', self.gf('django.db.models.fields.CharField')(max_length=48))
models = {
'pinata.notice': {
'Meta': {'ordering': "['sort_order', 'title']", 'object_name': 'Notice'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'live': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'main_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'default': '50', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'pinata.page': {
'Meta': {'ordering': "['path']", 'object_name': 'Page'},
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['pinata.Page']", 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'short_title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'sidebar_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'default': '50', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'PUBLISHED'", 'max_length': '9'}),
'teaser': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'default': "'basic.html'", 'max_length': '48'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'pinata.pressclipping': {
'Meta': {'ordering': "['date']", 'object_name': 'PressClipping'},
'date': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'destination_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'main_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'publication': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'PUBLISHED'", 'max_length': '9'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'pinata.slide': {
'Meta': {'ordering': "['sort_order', 'title']", 'object_name': 'Slide'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'live': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'main_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'default': '50', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'pinata.sponsor': {
'Meta': {'ordering': "['group', 'sort_order', 'title']", 'object_name': 'Sponsor'},
'destination_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'group': ('django.db.models.fields.CharField', [], {'default': "'B'", 'max_length': '3'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'main_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'default': '50', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'PUBLISHED'", 'max_length': '9'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
}
}
complete_apps = ['pinata'] | mit | 2,237,524,497,699,989,200 | 62.930233 | 136 | 0.541022 | false |
ni/nifpga-python | nifpga/status.py | 1 | 14615 | """
An set of status exception classes to be used when an NiFpga
function returns either a warning or error status.
Use check_status() to raise an appropriate exception if necessary.
Error and Warning exception class names are auto-generated from the
strings in 'codeToString' in this file.
For example, handle a fatal error like this:
>>> @check_status('frob', ['foo', 'bar', 'baz'])
... def frob(foo, bar, baz):
... return -61141
...
>>> try:
... frob(0, 1, 2)
... except FpgaBusyError as e:
... print(e) # doctest: +NORMALIZE_WHITESPACE
Error: FpgaBusy (-61141) when calling 'frob' with arguments:
foo: 0x0
bar: 0x1
baz: 0x2
Or handle a warning like this:
>>> @check_status('frob', ['foo', 'bar', 'baz'])
... def frob(foo, bar, baz):
... return 61003
...
>>> with warnings.catch_warnings(record=True) as w:
... frob(0, 1, 2)
... print(w[0].message) # doctest: +NORMALIZE_WHITESPACE
Warning: FpgaAlreadyRunning (61003) when calling 'frob' with arguments:
foo: 0x0
bar: 0x1
baz: 0x2
Copyright (c) 2017 National Instruments
"""
import functools
import warnings
def _raise_or_warn_if_nonzero_status(status, function_name, argument_names, *args):
"""
Helper for the 'check_status' decorator.
    Raises the proper ErrorStatus subclass or warns with the proper
    WarningStatus subclass if status is not 0 (success).
function_name: the name of the function, e.g. "NiFpga_ConfigureFifo"
Used to make the exception message more useful.
argument_names: list of names of the arguments to the function
e.g. ["session", "fifo"]
args: the arguments that were passed to the function
'argument_names' and 'args' are used to make the exception message
more useful, and to find the arguments after catching an exception if
the function fails (e.g. 'e.get_args()["session"]').
"""
if status == 0:
return
if status in codes_to_exception_classes:
if status < 0:
raise codes_to_exception_classes[status](function_name, argument_names, *args)
else:
warning = codes_to_exception_classes[status](function_name, argument_names, *args)
warnings.warn(warning)
else:
if status < 0:
raise UnknownError(status, function_name, argument_names, *args)
else:
warnings.warn(UnknownWarning(status, function_name, argument_names, *args))
def check_status(function_name, argument_names):
"""
Decorator (that takes arguments) to call a function and raise
an appropriate subclass of Status if the
returned status is not zero.
Also validates that the number of parameters passed to the
function is correct.
function_name: the name of the function, e.g. "NiFpga_ConfigureFifo"
Used to make the exception message more useful.
argument_names: list of names of the arguments to the function
e.g. ["session", "fifo"]
Used to make the exception message more useful, and to find the
arguments after catching an exception if the function fails
(e.g. 'e.get_args()["session"]').
"""
def decorator(function):
@functools.wraps(function)
def internal(*args):
if hasattr(function, "argtypes") and len(args) != len(function.argtypes):
raise TypeError("%s takes exactly %u arguments (%u given)"
% (function_name, len(function.argtypes), len(args)))
status = function(*args)
_raise_or_warn_if_nonzero_status(status, function_name, argument_names, args)
return internal
return decorator
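# --- Illustrative wiring sketch (added for clarity; not part of the original
# module). Shows how a raw ctypes entry point is typically wrapped; the library
# name and argtypes below are assumptions for demonstration only:
#
#     _lib = ctypes.CDLL("libNiFpga.so")
#     _lib.NiFpga_Run.argtypes = [ctypes.c_uint32, ctypes.c_uint32]
#     run = check_status("NiFpga_Run", ["session", "attribute"])(_lib.NiFpga_Run)
#     run(session, 0)  # raises on error status, warns on warning status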
class Status(BaseException):
def __init__(self, code, code_string, function_name, argument_names,
function_args):
""" Base exception class for when an NiFpga function returns a non-zero
status.
Args:
code (int): e.g. -52000
code_string (str) : e.g. 'MemoryFull'
function_name (string): the function that returned the error or
warning status. e.g. 'NiFpga_ConfigureFifo'
argument_names (list): a list of the names of the arguments to the
function. e.g. ["session", "fifo", "requested depth"]
function_args (tuple) : a tuple of the arguments passed to the
function. The order of argument_names should correspond to the
order of function_args. e.g. '(session, fifo, depth)'
"""
self._code = code
self._code_string = code_string
self._function_name = function_name
self._named_args = []
for i, arg in enumerate(function_args):
self._named_args.append(
{
"name": argument_names[i],
"value": arg
})
# this is also necessary to properly reconstruct the object when
# passing it between processes
super(Status, self).__init__(self._code,
self._code_string,
self._function_name,
self._named_args)
def get_code(self):
return self._code
def get_code_string(self):
return self._code_string
def get_function_name(self):
""" Returns a string for the functions name, """
return self._function_name
def get_args(self):
"""
Returns a dictionary of argument names to argument values of
the function that caused the exception to be raised.
Returns:
arg_dict (dictionary): Converts ctypes args to their actual values
instead of the ctypes instance. e.g.
.. code-block:: python
{
"session":0x10000L,
"fifo" : 0x0,
...}
"""
arg_dict = {}
for arg in self._named_args:
# ctypes types all have a member named 'value'.
value = arg["value"].value if hasattr(arg["value"], "value") else arg["value"]
arg_dict[arg["name"]] = value
return arg_dict
def _stringify_arg(self, arg):
"""
Converts a function argument to a readable string for debugging.
Stringify ctypes values, instead of the ctypes instance itself.
Adds single quotes around strings (so it's obvious they are strings).
Stringify numbers as hex to make it easier to decode
bit packed sessions, attributes, etc.
"""
# ctypes types all have a member named 'value'.
if hasattr(arg, "value"):
return self._stringify_arg(arg.value)
if isinstance(arg, str):
return "'%s'" % arg
try:
return hex(arg)
except TypeError:
return str(arg)
def __str__(self):
"""
Returns the function name, status code, and arguments used.
Example:
.. code-block:: python
Error: FifoTimeout (-50400) when calling 'Dummy Function Name' with
arguments:
session: 0xbeef
                fifo: 0xf1f0
                data: 0xda7a
                number of elements: 0x100
                timeout ms: 0x200
                elements remaining: 0x300
a bogus string argument: 'I am a string'
"""
arg_string = ""
for arg in self._named_args:
arg_string += "\n\t%s: %s" % (arg["name"], self._stringify_arg(arg["value"]))
return "%s: %s (%d) when calling '%s' with arguments:%s" \
% ("Error" if self._code < 0 else "Warning",
self._code_string,
self._code,
self._function_name,
arg_string)
class WarningStatus(Status, RuntimeWarning):
"""
Base warning class for when an NiFpga function returns a warning (> 0)
status.
Useful if trying to catch warning and error status exceptions separately
"""
def __init__(self, code, code_string, function_name, argument_names,
function_args):
super(WarningStatus, self).__init__(code, code_string, function_name,
argument_names, function_args)
class ErrorStatus(Status, RuntimeError):
"""
Base Error class for when an NiFpga function returns an error (< 0)
status.
Useful if trying to catch warning and error status exceptions separately
"""
def __init__(self, code, code_string, function_name, argument_names,
function_args):
super(ErrorStatus, self).__init__(code, code_string, function_name,
argument_names, function_args)
class UnknownWarning(WarningStatus):
def __init__(self, code, function_name, argument_names, function_args):
super(UnknownWarning, self).__init__(code=code,
code_string="Unknown code",
function_name=function_name,
argument_names=argument_names,
function_args=function_args)
class UnknownError(ErrorStatus):
def __init__(self, code, function_name, argument_names, function_args):
super(UnknownError, self).__init__(code=code,
code_string="Unknown code",
function_name=function_name,
argument_names=argument_names,
function_args=function_args)
# Define error codes and their names.
# Each code in this list will be codegened into two classes, e.g.:
# FifoTimeoutError (for code -50400)
# FifoTimeoutWarning (for code 50400)
error_codes = [
(-50400, "FifoTimeout"),
(-50405, "TransferAborted"),
(-52000, "MemoryFull"),
(-52003, "SoftwareFault"),
(-52005, "InvalidParameter"),
(-52006, "ResourceNotFound"),
(-52007, "OperationTimedOut"),
(-52008, "OSFault"),
(-52010, "ResourceNotInitialized"),
(-52012, "EndOfData"),
(-52013, "ObjectNameCollision"),
(-61003, "FpgaAlreadyRunning"),
(-61018, "DownloadError"),
(-61024, "DeviceTypeMismatch"),
(-61046, "CommunicationTimeout"),
(-61060, "IrqTimeout"),
(-61070, "CorruptBitfile"),
(-61072, "BadDepth"),
(-61073, "BadReadWriteCount"),
(-61083, "ClockLostLock"),
(-61141, "FpgaBusy"),
(-61200, "FpgaBusyFpgaInterfaceCApi"),
(-61201, "FpgaBusyScanInterface"),
(-61202, "FpgaBusyFpgaInterface"),
(-61203, "FpgaBusyInteractive"),
(-61204, "FpgaBusyEmulation"),
(-61211, "ResetCalledWithImplicitEnableRemoval"),
(-61212, "AbortCalledWithImplicitEnableRemoval"),
(-61213, "CloseAndResetCalledWithImplicitEnableRemoval"),
(-61214, "ImplicitEnableRemovalButNotYetRun"),
(-61215, "RunAfterStoppedCalledWithImplicitEnableRemoval"),
(-61216, "GatedClockHandshakingViolation"),
(-61217, "RegionsOutstandingForSession"),
(-61219, "ElementsNotPermissibleToBeAcquired"),
(-61252, "FpgaBusyConfiguration"),
(-61253, "CloseCalledWithResetNotSupported"),
(-61254, "RunAfterStoppedNotSupported"),
(-61499, "InternalError"),
(-63003, "TotalDmaFifoDepthExceeded"),
(-63033, "AccessDenied"),
(-63038, "HostVersionMismatch"),
(-63040, "RpcConnectionError"),
(-63041, "RpcServerError"),
(-63042, "NetworkFault"),
(-63043, "RpcSessionError"),
(-63044, "RpcServerMissing"),
(-63045, "FeatureNotSupportedOverRpc"),
(-63046, "UsingRemoteSessionForLocalTarget"),
(-63050, "TriggerReserved"),
(-63051, "TriggerNotReserved"),
(-63080, "BufferInvalidSize"),
(-63081, "BufferNotAllocated"),
(-63082, "FifoReserved"),
(-63083, "FifoElementsCurrentlyAcquired"),
(-63084, "MisalignedAccess"),
(-63085, "ControlOrIndicatorTooLarge"),
(-63086, "OperationNotSupportedWhileStarted"),
(-63087, "TypesDoNotMatch"),
(-63088, "OutOfFifoRegions"),
(-63101, "BitfileReadError"),
(-63106, "SignatureMismatch"),
(-63107, "IncompatibleBitfile"),
(-63150, "HardwareFault"),
(-63170, "PowerShutdown"),
(-63171, "ThermalShutdown"),
(-63180, "InvalidAliasName"),
(-63181, "AliasNotFound"),
(-63182, "InvalidDeviceAccess"),
(-63183, "InvalidPort"),
(-63184, "ChildDeviceNotInserted"),
(-63192, "InvalidResourceName"),
(-63193, "FeatureNotSupported"),
(-63194, "VersionMismatch"),
(-63195, "InvalidSession"),
(-63196, "InvalidAttribute"),
(-63198, "OutOfHandles"),
]
# create an exception class for each error code and add to dictionary
# ie FifoTimeoutWarning, FifoTimeoutError
codes_to_exception_classes = {}
_g = globals()
for code, code_string in error_codes:
    # we need to introduce a scope, otherwise 'code' and 'code_string'
    # would all end up referencing the values of the last loop iteration.
def add_classes(code, code_string):
classname = code_string + 'Error'
def __init__(self, function_name, argument_names, function_args):
ErrorStatus.__init__(self,
code=code,
code_string=code_string,
function_name=function_name,
argument_names=argument_names,
function_args=function_args)
error_class = type(classname, (ErrorStatus,),
{'__init__': __init__, 'CODE': code})
codes_to_exception_classes[code] = error_class
# copy the exception type into module globals
_g[error_class.__name__] = error_class
classname = code_string + 'Warning'
def __init__(self, function_name, argument_names, function_args):
WarningStatus.__init__(self,
code=-code,
code_string=code_string,
function_name=function_name,
argument_names=argument_names,
function_args=function_args)
warning_class = type(classname, (WarningStatus,),
{'__init__': __init__, 'CODE': -code})
codes_to_exception_classes[-code] = warning_class
# copy the warning type into module globals
_g[warning_class.__name__] = warning_class
add_classes(code, code_string)
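

# Illustrative sketch (not part of the original module): how 'check_status'
# behaves when the wrapped callable reports a failure. The stand-in function
# below is an assumption for demonstration only -- the real entry points are
# ctypes functions loaded from the NiFpga C library.
if __name__ == "__main__":
    @check_status("NiFpga_Open", ["bitfile", "signature"])
    def _fake_open(bitfile, signature):
        # Simulate the C call returning the FifoTimeout status code.
        return FifoTimeoutError.CODE

    try:
        _fake_open("design.lvbitx", "SIG")
    except FifoTimeoutError as e:
        # Prints -50400 and {'bitfile': 'design.lvbitx', 'signature': 'SIG'}.
        print(e.get_code(), e.get_args())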
| mit | 8,977,066,302,177,029,000 | 36.474359 | 94 | 0.584536 | false |
chienlieu2017/it_management | odoo/addons/l10n_multilang/models/account.py | 1 | 1875 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
# In this file, we mostly add the tag translate=True on existing fields that we now want to be translated.
class AccountAccountTag(models.Model):
_inherit = 'account.account.tag'
name = fields.Char(translate=True)
class AccountAccountTemplate(models.Model):
_inherit = 'account.account.template'
name = fields.Char(translate=True)
class AccountAccount(models.Model):
_inherit = 'account.account'
name = fields.Char(translate=True)
class AccountTax(models.Model):
_inherit = 'account.tax'
name = fields.Char(translate=True)
class AccountTaxTemplate(models.Model):
_inherit = 'account.tax.template'
name = fields.Char(translate=True)
class AccountChartTemplate(models.Model):
_inherit = 'account.chart.template'
_order = 'name'
name = fields.Char(translate=True)
spoken_languages = fields.Char(string='Spoken Languages', help="State here the languages for which the translations of templates could be loaded at the time of installation of this localization module and copied in the final object when generating them from templates. You must provide the language codes separated by ';'")
class AccountFiscalPosition(models.Model):
_inherit = 'account.fiscal.position'
name = fields.Char(translate=True)
note = fields.Text(translate=True)
class AccountFiscalPositionTemplate(models.Model):
_inherit = 'account.fiscal.position.template'
name = fields.Char(translate=True)
note = fields.Text(translate=True)
class AccountJournal(models.Model):
_inherit = 'account.journal'
name = fields.Char(translate=True)
class AccountAnalyticAccount(models.Model):
_inherit = 'account.analytic.account'
name = fields.Char(translate=True)
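

# Illustrative sketch (an assumption, not part of this module): any other
# model can opt into the same pattern by redeclaring its textual fields
# with translate=True, for example:
#
#     class AccountPaymentTerm(models.Model):
#         _inherit = 'account.payment.term'
#         name = fields.Char(translate=True)
#         note = fields.Text(translate=True)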
| gpl-3.0 | 3,139,817,623,212,854,300 | 26.173913 | 327 | 0.7344 | false |
google-research/google-research | goemotions/inspect_output_layer_weights.py | 1 | 2106 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Save BERT output layer weights for inspection."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
import numpy as np
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_string("checkpoint_dir", None, "Model checkpoint directory.")
flags.DEFINE_string("tensor_names", "output_weights,new_output_weights",
"Comma separated list of tensor names to save.")
def save_tensor(reader, name):
tensor = reader.get_tensor(name)
np.save(os.path.join(FLAGS.checkpoint_dir, name + ".npy"), tensor)
def main(_):
checkpoint = tf.train.latest_checkpoint(FLAGS.checkpoint_dir)
reader = tf.train.NewCheckpointReader(checkpoint)
for name in FLAGS.tensor_names.split(","):
save_tensor(reader, name)
if __name__ == "__main__":
app.run(main)
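

# Example invocation (the paths below are assumptions for illustration):
#
#     python inspect_output_layer_weights.py \
#         --checkpoint_dir=checkpoints/goemotions \
#         --tensor_names=output_weights,new_output_weights
#
# Each requested tensor is written as '<name>.npy' inside checkpoint_dir.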
| apache-2.0 | -7,049,970,231,258,936,000 | 31.4 | 74 | 0.740266 | false |
dpetker/adventofcode | 2018/test/test_day8.py | 1 | 1343 | import unittest
import os
from src.day8 import create_node_tree
SAMPLE_DATA = "2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2"
class TestMemoryManeuver(unittest.TestCase):
def test_sample_input_part_one(self):
tree = create_node_tree(SAMPLE_DATA)
self.assertEqual(str(tree), SAMPLE_DATA)
self.assertEqual(tree.metadata_sum, 138)
# def test_real_input_part_one(self):
# with open(os.path.join(os.path.dirname(__file__), '../input/day7.txt'), 'r') as f:
# lines = f.readlines()
# day7_input = [line.strip() for line in lines]
# result = find_basic_path(day7_input)
# print(f"The correct order of steps for Part One is {result}")
# self.assertEqual(result, "BDHNEGOLQASVWYPXUMZJIKRTFC")
# def test_sample_input_part_two(self):
# result, total_time = find_path_in_parallel(SAMPLE_DATA, 2, 0)
# self.assertEqual(result, "CABFDE")
# self.assertEqual(total_time, 15)
# def test_real_input_part_two(self):
# with open(os.path.join(os.path.dirname(__file__), '../input/day7.txt'), 'r') as f:
# lines = f.readlines()
# day7_input = [line.strip() for line in lines]
# result, total_time = find_path_in_parallel(day7_input, 5, 60)
# print(f"The total time for Part Two is {total_time}s")
# self.assertEqual(total_time, 1107)
if __name__ == '__main__':
unittest.main()
| mit | -2,462,994,842,280,944,600 | 35.297297 | 88 | 0.653016 | false |
slint/zenodo | zenodo/modules/sitemap/ext.py | 2 | 2576 | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Sitemap generation for Zenodo."""
from __future__ import absolute_import, print_function
from invenio_cache import current_cache
from . import config
from .generators import generator_fns
class ZenodoSitemap(object):
"""Zenodo sitemap extension."""
def __init__(self, app=None):
"""Extension initialization."""
if app:
self.init_app(app)
def init_app(self, app):
"""Flask application initialization."""
self.app = app
self.init_config(app)
self.generators = [fn for fn in generator_fns]
app.extensions['zenodo-sitemap'] = self
# Keep the currently stored sitemap cache keys for easy clearing
self.cache_keys = set()
def set_cache(self, key, value):
"""Set the sitemap cache."""
current_cache.set(key, value, timeout=-1)
self.cache_keys.add(key)
@staticmethod
def get_cache(key):
"""Get the sitemap cache."""
        return current_cache.get(key)
def clear_cache(self):
"""Clear the sitemap cache."""
for key in self.cache_keys:
current_cache.delete(key)
self.cache_keys = set()
@staticmethod
def init_config(app):
"""Initialize configuration."""
for k in dir(config):
if k.startswith('ZENODO_SITEMAP_'):
app.config.setdefault(k, getattr(config, k))
def _generate_all_urls(self):
"""Run all generators and yield the sitemap JSON entries."""
for generator in self.generators:
for generated in generator():
yield generated
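

# Illustrative usage sketch (an assumption: run inside a Flask application
# context where invenio_cache is configured):
#
#     sitemap = app.extensions['zenodo-sitemap']
#     urls = list(sitemap._generate_all_urls())
#     sitemap.set_cache('sitemap:page:1', urls)
#     sitemap.clear_cache()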
| gpl-2.0 | -950,144,029,099,850,400 | 31.607595 | 76 | 0.660326 | false |
djhshih/genomic | utils/genompy/genompy/randomize.py | 1 | 1657 | #!/usr/bin/env python3
import random
from . import cn
from . import gp
def overlap_genes_in_regions(regions, geneDb, geneSets, overlaps, genes=None):
'''Track overlap of genes in regions with genes in each gene set, in place'''
# genes and overlaps will be modified in place
# store all genes from all regions
if genes is None:
genes = set()
for region in regions:
region_genes = geneDb.genes(region)
for gene in region_genes:
genes.add(gene)
overlap_genes_in_set(genes, geneSets, overlaps)
def overlap_genes_in_set(genes, geneSets, overlaps):
'''Track overlap of genes with each gene set, in place'''
# overlaps will be modified in place
# determine overlap of genes with gene sets
for name, gs in geneSets.sets.items():
gene_set = gs.genes
# count overlap
overlap = 0
for gene in gene_set:
if gene in genes:
overlap += 1
overlaps[name].append(overlap)
def randomized_genes(genes, universe):
'''Return set of re-sampled genes from universe'''
new_genes = set()
for i in range(len(genes)):
new_genes.add( random.choice(universe) )
return new_genes
def randomize_regions(regions, chrlens, rand_chrom=True):
'''Randomize regions in place'''
for region in regions:
randomize_region(region, chrlens, rand_chrom)
def randomize_region(region, chrlens, rand_chrom=True):
'''Randomize region in place'''
if rand_chrom:
# randomly choose a new chromosome
region.chromosome = random.choice([x for x in chrlens.keys()])
# randomize start position
size = region.size
max_end = chrlens[region.chromosome] - size + 1
region.start = random.randint(0, max_end)
region.end = region.start + size - 1
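

if __name__ == '__main__':
    # Minimal self-contained sketch (not part of the library). Because this
    # module uses relative imports, run it as 'python -m genompy.randomize'.
    # The Region class below is an assumption standing in for genompy's own
    # region type; randomize_region only needs chromosome/start/end/size.
    class Region:
        def __init__(self, chromosome, start, end):
            self.chromosome = chromosome
            self.start = start
            self.end = end

        @property
        def size(self):
            return self.end - self.start + 1

    chrlens = {'chr1': 1000, 'chr2': 2000}
    region = Region('chr1', 10, 109)
    randomize_region(region, chrlens)
    print(region.chromosome, region.start, region.end)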
| gpl-3.0 | -5,363,245,007,392,990,000 | 25.301587 | 78 | 0.715751 | false |
Itxaka/st2 | st2common/tests/unit/test_rbac_loader.py | 1 | 8480 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest2
import mock
import jsonschema
from st2tests import config
from st2tests.fixturesloader import get_fixtures_base_path
from st2common.rbac.loader import RBACDefinitionsLoader
__all__ = [
'RBACDefinitionsLoaderTestCase'
]
class RBACDefinitionsLoaderTestCase(unittest2.TestCase):
@classmethod
def setUpClass(cls):
config.parse_args()
def test_load_role_definition_success(self):
loader = RBACDefinitionsLoader()
file_path = os.path.join(get_fixtures_base_path(), 'rbac/roles/role_three.yaml')
role_definition_api = loader.load_role_definition_from_file(file_path=file_path)
self.assertEqual(role_definition_api.name, 'role_three')
self.assertTrue('all the pack permissions on pack dummy_pack_1' in
role_definition_api.description)
self.assertEqual(len(role_definition_api.permission_grants), 3)
self.assertEqual(role_definition_api.permission_grants[0]['resource_uid'],
'pack:dummy_pack_1')
self.assertEqual(role_definition_api.permission_grants[1]['resource_uid'],
'pack:dummy_pack_2')
self.assertTrue('rule_view' in role_definition_api.permission_grants[1]['permission_types'])
self.assertEqual(role_definition_api.permission_grants[2]['permission_types'],
['action_execute'])
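    # For reference, the YAML fixture exercised above is expected to look
    # roughly like this (a reconstruction from the assertions, not the
    # actual file contents):
    #
    #     name: "role_three"
    #     description: "... all the pack permissions on pack dummy_pack_1 ..."
    #     permission_grants:
    #         - resource_uid: "pack:dummy_pack_1"
    #           permission_types: [...]
    #         - resource_uid: "pack:dummy_pack_2"
    #           permission_types: ["rule_view", ...]
    #         - permission_types: ["action_execute"]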
def test_load_role_definition_validation_error(self):
loader = RBACDefinitionsLoader()
# Invalid permission which doesn't apply to the resource in question
file_path = os.path.join(get_fixtures_base_path(), 'rbac_invalid/roles/role_one.yaml')
expected_msg = 'Invalid permission type "rule_all" for resource type "action"'
self.assertRaisesRegexp(ValueError, expected_msg, loader.load_role_definition_from_file,
file_path=file_path)
# Invalid permission type which doesn't exist
file_path = os.path.join(get_fixtures_base_path(), 'rbac_invalid/roles/role_two.yaml')
expected_msg = '.*Failed validating \'enum\'.*'
self.assertRaisesRegexp(jsonschema.ValidationError, expected_msg,
loader.load_role_definition_from_file, file_path=file_path)
def test_load_user_role_assignments_success(self):
loader = RBACDefinitionsLoader()
file_path = os.path.join(get_fixtures_base_path(), 'rbac/assignments/user3.yaml')
user_role_assignment_api = loader.load_user_role_assignments_from_file(file_path=file_path)
self.assertEqual(user_role_assignment_api.username, 'user3')
self.assertEqual(user_role_assignment_api.description, 'Observer assignments')
self.assertEqual(user_role_assignment_api.roles, ['observer'])
def test_load_role_definitions_duplicate_role_definition(self):
loader = RBACDefinitionsLoader()
# Try to load all the roles from disk where two definitions refer to the same role
file_path1 = os.path.join(get_fixtures_base_path(), 'rbac_invalid/roles/role_three1.yaml')
file_path2 = os.path.join(get_fixtures_base_path(), 'rbac_invalid/roles/role_three2.yaml')
file_paths = [file_path1, file_path2]
loader._get_role_definitions_file_paths = mock.Mock()
loader._get_role_definitions_file_paths.return_value = file_paths
expected_msg = 'Duplicate definition file found for role "role_three_name_conflict"'
self.assertRaisesRegexp(ValueError, expected_msg, loader.load_role_definitions)
def test_load_role_definitions_disabled_role_definition(self):
loader = RBACDefinitionsLoader()
# Disabled role which means this method shouldn't include it in the result
file_path = os.path.join(get_fixtures_base_path(), 'rbac/roles/role_disabled.yaml')
file_paths = [file_path]
loader._get_role_definitions_file_paths = mock.Mock()
loader._get_role_definitions_file_paths.return_value = file_paths
result = loader.load_role_definitions()
self.assertItemsEqual(result, [])
def test_load_role_definitions_empty_definition_file(self):
loader = RBACDefinitionsLoader()
file_path = os.path.join(get_fixtures_base_path(), 'rbac_invalid/roles/role_empty.yaml')
file_paths = [file_path]
loader._get_role_definitions_file_paths = mock.Mock()
loader._get_role_definitions_file_paths.return_value = file_paths
expected_msg = 'Role definition file .+? is empty and invalid'
self.assertRaisesRegexp(ValueError, expected_msg, loader.load_role_definitions)
def test_load_user_role_assignments_duplicate_user_definition(self):
loader = RBACDefinitionsLoader()
# Try to load all the user role assignments from disk where two definitions refer to the
# same user
file_path1 = os.path.join(get_fixtures_base_path(),
'rbac_invalid/assignments/user_foo1.yaml')
file_path2 = os.path.join(get_fixtures_base_path(),
'rbac_invalid/assignments/user_foo2.yaml')
file_paths = [file_path1, file_path2]
loader._get_role_assiginments_file_paths = mock.Mock()
loader._get_role_assiginments_file_paths.return_value = file_paths
expected_msg = 'Duplicate definition file found for user "userfoo"'
self.assertRaisesRegexp(ValueError, expected_msg, loader.load_user_role_assignments)
def test_load_user_role_assignments_disabled_assignment(self):
loader = RBACDefinitionsLoader()
# Disabled role assignment which means this method shouldn't include it in the result
file_path = os.path.join(get_fixtures_base_path(), 'rbac/assignments/user_disabled.yaml')
file_paths = [file_path]
loader._get_role_assiginments_file_paths = mock.Mock()
loader._get_role_assiginments_file_paths.return_value = file_paths
result = loader.load_user_role_assignments()
self.assertItemsEqual(result, [])
def test_load_user_role_assignments_empty_definition_file(self):
loader = RBACDefinitionsLoader()
file_path = os.path.join(get_fixtures_base_path(),
'rbac_invalid/assignments/user_empty.yaml')
file_paths = [file_path]
loader._get_role_assiginments_file_paths = mock.Mock()
loader._get_role_assiginments_file_paths.return_value = file_paths
expected_msg = 'Role assignment file .+? is empty and invalid'
self.assertRaisesRegexp(ValueError, expected_msg, loader.load_user_role_assignments)
def test_load_sample_role_definition(self):
"""
Validate that the sample role definition which we ship with default installation works.
"""
loader = RBACDefinitionsLoader()
file_path = os.path.join(get_fixtures_base_path(), 'rbac/roles/role_sample.yaml')
role_api = loader.load_role_definition_from_file(file_path=file_path)
self.assertEqual(role_api.name, 'sample')
self.assertFalse(role_api.enabled)
def test_load_sample_user_role_assignment_definition(self):
"""
Validate that the sample user role assignment definition which we ship with default
installation works.
"""
loader = RBACDefinitionsLoader()
file_path = os.path.join(get_fixtures_base_path(), 'rbac/assignments/user_sample.yaml')
assignment_api = loader.load_user_role_assignments_from_file(file_path=file_path)
self.assertEqual(assignment_api.username, 'stackstorm_user')
self.assertFalse(assignment_api.enabled)
| apache-2.0 | -8,715,461,840,394,912,000 | 45.593407 | 100 | 0.684788 | false |
arshbot/Slack-Analytics | scripts/help.py | 1 | 1231 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# help script that details the list of commands
def help(response):
text = "PantherBot works by prefacing commands with \"!\"\n"
text += "Commands:\n"
text += "```!help\n"
text += "!coin\n"
text += "!helloworld\n"
text += "!version\n"
text += "!fortune\n"
text += "!flip <Optional:String>\n"
text += "!unflip <Optional:String>\n"
text += "!rage <Optional:String>\n"
text += "!catfact\n"
text += "!pugbomb <int>\n"
text += "!taskme\n"
text += "!poll <begin/start/end/results> [arguments followed by a `;`]"
text += "!talk <String>\n"
text += "\"Hey PantherBot\"```\n"
text += "Try saying `Hey PantherBot` or `!coin`"
motext = "Admins are able to use admin commands prefaced with \"$\"\n"
motext += "```$calendar add ; <Title> ; <Date in format YYYY-MM-DD> ; <Start time in format HH:mm> ; <End time in format HH:mm> ; <Description> ; <Location>\n" # noqa: 501
motext += "$admin <reconnect/update>\n"
motext += "$log <true/false> <channels>```\n"
motext += "Got suggestions for PantherBot? Fill out our typeform to leave your ideas! https://goo.gl/rEb0B7" # noqa: 501
return [text, motext]
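

if __name__ == '__main__':
    # Quick smoke test (assumption: the bot framework normally passes a
    # Slack event object as 'response'; help() does not actually use it).
    for block in help(None):
        print(block)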
| mpl-2.0 | 8,409,772,346,828,189,000 | 40.033333 | 176 | 0.594639 | false |
Ameriks/velo.lv | velo/team/views.py | 1 | 20504 | from django.contrib import messages
from django.http import Http404, HttpResponseRedirect
from django.utils import timezone
from django.views.generic import ListView, DetailView
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from difflib import get_close_matches
from django_tables2 import SingleTableView
from django.contrib.auth.mixins import LoginRequiredMixin
from extra_views import NamedFormsetsMixin, CreateWithInlinesView, UpdateWithInlinesView, InlineFormSet
import datetime
from velo.core.formsets import CustomBaseInlineFormSet
from velo.core.models import Competition
from velo.payment.utils import get_total
from velo.registration.models import Participant, Application
from velo.team.forms import MemberInlineForm, TeamForm
from velo.team.models import Team, Member, MemberApplication
from velo.team.tables import TeamTable, TeamMyTable
from velo.velo.mixins.forms import GetClassNameMixin
from velo.velo.mixins.views import SetCompetitionContextMixin, SingleTableViewWithRequest, RequestFormKwargsMixin, NeverCacheMixin
class TeamAppliedView(SetCompetitionContextMixin, ListView):
"""
    This class is used to display teams that have applied to the competition.
    This is an optimized view.
"""
model = Team
template_name = 'team/applied.html'
def get(self, *args, **kwargs):
self.set_competition(kwargs.get('pk'))
self.set_distances(only_w_teams=True) # Based on self.competition
self.set_distance(self.request.GET.get('distance', None))
return super(TeamAppliedView, self).get(*args, **kwargs)
def get_queryset(self):
queryset = super(TeamAppliedView, self).get_queryset()
queryset = queryset.filter(distance=self.distance, member__memberapplication__competition=self.competition, status__gte=0)
search = self.request.GET.get('search', None)
if search:
queryset = queryset.filter(title__icontains=search)
queryset = queryset.order_by('-is_featured', 'title',
'member__memberapplication__kind', 'member__memberapplication__participant__primary_number__number',)
queryset = queryset.values_list('id', 'title', 'is_featured',
'member__first_name', 'member__last_name', 'member__birthday',
'member__memberapplication__kind',
'member__memberapplication__participant__primary_number__number',
'member__memberapplication__participant_id',
)
return queryset
class TeamListView(SingleTableViewWithRequest):
model = Team
table_class = TeamTable
def get(self, *args, **kwargs):
self.set_competition(kwargs.get('pk'))
self.set_distances(only_w_teams=True) # Based on self.competition
self.set_distance(self.request.GET.get('distance', None))
return super(TeamListView, self).get(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ListView, self).get_context_data(**kwargs)
table = self.get_table(request=self.request, request_kwargs=self.kwargs)
context[self.get_context_table_name(table)] = table
context.update({'competition': self.competition})
context.update({'distances': self.distances})
context.update({'distance_active': self.distance})
context.update({'banners': self.get_banners()})
return context
def get_queryset(self):
queryset = super(TeamListView, self).get_queryset()
queryset = queryset.filter(distance=self.distance, distance__competition_id__in=self.competition.get_ids(), status__gte=0)
if self.request.GET.get("search", None):
queryset = queryset.filter(title__icontains=self.request.GET.get("search", None))
return queryset
class TeamView(DetailView):
model = Team
pk_url_kwarg = 'pk2'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({'competition': Competition.objects.get(id=self.kwargs.get('pk'))})
context.update({'members': self.object.member_set.filter(status=Member.STATUS_ACTIVE).order_by('last_name')})
for member in context["members"]:
if Participant.objects.filter(slug=member.slug, is_shown_public=False).count():
member.first_name = member.last_name = _("Anonymized")
setattr(member, "not_public", True)
return context
class TeamMemberProfileView(DetailView):
model = Member
pk_url_kwarg = 'pk3'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({'competition': Competition.objects.get(id=self.kwargs.get('pk'))})
context.update({'members': self.object.team.member_set.filter(status=Member.STATUS_ACTIVE).order_by('last_name')})
if Participant.objects.filter(slug=context["member"].slug, is_shown_public=False).count():
context["member"].first_name = context["member"].last_name = _("Anonymized")
setattr(context["member"], "not_public", True)
for member in context["members"]:
if Participant.objects.filter(slug=member.slug, is_shown_public=False).count():
member.first_name = member.last_name = _("Anonymized")
setattr(member, "not_public", True)
return context
class MyTeamList(NeverCacheMixin, LoginRequiredMixin, SingleTableView):
model = Team
table_class = TeamMyTable
template_name = 'team/team_list_my.html'
paginate_by = 100
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(owner=self.request.user).select_related('distance', 'distance__competition', 'distance__competition__parent')
return queryset
class MemberInline(GetClassNameMixin, InlineFormSet):
can_order = False
model = Member
formset_class = CustomBaseInlineFormSet
form_class = MemberInlineForm
competition = None
fields = MemberInlineForm.Meta.fields
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.object:
self.competition = self.object.distance.competition
@property
def can_delete(self):
delete_date_obj = datetime.date.today()
if self.competition and self.competition.params:
delete_date = self.competition.params_dict.get('team_member_delete_final', None)
if delete_date:
delete_date_obj = datetime.datetime.strptime(delete_date, '%Y-%m-%d').date()
        return datetime.date.today() <= delete_date_obj
@property
def extra(self):
if self.object and self.object.member_set.count() > 0:
return 0
else:
return 1
def get_formset_kwargs(self):
kwargs = super(MemberInline, self).get_formset_kwargs()
kwargs.update({'empty_form_class': self.form_class})
kwargs.update({'required': 1})
kwargs.update({'can_add_new': True})
kwargs.update({'max_num': self.competition.params_dict.get('team_member_count', 1000) if self.competition else 1000})
# Quick fix for women teams - there can only be 2 members in women teams.
if self.object and self.object.is_w:
kwargs.update({'max_num': 2})
kwargs.update({'queryset': Member.objects.filter(status=Member.STATUS_ACTIVE) })
return kwargs
def get_extra_form_kwargs(self):
kwargs = super(MemberInline, self).get_extra_form_kwargs()
kwargs.update({'request': self.request})
kwargs.update({'request_kwargs': self.kwargs})
return kwargs
class TeamCreateView(NeverCacheMixin, LoginRequiredMixin, RequestFormKwargsMixin, NamedFormsetsMixin, CreateWithInlinesView):
template_name = 'team/team_form.html'
inlines = [MemberInline, ]
inlines_names = ['member']
model = Team
form_class = TeamForm
def get_success_url(self):
return reverse('account:team_list')
class TeamUpdateView(NeverCacheMixin, LoginRequiredMixin, RequestFormKwargsMixin, NamedFormsetsMixin, UpdateWithInlinesView):
template_name = 'team/team_form.html'
inlines = [MemberInline, ]
inlines_names = ['member']
model = Team
form_class = TeamForm
pk_url_kwarg = 'pk2'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
if not self.request.user.has_perm('team.change_member'):
competition = self.object.distance.competition
if competition.get_root().id == 1:
next_competition = self.object.distance.competition.children.filter(
competition_date__gt=timezone.now())[:1]
context.update({'next_competition': next_competition[0] if next_competition else None})
elif competition.competition_date and competition.competition_date > datetime.date.today():
context.update({'next_competition': competition})
return context
def get_success_url(self):
return reverse('account:team_list')
def get_queryset(self):
queryset = super(TeamUpdateView, self).get_queryset()
if not self.request.user.is_superuser:
queryset = queryset.filter(owner=self.request.user)
return queryset
def post(self, request, *args, **kwargs):
ret = super().post(request, *args, **kwargs)
if request.POST.get('submit_pay', None):
next_competition = None
competition = self.object.distance.competition
if competition.get_root().id == 1:
next_competition = self.object.distance.competition.children.filter(competition_date__gt=timezone.now())[:1]
elif competition.competition_date and competition.competition_date > datetime.date.today():
next_competition = [competition, ]
if next_competition:
next_competition = next_competition[0]
application = Application.objects.create(competition=next_competition, email=request.user.email)
for member in self.object.member_set.filter(status=Member.STATUS_ACTIVE):
price = None
total = get_total(next_competition, self.object.distance_id, member.birthday.year)
if total:
price = total.get('price_obj', None)
application.participant_set.create(first_name=member.first_name,
last_name=member.last_name,
country=member.country,
birthday=member.birthday,
ssn=member.ssn,
gender=member.gender,
competition=next_competition,
distance=self.object.distance,
team_name=self.object.title,
price=price
)
return HttpResponseRedirect(reverse('application', kwargs={'slug': application.code}))
return ret
class TeamApplyList(NeverCacheMixin, LoginRequiredMixin, RequestFormKwargsMixin, NamedFormsetsMixin, DetailView):
model = Team
template_name = 'team/team_apply_list.html'
pk_url_kwarg = 'pk2'
def get_queryset(self):
queryset = super(TeamApplyList, self).get_queryset()
queryset = queryset.filter(owner=self.request.user)
return queryset
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
competition = self.object.distance.competition
child_competitions = competition.get_children()
if child_competitions:
competitions = child_competitions
else:
competitions = (competition, )
final_competitions = []
for competition in competitions:
members = MemberApplication.objects.filter(competition=competition, member__team=self.object).order_by('kind')
final_competitions.append((competition, members))
if competition.competition_date > datetime.date.today():
break
context.update({'competitions': final_competitions})
return context
def post(self, request, *args, **kwargs):
self.object = self.get_object()
context = self.get_context_data(object=self.object)
pay_members = request.POST.getlist('pay_member')
if pay_members:
member_ids = {}
for pay_member in pay_members:
competition_id, member_id = pay_member.split('__')
if not competition_id in member_ids:
member_ids.update({competition_id: []})
member_ids.get(competition_id).append(member_id)
key = list(member_ids.keys())[0]
competition = Competition.objects.get(id=key)
if request.POST.get('kind') == 'all_season' and competition.parent.complex_payment_enddate > timezone.now():
competition = competition.parent
application = Application.objects.create(competition=competition, email=request.user.email)
for member_id in member_ids.get(key):
member = self.object.member_set.get(id=member_id)
price = None
total = get_total(competition, self.object.distance_id, member.birthday.year)
if total:
price = total.get('price_obj', None)
application.participant_set.create(first_name=member.first_name,
last_name=member.last_name,
country=member.country,
birthday=member.birthday,
gender=member.gender,
ssn=member.ssn,
competition=competition,
distance=self.object.distance,
team_name=self.object.title,
price=price,
phone_number=member.phone_number,
)
return HttpResponseRedirect(reverse('application', kwargs={'slug': application.code}))
else:
return self.get(request, *args, **kwargs)
class TeamApply(NeverCacheMixin, LoginRequiredMixin, RequestFormKwargsMixin, NamedFormsetsMixin, DetailView):
model = Team
template_name = 'team/team_apply.html'
pk_url_kwarg = 'pk2'
def get_queryset(self):
queryset = super(TeamApply, self).get_queryset()
if not self.request.user.has_perm('registration.add_number'):
queryset = queryset.filter(owner=self.request.user)
return queryset
def get_context_data(self, **kwargs):
context = super(TeamApply, self).get_context_data(**kwargs)
competition = Competition.objects.get(id=self.kwargs.get('competition_pk'))
team_competition = self.object.distance.competition
child_competitions = team_competition.get_children()
if child_competitions:
competitions = child_competitions
else:
competitions = (team_competition, )
if competition not in competitions:
raise Http404
members = Member.objects.filter(team=self.object, status=Member.STATUS_ACTIVE).extra(select={
'kind': 'Select team_memberapplication.kind from team_memberapplication where team_memberapplication.member_id = team_member.id and team_memberapplication.competition_id=%s'
}, select_params=(competition.id, ))
context.update({'members': members, 'competition': competition, 'team_competition': team_competition})
return context
def match_applied_to_participant(self, application):
distance = application.member.team.distance
application.participant = None
application.participant_unpaid = None
application.participant_potential = None
participant = Participant.objects.filter(competition_id__in=application.competition.get_ids(), slug=application.member.slug, is_participating=True, distance=distance)
if participant:
application.participant = participant[0]
else:
participant = Participant.objects.filter(competition_id__in=application.competition.get_ids(), slug=application.member.slug, distance=distance)
if participant:
application.participant_unpaid = participant[0]
else:
slugs = [obj.slug for obj in Participant.objects.filter(competition_id__in=application.competition.get_ids(), distance=distance, is_participating=True)]
matches = get_close_matches(application.member.slug, slugs, 1, 0.5)
if matches:
participants = Participant.objects.filter(competition=application.competition, slug=matches[0], distance=distance).order_by('-id')
if participants:
application.participant_potential = participants[0]
application.save()
def post(self, request, *args, **kwargs):
self.object = self.get_object()
context = self.get_context_data(object=self.object)
team_competition = context.get('team_competition')
competition = context.get('competition')
riders = []
reserve = []
nothing = []
for member in context.get('members'):
data = int(request.POST.get('member_%i' % member.id))
if data == MemberApplication.KIND_PARTICIPANT:
riders.append(member.id)
elif data == MemberApplication.KIND_RESERVE:
reserve.append(member.id)
else:
nothing.append(member.id)
max_team_riders = team_competition.params_dict.get('max_team_riders', 1000)
max_team_reserve = team_competition.params_dict.get('max_team_reserve', 1000)
if len(riders) > max_team_riders:
messages.error(request, _('Too many team members marked as participants. MAX-%i') % max_team_riders)
elif len(reserve) > max_team_reserve:
messages.error(request, _('Too many team members marked as reserve. MAX-%i') % max_team_reserve)
else:
for rider in riders:
application, created = MemberApplication.objects.get_or_create(member_id=rider, competition=competition, defaults={'kind': MemberApplication.KIND_PARTICIPANT})
if not created:
application.kind = MemberApplication.KIND_PARTICIPANT
application.save()
self.match_applied_to_participant(application)
for rider in reserve:
application, created = MemberApplication.objects.get_or_create(member_id=rider, competition=competition, defaults={'kind': MemberApplication.KIND_RESERVE})
if not created:
application.kind = MemberApplication.KIND_RESERVE
application.save()
self.match_applied_to_participant(application)
MemberApplication.objects.filter(competition=competition).filter(member_id__in=nothing).delete()
messages.info(request, _('Successfuly saved.'))
if 'pk' in self.kwargs:
return HttpResponseRedirect(reverse('manager:team_apply_list', kwargs={'pk2': self.object.id, 'pk': self.kwargs.get('pk')}))
else:
return HttpResponseRedirect(reverse('account:team_apply_list', kwargs={'pk2': self.object.id}))
return self.render_to_response(context)
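

# Illustrative URL wiring sketch (an assumption -- the real routes live in
# the project's urls.py; the names follow the reverse() calls used above):
#
#     url(r'^(?P<pk>\d+)/teams/$', TeamListView.as_view(), name='teams'),
#     url(r'^(?P<pk>\d+)/teams/applied/$', TeamAppliedView.as_view(), name='teams_applied'),
#     url(r'^account/teams/$', MyTeamList.as_view(), name='team_list'),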
| gpl-3.0 | -7,865,938,040,519,920,000 | 43.573913 | 185 | 0.615587 | false |
zerothi/sisl | sisl/viz/plotly/input_fields/queries.py | 1 | 17709 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from collections import defaultdict
import itertools
import numpy as np
from .._input_field import InputField
from .dropdown import AtomSelect, SpeciesSelect, OrbitalsNameSelect, SpinSelect
from ..configurable import Configurable
class QueriesInput(InputField):
"""
Parameters
----------
queryForm: list of InputField
The list of input fields that conform a query.
"""
dtype = "array-like of dict"
_type = 'queries'
_default = {
"width": "s100%",
"queryForm": []
}
def __init__(self, *args, queryForm=[], help="", **kwargs):
query_form = self._sanitize_queryform(queryForm)
inputFieldAttrs = {
**kwargs.get("inputFieldAttrs", {}),
"queryForm": query_form
}
def get_queryform_help():
return "\n\t".join([f"'{param.key}': {param.help}" for param in query_form])
help += "\n\n Each item is a dict. Structure of the expected dicts:{\n\t" + get_queryform_help() + "\n}"
super().__init__(*args, **kwargs, help=help, inputFieldAttrs = inputFieldAttrs)
def get_query_param(self, key, **kwargs):
"""
Gets the parameter info for a given key. It uses the Configurable.get_param method.
"""
return Configurable.get_param(self, key, paramsExtractor = lambda obj: obj.inputField["queryForm"], **kwargs)
def get_param(self, *args, **kwargs):
"""
Just a clone of get_query_param.
Because Configurable looks for this method when modifying parameters, but the other name is clearer.
"""
return self.get_query_param(*args, **kwargs)
def modify_query_param(self, key, *args, **kwargs):
"""
Uses Configurable.modify_param to modify a parameter inside QueryForm
"""
return Configurable.modify_param(self, key, *args, **kwargs)
def complete_query(self, query, **kwargs):
"""
Completes a partially build query with the default values
Parameters
-----------
query: dict
the query to be completed.
**kwargs:
other keys that need to be added to the query IN CASE THEY DON'T ALREADY EXIST
"""
return {
"active": True,
**{param.key: param.default for param in self.inputField["queryForm"]},
**kwargs,
**query
}
def filter_df(self, df, query, key_to_cols, raise_not_active=False):
"""
Filters a dataframe according to a query
Parameters
-----------
df: pd.DataFrame
the dataframe to filter.
query: dict
the query to be used as a filter. Can be incomplete, it will be completed using
`self.complete_query()`
        key_to_cols: dict
            A dict mapping query keys to dataframe columns, i.e. {key: col},
            where key is the key of the parameter in the query and col the
            corresponding column in the dataframe.
"""
query = self.complete_query(query)
if raise_not_active:
            if not query["active"]:
raise ValueError(f"Query {query} is not active and you are trying to use it")
query_str = []
for key, val in query.items():
key = key_to_cols.get(key, key)
if key in df and val is not None:
if isinstance(val, (np.ndarray, tuple)):
val = np.ravel(val).tolist()
query_str.append(f'{key}=={repr(val)}')
return df.query(" & ".join(query_str))
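    # Usage sketch (an assumption for illustration: 'df' carries the columns
    # named in 'key_to_cols', e.g. an orbital table with an 'atom' column):
    #
    #     field.filter_df(df, {"atoms": [0, 1]}, {"atoms": "atom"})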
def _sanitize_queryform(self, queryform):
"""
Parses a query form to fields, converting strings
to the known input fields (under self._fields). As an example,
see OrbitalQueries.
"""
sanitized_form = []
for i, field in enumerate(queryform):
if isinstance(field, str):
if field not in self._fields:
raise KeyError(
f"{self.__class__.__name__} has no pre-built field for '{field}'")
built_field = self._fields[field]['field'](
key=field, **{key: val for key, val in self._fields[field].items() if key != 'field'}
)
sanitized_form.append(built_field)
else:
sanitized_form.append(field)
return sanitized_form
def __getitem__(self, key):
for field in self.inputField['queryForm']:
if field.key == key:
return field
return super().__getitem__(key)
def __contains__(self, key):
for field in self.inputField['queryForm']:
if field.key == key:
return True
return False
class OrbitalQueries(QueriesInput):
"""
This class implements an input field that allows you to select orbitals by atom, species, etc...
"""
_fields = {
"species": {"field": SpeciesSelect, "name": "Species"},
"atoms": {"field": AtomSelect, "name": "Atoms"},
"orbitals": {"field": OrbitalsNameSelect, "name": "Orbitals"},
"spin": {"field": SpinSelect, "name": "Spin"},
}
_keys_to_cols = {
"atoms": "atom",
"orbitals": "orbital_name",
}
def _build_orb_filtering_df(self, geom):
import pandas as pd
orb_props = defaultdict(list)
del_key = set()
#Loop over all orbitals of the basis
for at, iorb in geom.iter_orbitals():
atom = geom.atoms[at]
orb = atom[iorb]
orb_props["atom"].append(at)
orb_props["Z"].append(atom.Z)
orb_props["species"].append(atom.symbol)
orb_props["orbital_name"].append(orb.name())
for key in ("n", "l", "m", "zeta"):
val = getattr(orb, key, None)
if val is None:
del_key.add(key)
orb_props[key].append(val)
for key in del_key:
del orb_props[key]
self.orb_filtering_df = pd.DataFrame(orb_props)
def update_options(self, geometry, spin=""):
"""
Updates the options of the orbital queries.
Parameters
-----------
geometry: sisl.Geometry
the geometry that contains the orbitals that can be selected.
spin: sisl.Spin, str or int
It is used to indicate the kind of spin so that the spin selector
(in case there is one) can display the appropiate options.
See also
---------
sisl.viz.plotly.input_fields.dropdown.SpinSelect
sisl.physics.Spin
"""
self.geometry = geometry
for key in ("species", "atoms", "orbitals"):
try:
self.get_query_param(key).update_options(geometry)
except KeyError:
pass
try:
self.get_query_param('spin').update_options(spin)
except KeyError:
pass
self._build_orb_filtering_df(geometry)
def get_options(self, key, **kwargs):
"""
Gets the options for a given key or combination of keys.
Parameters
------------
key: str, {"species", "atoms", "Z", "orbitals", "n", "l", "m", "zeta", "spin"}
the parameter that you want the options for.
Note that you can combine them with a "+" to get all the possible combinations.
You can get the same effect also by passing a list.
See examples.
**kwargs:
keyword arguments that add additional conditions to the query. The values of this
keyword arguments can be lists, in which case it indicates that you want a value
that is in the list. See examples.
Returns
----------
np.ndarray of shape (n_options, [n_keys])
all the possible options.
If only one key was provided, it is a one dimensional array.
Examples
-----------
>>> plot = H.plot.pdos()
>>> plot.get_param("requests").get_options("l", species="Au")
>>> plot.get_param("requests").get_options("n+l", atoms=[0,1])
"""
        # Get the dataframe
df = self.orb_filtering_df
# Filter the dataframe according to the constraints imposed by the kwargs,
# if there are any.
if kwargs:
query = ' & '.join([f'{self._keys_to_cols.get(k, k)}=={repr(v)}' for k, v in kwargs.items() if self._keys_to_cols.get(k, k) in df])
if query:
df = df.query(query)
# If + is in key, it is a composite key. In that case we are going to
# split it into all the keys that are present and get the options for all
# of them. At the end we are going to return a list of tuples that will be all
# the possible combinations of the keys.
        if not isinstance(key, str):
            # Accept a list of keys too, as the docstring promises.
            key = "+".join(key)
        keys = [self._keys_to_cols.get(k, k) for k in key.split("+")]
# Spin values are not stored in the orbital filtering dataframe. If the options
# for spin are requested, we need to pop the key out and get the current options
# for spin from the input field
spin_in_keys = "spin" in keys
if spin_in_keys:
spin_key_i = keys.index("spin")
keys.remove("spin")
spin_options = self.get_param("spin").options
# We might have some constraints on what the spin value can be
if "spin" in kwargs:
spin_options = set(spin_options).intersection(kwargs["spin"])
# Now get the unique options from the dataframe
if keys:
            options = df.drop_duplicates(subset=keys)[keys].values.astype(object)
else:
# It might be the only key was "spin", then we are going to fake it
# to get an options array that can be treated in the same way.
            options = np.array([[]], dtype=object)
# If "spin" was one of the keys, we are going to incorporate the spin options, taking into
# account the position (column index) where they are expected to be returned.
if spin_in_keys:
options = np.concatenate([np.insert(options, spin_key_i, spin, axis=1) for spin in spin_options])
# Squeeze the options array, just in case there is only one key
# There's a special case: if there is only one option for that key,
# squeeze converts it to a number, so we need to make sure there is at least 1d
if options.shape[1] == 1:
options = options.squeeze()
options = np.atleast_1d(options)
return options
def get_orbitals(self, query):
if "atoms" in query:
query["atoms"] = self.geometry._sanitize_atoms(query["atoms"])
filtered_df = self.filter_df(self.orb_filtering_df, query, self._keys_to_cols)
return filtered_df.index
def _split_query(self, query, on, only=None, exclude=None, query_gen=None, ignore_constraints=False, **kwargs):
"""
Splits a query into multiple queries based on one of its parameters.
Parameters
--------
query: dict
the query that we want to split
on: str, {"species", "atoms", "Z", "orbitals", "n", "l", "m", "zeta", "spin"}, or list of str
the parameter to split along.
Note that you can combine parameters with a "+" to split along multiple parameters
at the same time. You can get the same effect also by passing a list.
only: array-like, optional
if desired, the only values that should be plotted out of
all of the values that come from the splitting.
exclude: array-like, optional
values of the splitting that should not be plotted.
query_gen: function, optional
the request generator. It is a function that takes all the parameters for each
request that this method has come up with and gets a chance to do some modifications.
This may be useful, for example, to give each request a color, or a custom name.
ignore_constraints: boolean or array-like, optional
determines whether constraints (imposed by the query that you want to split)
on the parameters that we want to split along should be taken into consideration.
If `False`: all constraints considered.
If `True`: no constraints considered.
If array-like: parameters contained in the list ignore their constraints.
**kwargs:
keyword arguments that go directly to each new request.
This is useful to add extra filters. For example:
`self._split_query(request, on="orbitals", spin=[0])`
will split the request on the different orbitals but will take
only the contributions from spin up.
"""
if exclude is None:
exclude = []
# Divide the splitting request into all the parameters
if isinstance(on, str):
on = on.split("+")
# Get the current values of the parameters that we want to split the request on
# because these will be our constraints. If a parameter is set to None or not
# provided, we have no constraints for that parameter.
constraints = {}
if ignore_constraints is not True:
if ignore_constraints is False:
ignore_constraints = ()
for key in filter(lambda key: key not in ignore_constraints, on):
val = query.get(key, None)
if val is not None:
constraints[key] = val
# Knowing what are our constraints (which may be none), get the available options
values = self.get_options("+".join(on), **constraints)
# We are going to make sure that, even if there was only one parameter to split on,
# the values are two dimensional. In this way, we can take the same actions for the
# case when there is only one parameter and the case when there are multiple.
if values.ndim == 1:
values = values.reshape(-1, 1)
# If no function to modify queries was provided we are just going to generate a
# dummy one that just returns the query as it gets it
if query_gen is None:
def query_gen(**kwargs):
return kwargs
# We ensure that on is a list even if there is only one parameter, for the same
# reason we ensured values was 2 dimensional
if isinstance(on, str):
on = on.split("+")
# Define the name that we will give to the new queries, using templating
# If a splitting parameter is not used by the name, we are going to
# append it, in order to make names unique and self-explanatory.
base_name = kwargs.pop("name", query.get("name", ""))
first_added = True
for key in on:
kwargs.pop(key, None)
if f"${key}" not in base_name:
base_name += f"{' | ' if first_added else ', '}{key}=${key}"
first_added = False
# Now build all the queries
queries = []
for i, value in enumerate(values):
if value not in exclude and (only is None or value in only):
# Use the name template to generate the name for this query
name = base_name
for key, val in zip(on, value):
name = name.replace(f"${key}", str(val))
# And append the new query to the queries
queries.append(
query_gen(**{
**query,
**{key: [val] for key, val in zip(on, value)},
"name": name, **kwargs
})
)
return queries
def _generate_queries(self, on, only=None, exclude=None, query_gen=None, **kwargs):
"""
Automatically generates queries based on the current options.
Parameters
--------
on: str, {"species", "atoms", "Z", "orbitals", "n", "l", "m", "zeta", "spin"} or list of str
the parameter to split along.
Note that you can combine parameters with a "+" to split along multiple parameters
at the same time. You can get the same effect also by passing a list.
only: array-like, optional
if desired, the only values that should be plotted out of
all of the values that come from the splitting.
exclude: array-like, optional
values that should not be plotted
query_gen: function, optional
the request generator. It is a function that takes all the parameters for each
request that this method has come up with and gets a chance to do some modifications.
This may be useful, for example, to give each request a color, or a custom name.
**kwargs:
keyword arguments that go directly to each request.
This is useful to add extra filters. For example:
`plot._generate_requests(on="orbitals", species=["C"])`
will split the PDOS on the different orbitals but will take
only those that belong to carbon atoms.
"""
return self._split_query({}, on=on, only=only, exclude=exclude, query_gen=query_gen, **kwargs)
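

# Illustrative usage sketch (assumptions: 'geom' is a sisl Geometry with an
# orbital basis and 'spin' a sisl Spin object; the calls mirror the
# docstrings above):
#
#     field = OrbitalQueries(key="requests", name="PDOS queries")
#     field.update_options(geom, spin)
#     field.get_options("n+l", species=["C"])
#     field._generate_queries(on="species")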
| lgpl-3.0 | 3,700,784,672,871,433,000 | 37.24838 | 143 | 0.577108 | false |
scikit-optimize/scikit-optimize | skopt/tests/test_parallel_cl.py | 1 | 5399 | """This script contains set of functions that test parallel optimization with
skopt, where constant liar parallelization strategy is used.
"""
from numpy.testing import assert_equal
from numpy.testing import assert_raises
from skopt.space import Real
from skopt import Optimizer
from skopt.benchmarks import branin
import skopt.learning as sol
from scipy.spatial.distance import pdist
import pytest
# list of all strategies for parallelization
supported_strategies = ["cl_min", "cl_mean", "cl_max"]
# test one acq function that incorporates the runtime, and one that does not
supported_acq_functions = ["EI", "EIps"]
# Extract available surrogates, so that new ones are used automatically
available_surrogates = [
getattr(sol, name) for name in sol.__all__
if "GradientBoostingQuantileRegressor" not in name
] # excluding the GradientBoostingQuantileRegressor, will open issue later
n_steps = 5 # number of steps to test the algorithms with
n_points = 4 # number of points to evaluate at a single step
# n_steps x n_points > n_random_restarts should hold
@pytest.mark.parametrize("strategy", supported_strategies)
@pytest.mark.parametrize("surrogate", available_surrogates)
@pytest.mark.parametrize("acq_func", supported_acq_functions)
def test_constant_liar_runs(strategy, surrogate, acq_func):
"""
Tests whether the optimizer runs properly during the random
initialization phase and beyond
Parameters
----------
* `strategy` [string]:
Name of the strategy to use during optimization.
    * `surrogate` [scikit-optimize surrogate class]:
        A class of the scikit-optimize surrogate used in Optimizer.
    * `acq_func` [string]:
        Name of the acquisition function used in Optimizer.
    """
optimizer = Optimizer(
base_estimator=surrogate(),
dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
acq_func=acq_func,
acq_optimizer='sampling',
random_state=0
)
# test arguments check
assert_raises(ValueError, optimizer.ask, {"strategy": "cl_maen"})
assert_raises(ValueError, optimizer.ask, {"n_points": "0"})
assert_raises(ValueError, optimizer.ask, {"n_points": 0})
for i in range(n_steps):
x = optimizer.ask(n_points=n_points, strategy=strategy)
# check if actually n_points was generated
assert_equal(len(x), n_points)
if "ps" in acq_func:
optimizer.tell(x, [[branin(v), 1.1] for v in x])
else:
optimizer.tell(x, [branin(v) for v in x])
@pytest.mark.parametrize("strategy", supported_strategies)
@pytest.mark.parametrize("surrogate", available_surrogates)
def test_all_points_different(strategy, surrogate):
"""
Tests whether the parallel optimizer always generates
different points to evaluate.
Parameters
----------
* `strategy` [string]:
Name of the strategy to use during optimization.
* `surrogate` [scikit-optimize surrogate class]:
A class of the scikit-optimize surrogate used in Optimizer.
"""
optimizer = Optimizer(
base_estimator=surrogate(),
dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
acq_optimizer='sampling',
random_state=1
)
tolerance = 1e-3 # distance above which points are assumed same
for i in range(n_steps):
x = optimizer.ask(n_points, strategy)
optimizer.tell(x, [branin(v) for v in x])
distances = pdist(x)
assert all(distances > tolerance)
@pytest.mark.parametrize("strategy", supported_strategies)
@pytest.mark.parametrize("surrogate", available_surrogates)
def test_same_set_of_points_ask(strategy, surrogate):
"""
For n_points not None, tests whether two consecutive calls to ask
return the same sets of points.
Parameters
----------
* `strategy` [string]:
Name of the strategy to use during optimization.
* `surrogate` [scikit-optimize surrogate class]:
A class of the scikit-optimize surrogate used in Optimizer.
"""
optimizer = Optimizer(
base_estimator=surrogate(),
dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
acq_optimizer='sampling',
random_state=2
)
for i in range(n_steps):
xa = optimizer.ask(n_points, strategy)
xb = optimizer.ask(n_points, strategy)
optimizer.tell(xa, [branin(v) for v in xa])
assert_equal(xa, xb) # check if the sets of points generated are equal
@pytest.mark.parametrize("strategy", supported_strategies)
@pytest.mark.parametrize("surrogate", available_surrogates)
def test_reproducible_runs(strategy, surrogate):
# two runs of the optimizer should yield exactly the same results
optimizer = Optimizer(
base_estimator=surrogate(random_state=1),
dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
acq_optimizer='sampling',
random_state=1
)
points = []
for i in range(n_steps):
x = optimizer.ask(n_points, strategy)
points.append(x)
optimizer.tell(x, [branin(v) for v in x])
    # the x's should be exactly as they are in `points`
optimizer = Optimizer(
base_estimator=surrogate(random_state=1),
dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
acq_optimizer='sampling',
random_state=1
)
for i in range(n_steps):
x = optimizer.ask(n_points, strategy)
assert points[i] == x
optimizer.tell(x, [branin(v) for v in x])
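# Illustrative sketch, not a test: a single constant-liar batch with skopt's
# Optimizer. Assumes scikit-optimize is installed; the "cl_min" strategy and
# the batch size are arbitrary example choices.
def _example_constant_liar_round():
    opt = Optimizer(
        dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
        acq_optimizer='sampling',
        random_state=0
    )
    x = opt.ask(n_points=4, strategy="cl_min")  # propose a batch of 4 points
    opt.tell(x, [branin(v) for v in x])  # report all evaluations back
    return opt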
| bsd-3-clause | -7,413,750,497,891,699,000 | 31.920732 | 79 | 0.665308 | false |
academicsystems/Qengine | pkg/routes/__init__.py | 1 | 1447 | # public imports
import flask
from flask import Flask
from flask import request
# private imports
from ..config.qconfig import Config
config = Config()
if not config.initialized:
config.init(None)
app = Flask(__name__)
# route hooks
@app.before_request
def log_request():
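    # Append each incoming request (method, URL, headers and JSON body, if
    # any) to a flat request log file.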
if config.QENGINE_LOG_REQUESTS:
with open('./logs/request.log','a+') as file:
file.write("%s %s\n%s%s\n\n" % (request.method, request.url, request.headers, request.get_json(silent=True)))
return None
@app.after_request
def log_response(response):
if config.QENGINE_LOG_RESPONSE:
with open('./logs/response.log','a+') as file:
file.write("%s\n%s%s\n\n" % (response.status, response.headers, response.get_data()))
return response
# import routes
from pkg.routes.getEngineInfo import qengine_einfo
from pkg.routes.getQuestionMetadata import qengine_qmetadata
from pkg.routes.postQuestionFile import qengine_create # not part of Opaque spec
from pkg.routes.start import qengine_start
from pkg.routes.process import qengine_process
from pkg.routes.stop import qengine_stop
# register route blueprints
app.register_blueprint(qengine_einfo)
app.register_blueprint(qengine_qmetadata)
app.register_blueprint(qengine_create)
app.register_blueprint(qengine_start)
app.register_blueprint(qengine_process)
app.register_blueprint(qengine_stop)
# catch all routes
@app.route('/<path:badpath>')
def fallback(badpath):
return flask.make_response('Path Not Found', 404) | apache-2.0 | 8,081,228,743,135,665,000 | 28.55102 | 112 | 0.769178 | false |
cmallwitz/Sunflower | application/plugins/file_list/trash_list.py | 1 | 1931 | from gi.repository import Gtk
from file_list import FileList
from gio_provider import TrashProvider
from operation import DeleteOperation
class TrashList(FileList):
"""Trash file list plugin
Generic operations related to trash management are provided with this
class. By extending FileList standard features such as drag and drop are
supported.
"""
def __init__(self, parent, notebook, options):
FileList.__init__(self, parent, notebook, options)
def _create_buttons(self):
"""Create titlebar buttons."""
options = self._parent.options
# empty trash button
self._empty_button = Gtk.Button.new_from_icon_name('edittrash', Gtk.IconSize.MENU)
self._empty_button.set_focus_on_click(False)
self._empty_button.set_tooltip_text(_('Empty trash'))
self._empty_button.connect('clicked', self.empty_trash)
self._title_bar.add_control(self._empty_button)
def empty_trash(self, widget=None, data=None):
"""Empty trash can."""
# ask user to confirm
dialog = Gtk.MessageDialog(
self._parent,
Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.QUESTION,
Gtk.ButtonsType.YES_NO,
_(
"All items in the Trash will be permanently deleted. "
"Are you sure?"
)
)
dialog.set_default_response(Gtk.ResponseType.YES)
result = dialog.run()
dialog.destroy()
# remove all items in trash
if result == Gtk.ResponseType.YES:
provider = self.get_provider()
# create delete operation
operation = DeleteOperation(
self._parent,
provider
)
operation.set_force_delete(True)
operation.set_selection(provider.list_dir(provider.get_root_path(None)))
# perform removal
operation.start()
def change_path(self, path=None, selected=None):
"""Change file list path."""
if path is not None and not path.startswith('trash://'):
path = 'trash:///'
FileList.change_path(self, path, selected)
| gpl-3.0 | -785,780,227,369,612,700 | 26.985507 | 84 | 0.68928 | false |
underyx/bkkcsirip | setup.py | 1 | 1096 | import io
from setuptools import setup
with io.open('README.md', encoding='utf-8') as f:
README = f.read()
setup(
name='bkkcsirip',
version='1.1.0',
url='https://github.com/underyx/bkkcsirip',
author='Bence Nagy',
    author_email='bence@underyx.me',
maintainer='Bence Nagy',
maintainer_email='[email protected]',
download_url='https://github.com/underyx/bkkcsirip/releases',
long_description=README,
py_modules=['bkkcsirip'],
package_data={'': ['LICENSE']},
install_requires=[
'arrow<0.9',
'oauthlib<2',
'redis<3',
'requests<3',
'requests-oauthlib<0.6',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
]
)
| mit | 5,421,074,572,178,243,000 | 28.621622 | 65 | 0.589416 | false |
avelino/bottle-auth | bottle_auth/core/auth.py | 1 | 54118 | #!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementations of various third-party authentication schemes.
All the classes in this file are class Mixins designed to be used with
web.py RequestHandler classes. The primary methods for each service are
authenticate_redirect(), authorize_redirect(), and get_authenticated_user().
The former should be called to redirect the user to, e.g., the OpenID
authentication page on the third party service, and the latter should
be called upon return to get the user data from the data returned by
the third party service.
They all take slightly different arguments due to the fact all these
services implement authentication and authorization slightly differently.
See the individual service classes below for complete documentation.
Example usage for Google OpenID::
class GoogleHandler(tornado.web.RequestHandler, tornado.auth.GoogleMixin):
@tornado.web.asynchronous
def get(self):
if self.get_argument("openid.mode", None):
self.get_authenticated_user(self.async_callback(self._on_auth))
return
self.authenticate_redirect()
def _on_auth(self, user):
if not user:
raise tornado.web.HTTPError(500, "Google auth failed")
# Save the user with, e.g., set_secure_cookie()
"""
import base64
import binascii
import cgi
import hashlib
import hmac
import logging
import time
import urllib
import urlparse
import uuid
import pprint
from bottle_auth.core import httpclient
from bottle_auth.core import escape
from bottle_auth.core.escape import _unicode
from bottle_auth.core.httputil import url_concat, bytes_type, b
import webob
import functools
import re
log = logging.getLogger('bottleauth.auth')
class HTTPError(Exception):
def __init__(self, code, description):
self.code = code
self.description = description
class HTTPRedirect(Exception):
def __init__(self, url):
self.url = url
class WebobRequestWrapper(object):
def __init__(self, inst):
self.inst = inst
def full_url(self):
return self.inst.url
@property
def uri(self):
return self.inst.url
@property
def host(self):
return self.inst.host
@property
def params(self):
return self.inst.params
@property
def arguments(self):
return self.inst.GET.dict_of_lists()
class WebobResponseWrapper(object):
def __init__(self, inst):
self.inst = inst
def set_cookie(self, name, value):
self.inst.set_cookie(name, value)
def get_cookie(self, name, default=None):
return self.inst.cookies.get(name, default)
def delete_cookie(self, name):
self.inst.delete_cookie(name)
class GenericAuth(object):
"""Generic base class to emulate a tornado.Request
using the current WSGI environ.
"""
def __init__(self, request, settings=None, cookie_monster=None):
self.settings = settings or {}
if not isinstance(request, webob.Request):
request = webob.Request(request)
self.request = WebobRequestWrapper(request)
if isinstance(cookie_monster, webob.Response):
self.cookie_monster = WebobResponseWrapper(cookie_monster)
else:
self.cookie_monster = cookie_monster
def redirect(self, url):
raise HTTPRedirect(url)
def require_setting(self, name, feature="this feature"):
"""Raises an exception if the given app setting is not defined."""
if name not in self.settings:
raise Exception("You must define the '%s' setting in your "
"application to use %s" % (name, feature))
_ARG_DEFAULT = []
def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
"""Returns the value of the argument with the given name.
If default is not provided, the argument is considered to be
required, and we throw an HTTP 400 exception if it is missing.
If the argument appears in the url more than once, we return the
last value.
The returned value is always unicode.
"""
args = self.get_arguments(name, strip=strip)
if not args:
if default is self._ARG_DEFAULT:
raise HTTPError(400, "Missing argument %s" % name)
return default
return args[-1]
def get_arguments(self, name, strip=True):
"""Returns a list of the arguments with the given name.
If the argument is not present, returns an empty list.
The returned values are always unicode.
"""
values = []
for v in self.request.params.getall(name):
v = self.decode_argument(v, name=name)
if isinstance(v, unicode):
# Get rid of any weird control chars (unless decoding gave
# us bytes, in which case leave it alone)
v = re.sub(r"[\x00-\x08\x0e-\x1f]", " ", v)
if strip:
v = v.strip()
values.append(v)
return values
def decode_argument(self, value, name=None):
"""Decodes an argument from the request.
The argument has been percent-decoded and is now a byte string.
By default, this method decodes the argument as utf-8 and returns
a unicode string, but this may be overridden in subclasses.
This method is used as a filter for both get_argument() and for
values extracted from the url and passed to get()/post()/etc.
The name of the argument is provided if known, but may be None
(e.g. for unnamed groups in the url regex).
"""
return _unicode(value)
def async_callback(self, callback, *args, **kwargs):
"""Obsolete - catches exceptions from the wrapped function.
This function is unnecessary since Tornado 1.1.
"""
if callback is None:
return None
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
#FIXME what about the exception wrapper?
return callback
def get_cookie(self, name, default=None):
"""Gets the value of the cookie with the given name, else default."""
assert self.cookie_monster, 'Cookie Monster not set'
return self.cookie_monster.get_cookie(name, default)
def set_cookie(self, name, value, domain=None, expires=None, path="/",
expires_days=None, **kwargs):
"""Sets the given cookie name/value with the given options.
Additional keyword arguments are set on the Cookie.Morsel
directly.
See http://docs.python.org/library/cookie.html#morsel-objects
for available attributes.
"""
assert self.cookie_monster, 'Cookie Monster not set'
#, domain=domain, path=path)
self.cookie_monster.set_cookie(name, value)
def clear_cookie(self, name, path="/", domain=None):
"""Deletes the cookie with the given name."""
assert self.cookie_monster, 'Cookie Monster not set'
#, path=path, domain=domain)
self.cookie_monster.delete_cookie(name)
class OpenIdMixin(GenericAuth):
"""Abstract implementation of OpenID and Attribute Exchange.
See GoogleMixin below for example implementations.
"""
def authenticate_redirect(
self, callback_uri=None, ax_attrs=["name", "email", "language",
"username"]):
"""Returns the authentication URL for this service.
After authentication, the service will redirect back to the given
callback URI.
We request the given attributes for the authenticated user by
default (name, email, language, and username). If you don't need
all those attributes for your app, you can request fewer with
the ax_attrs keyword argument.
"""
callback_uri = callback_uri or self.request.uri
args = self._openid_args(callback_uri, ax_attrs=ax_attrs)
self.redirect(self._OPENID_ENDPOINT + "?" + urllib.urlencode(args))
def get_authenticated_user(self, callback):
"""Fetches the authenticated user data upon redirect.
This method should be called by the handler that receives the
redirect from the authenticate_redirect() or authorize_redirect()
methods.
"""
# Verify the OpenID response via direct request to the OP
# Recommendation @hmarrao, ref #3
args = dict((k, unicode(v[-1]).encode('utf-8')) for k, v in self.request.arguments.iteritems())
args["openid.mode"] = u"check_authentication"
url = self._OPENID_ENDPOINT
http = httpclient.AsyncHTTPClient()
log.debug("OpenID requesting {0} at uri {1}".format(args, url))
http.fetch(url, self.async_callback(
self._on_authentication_verified, callback),
method="POST", body=urllib.urlencode(args))
def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None):
url = urlparse.urljoin(self.request.full_url(), callback_uri)
args = {
"openid.ns": "http://specs.openid.net/auth/2.0",
"openid.claimed_id": "http://specs.openid.net/auth/2.0/"
"identifier_select",
"openid.identity": "http://specs.openid.net/auth/2.0/"
"identifier_select",
"openid.return_to": url,
"openid.realm": urlparse.urljoin(url, '/'),
"openid.mode": "checkid_setup",
}
if ax_attrs:
args.update({
"openid.ns.ax": "http://openid.net/srv/ax/1.0",
"openid.ax.mode": "fetch_request",
})
ax_attrs = set(ax_attrs)
required = []
if "name" in ax_attrs:
ax_attrs -= set(["name", "firstname", "fullname", "lastname"])
required += ["firstname", "fullname", "lastname"]
args.update({
"openid.ax.type.firstname":
"http://axschema.org/namePerson/first",
"openid.ax.type.fullname":
"http://axschema.org/namePerson",
"openid.ax.type.lastname":
"http://axschema.org/namePerson/last",
})
known_attrs = {
"email": "http://axschema.org/contact/email",
"language": "http://axschema.org/pref/language",
"username": "http://axschema.org/namePerson/friendly",
}
for name in ax_attrs:
args["openid.ax.type." + name] = known_attrs[name]
required.append(name)
args["openid.ax.required"] = ",".join(required)
if oauth_scope:
args.update({
"openid.ns.oauth":
"http://specs.openid.net/extensions/oauth/1.0",
"openid.oauth.consumer": self.request.host.split(":")[0],
"openid.oauth.scope": oauth_scope,
})
return args
def _on_authentication_verified(self, callback, response):
log.debug('Verifying token {0}'.format(pprint.pformat({
'status_code': response.status_code,
'headers': response.headers,
'error': response.error,
'body': response.body,
})))
if response.error or b("is_valid:true") not in response.body:
log.warning("Invalid OpenID response: %s", response.error or
response.body)
callback(None)
return
# Make sure we got back at least an email from attribute exchange
ax_ns = None
for name in self.request.arguments.iterkeys():
if name.startswith("openid.ns.") and \
self.get_argument(name) == u"http://openid.net/srv/ax/1.0":
ax_ns = name[10:]
break
def get_ax_arg(uri):
log.debug('Getting {0}'.format(uri))
if not ax_ns: return u""
prefix = "openid." + ax_ns + ".type."
ax_name = None
for name in self.request.arguments.iterkeys():
if self.get_argument(name) == uri and name.startswith(prefix):
part = name[len(prefix):]
ax_name = "openid." + ax_ns + ".value." + part
break
if not ax_name: return u""
return self.get_argument(ax_name, u"")
email = get_ax_arg("http://axschema.org/contact/email")
name = get_ax_arg("http://axschema.org/namePerson")
first_name = get_ax_arg("http://axschema.org/namePerson/first")
last_name = get_ax_arg("http://axschema.org/namePerson/last")
username = get_ax_arg("http://axschema.org/namePerson/friendly")
locale = get_ax_arg("http://axschema.org/pref/language").lower()
user = dict()
name_parts = []
if first_name:
user["first_name"] = first_name
name_parts.append(first_name)
if last_name:
user["last_name"] = last_name
name_parts.append(last_name)
if name:
user["name"] = name
elif name_parts:
user["name"] = u" ".join(name_parts)
elif email:
user["name"] = email.split("@")[0]
if email: user["email"] = email
if locale: user["locale"] = locale
if username: user["username"] = username
user['claimed_id'] = self.request.arguments.get('openid.claimed_id')[-1]
log.debug('Final step, got claimed_id {0}'.format(user['claimed_id']))
callback(user)
class OAuthMixin(GenericAuth):
"""Abstract implementation of OAuth.
See TwitterMixin and FriendFeedMixin below for example implementations.
"""
def authorize_redirect(self, callback_uri=None, extra_params=None):
"""Redirects the user to obtain OAuth authorization for this service.
Twitter and FriendFeed both require that you register a Callback
URL with your application. You should call this method to log the
user in, and then call get_authenticated_user() in the handler
you registered as your Callback URL to complete the authorization
process.
This method sets a cookie called _oauth_request_token which is
subsequently used (and cleared) in get_authenticated_user for
security purposes.
"""
if callback_uri and getattr(self, "_OAUTH_NO_CALLBACKS", False):
raise Exception("This service does not support oauth_callback")
http = httpclient.AsyncHTTPClient()
if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
http.fetch(self._oauth_request_token_url(callback_uri=callback_uri,
extra_params=extra_params),
self.async_callback(
self._on_request_token,
self._OAUTH_AUTHORIZE_URL,
callback_uri))
else:
http.fetch(self._oauth_request_token_url(), self.async_callback(
self._on_request_token, self._OAUTH_AUTHORIZE_URL, callback_uri))
def get_authenticated_user(self, callback):
"""Gets the OAuth authorized user and access token on callback.
This method should be called from the handler for your registered
OAuth Callback URL to complete the registration process. We call
callback with the authenticated user, which in addition to standard
attributes like 'name' includes the 'access_key' attribute, which
contains the OAuth access you can use to make authorized requests
to this service on behalf of the user.
"""
request_key = self.get_argument("oauth_token")
oauth_verifier = self.get_argument("oauth_verifier", None)
request_cookie = self.get_cookie("_oauth_request_token")
if not request_cookie:
log.warning("Missing OAuth request token cookie")
callback(None)
return
self.clear_cookie("_oauth_request_token")
cookie_key, cookie_secret = [base64.b64decode(i) for i in request_cookie.split("|")]
if cookie_key != request_key:
log.warning("Request token does not match cookie")
callback(None)
return
token = dict(key=cookie_key, secret=cookie_secret)
if oauth_verifier:
token["verifier"] = oauth_verifier
http = httpclient.AsyncHTTPClient()
http.fetch(self._oauth_access_token_url(token), self.async_callback(
self._on_access_token, callback))
    def _oauth_request_token_url(self, callback_uri=None, extra_params=None):
consumer_token = self._oauth_consumer_token()
url = self._OAUTH_REQUEST_TOKEN_URL
args = dict(
oauth_consumer_key=consumer_token["key"],
oauth_signature_method="HMAC-SHA1",
oauth_timestamp=str(int(time.time())),
oauth_nonce=binascii.b2a_hex(uuid.uuid4().bytes),
oauth_version=getattr(self, "_OAUTH_VERSION", "1.0a"),
)
if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
if callback_uri:
args["oauth_callback"] = urlparse.urljoin(
self.request.full_url(), callback_uri)
if extra_params: args.update(extra_params)
signature = _oauth10a_signature(consumer_token, "GET", url, args)
else:
signature = _oauth_signature(consumer_token, "GET", url, args)
args["oauth_signature"] = signature
return url + "?" + urllib.urlencode(args)
def _on_request_token(self, authorize_url, callback_uri, response):
if response.error:
raise Exception("Could not get request token")
request_token = _oauth_parse_response(response.body)
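        # Persist the request token across the redirect round-trip as a
        # "key|secret" cookie, base64-encoding each part.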
data = "|".join([base64.b64encode(request_token["key"]),
base64.b64encode(request_token["secret"])])
self.set_cookie("_oauth_request_token", data)
args = dict(oauth_token=request_token["key"])
if callback_uri:
args["oauth_callback"] = urlparse.urljoin(
self.request.full_url(), callback_uri)
self.redirect(authorize_url + "?" + urllib.urlencode(args))
def _oauth_access_token_url(self, request_token):
consumer_token = self._oauth_consumer_token()
url = self._OAUTH_ACCESS_TOKEN_URL
args = dict(
oauth_consumer_key=consumer_token["key"],
oauth_token=request_token["key"],
oauth_signature_method="HMAC-SHA1",
oauth_timestamp=str(int(time.time())),
oauth_nonce=binascii.b2a_hex(uuid.uuid4().bytes),
oauth_version=getattr(self, "_OAUTH_VERSION", "1.0a"),
)
if "verifier" in request_token:
args["oauth_verifier"]=request_token["verifier"]
if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
signature = _oauth10a_signature(consumer_token, "GET", url, args,
request_token)
else:
signature = _oauth_signature(consumer_token, "GET", url, args,
request_token)
args["oauth_signature"] = signature
return url + "?" + urllib.urlencode(args)
def _on_access_token(self, callback, response):
if response.error:
log.warning("Could not fetch access token")
callback(None)
return
access_token = _oauth_parse_response(response.body)
user = self._oauth_get_user(access_token, self.async_callback(
self._on_oauth_get_user, access_token, callback))
def _oauth_get_user(self, access_token, callback):
raise NotImplementedError()
def _on_oauth_get_user(self, access_token, callback, user):
if not user:
callback(None)
return
user["access_token"] = access_token
callback(user)
def _oauth_request_parameters(self, url, access_token, parameters={},
method="GET"):
"""Returns the OAuth parameters as a dict for the given request.
parameters should include all POST arguments and query string arguments
that will be sent with the request.
"""
consumer_token = self._oauth_consumer_token()
base_args = dict(
oauth_consumer_key=consumer_token["key"],
oauth_token=access_token["key"],
oauth_signature_method="HMAC-SHA1",
oauth_timestamp=str(int(time.time())),
oauth_nonce=binascii.b2a_hex(uuid.uuid4().bytes),
oauth_version=getattr(self, "_OAUTH_VERSION", "1.0a"),
)
args = {}
args.update(base_args)
args.update(parameters)
if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
signature = _oauth10a_signature(consumer_token, method, url, args,
access_token)
else:
signature = _oauth_signature(consumer_token, method, url, args,
access_token)
base_args["oauth_signature"] = signature
return base_args
class OAuth2Mixin(GenericAuth):
"""Abstract implementation of OAuth v 2."""
def authorize_redirect(self, redirect_uri=None, client_id=None,
                           client_secret=None, extra_params=None):
"""Redirects the user to obtain OAuth authorization for this service.
Some providers require that you register a Callback
URL with your application. You should call this method to log the
user in, and then call get_authenticated_user() in the handler
you registered as your Callback URL to complete the authorization
process.
"""
args = {
"redirect_uri": redirect_uri,
"client_id": client_id
}
if extra_params: args.update(extra_params)
self.redirect(
url_concat(self._OAUTH_AUTHORIZE_URL, args))
    def _oauth_request_token_url(self, redirect_uri=None, client_id=None,
                                 client_secret=None, code=None,
                                 extra_params=None):
url = self._OAUTH_ACCESS_TOKEN_URL
args = dict(
redirect_uri=redirect_uri,
code=code,
client_id=client_id,
client_secret=client_secret,
)
if extra_params: args.update(extra_params)
return url_concat(url, args)
class TwitterMixin(OAuthMixin):
"""Twitter OAuth authentication.
To authenticate with Twitter, register your application with
Twitter at http://twitter.com/apps. Then copy your Consumer Key and
Consumer Secret to the application settings 'twitter_consumer_key' and
'twitter_consumer_secret'. Use this Mixin on the handler for the URL
you registered as your application's Callback URL.
When your application is set up, you can use this Mixin like this
to authenticate the user with Twitter and get access to their stream::
class TwitterHandler(tornado.web.RequestHandler,
tornado.auth.TwitterMixin):
@tornado.web.asynchronous
def get(self):
if self.get_argument("oauth_token", None):
self.get_authenticated_user(self.async_callback(self._on_auth))
return
self.authorize_redirect()
def _on_auth(self, user):
if not user:
raise tornado.web.HTTPError(500, "Twitter auth failed")
# Save the user using, e.g., set_secure_cookie()
The user object returned by get_authenticated_user() includes the
attributes 'username', 'name', and all of the custom Twitter user
attributes describe at
http://apiwiki.twitter.com/Twitter-REST-API-Method%3A-users%C2%A0show
in addition to 'access_token'. You should save the access token with
the user; it is required to make requests on behalf of the user later
with twitter_request().
"""
_OAUTH_REQUEST_TOKEN_URL = "http://api.twitter.com/oauth/request_token"
_OAUTH_ACCESS_TOKEN_URL = "http://api.twitter.com/oauth/access_token"
_OAUTH_AUTHORIZE_URL = "http://api.twitter.com/oauth/authorize"
_OAUTH_AUTHENTICATE_URL = "http://api.twitter.com/oauth/authenticate"
_OAUTH_NO_CALLBACKS = False
def authenticate_redirect(self):
"""Just like authorize_redirect(), but auto-redirects if authorized.
This is generally the right interface to use if you are using
Twitter for single-sign on.
"""
http = httpclient.AsyncHTTPClient()
http.fetch(self._oauth_request_token_url(), self.async_callback(
self._on_request_token, self._OAUTH_AUTHENTICATE_URL, None))
def twitter_request(self, path, callback, access_token=None,
post_args=None, **args):
"""Fetches the given API path, e.g., "/statuses/user_timeline/btaylor"
The path should not include the format (we automatically append
".json" and parse the JSON output).
If the request is a POST, post_args should be provided. Query
string arguments should be given as keyword arguments.
All the Twitter methods are documented at
http://apiwiki.twitter.com/Twitter-API-Documentation.
Many methods require an OAuth access token which you can obtain
through authorize_redirect() and get_authenticated_user(). The
user returned through that process includes an 'access_token'
attribute that can be used to make authenticated requests via
this method. Example usage::
class MainHandler(tornado.web.RequestHandler,
tornado.auth.TwitterMixin):
@tornado.web.authenticated
@tornado.web.asynchronous
def get(self):
self.twitter_request(
"/statuses/update",
post_args={"status": "Testing Tornado Web Server"},
access_token=user["access_token"],
callback=self.async_callback(self._on_post))
def _on_post(self, new_entry):
if not new_entry:
# Call failed; perhaps missing permission?
self.authorize_redirect()
return
self.finish("Posted a message!")
"""
# Add the OAuth resource request signature if we have credentials
url = "http://api.twitter.com/1" + path + ".json"
if access_token:
all_args = {}
all_args.update(args)
all_args.update(post_args or {})
consumer_token = self._oauth_consumer_token()
method = "POST" if post_args is not None else "GET"
oauth = self._oauth_request_parameters(
url, access_token, all_args, method=method)
args.update(oauth)
if args: url += "?" + urllib.urlencode(args)
callback = self.async_callback(self._on_twitter_request, callback)
http = httpclient.AsyncHTTPClient()
if post_args is not None:
http.fetch(url, method="POST", body=urllib.urlencode(post_args),
callback=callback)
else:
http.fetch(url, callback=callback)
def _on_twitter_request(self, callback, response):
if response.error:
log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
return
callback(escape.json_decode(response.body))
def _oauth_consumer_token(self):
self.require_setting("twitter_consumer_key", "Twitter OAuth")
self.require_setting("twitter_consumer_secret", "Twitter OAuth")
return dict(
key=self.settings["twitter_consumer_key"],
secret=self.settings["twitter_consumer_secret"])
def _oauth_get_user(self, access_token, callback):
callback = self.async_callback(self._parse_user_response, callback)
self.twitter_request(
"/users/show/" + access_token["screen_name"],
access_token=access_token, callback=callback)
def _parse_user_response(self, callback, user):
if user:
user["username"] = user["screen_name"]
callback(user)
class FriendFeedMixin(OAuthMixin):
"""FriendFeed OAuth authentication.
To authenticate with FriendFeed, register your application with
FriendFeed at http://friendfeed.com/api/applications. Then
copy your Consumer Key and Consumer Secret to the application settings
'friendfeed_consumer_key' and 'friendfeed_consumer_secret'. Use
this Mixin on the handler for the URL you registered as your
application's Callback URL.
When your application is set up, you can use this Mixin like this
to authenticate the user with FriendFeed and get access to their feed::
class FriendFeedHandler(tornado.web.RequestHandler,
tornado.auth.FriendFeedMixin):
@tornado.web.asynchronous
def get(self):
if self.get_argument("oauth_token", None):
self.get_authenticated_user(self.async_callback(self._on_auth))
return
self.authorize_redirect()
def _on_auth(self, user):
if not user:
raise tornado.web.HTTPError(500, "FriendFeed auth failed")
# Save the user using, e.g., set_secure_cookie()
The user object returned by get_authenticated_user() includes the
attributes 'username', 'name', and 'description' in addition to
'access_token'. You should save the access token with the user;
it is required to make requests on behalf of the user later with
friendfeed_request().
"""
_OAUTH_VERSION = "1.0"
_OAUTH_REQUEST_TOKEN_URL = "https://friendfeed.com/account/oauth/request_token"
_OAUTH_ACCESS_TOKEN_URL = "https://friendfeed.com/account/oauth/access_token"
_OAUTH_AUTHORIZE_URL = "https://friendfeed.com/account/oauth/authorize"
_OAUTH_NO_CALLBACKS = True
_OAUTH_VERSION = "1.0"
def friendfeed_request(self, path, callback, access_token=None,
post_args=None, **args):
"""Fetches the given relative API path, e.g., "/bret/friends"
If the request is a POST, post_args should be provided. Query
string arguments should be given as keyword arguments.
All the FriendFeed methods are documented at
http://friendfeed.com/api/documentation.
Many methods require an OAuth access token which you can obtain
through authorize_redirect() and get_authenticated_user(). The
user returned through that process includes an 'access_token'
attribute that can be used to make authenticated requests via
this method. Example usage::
class MainHandler(tornado.web.RequestHandler,
tornado.auth.FriendFeedMixin):
@tornado.web.authenticated
@tornado.web.asynchronous
def get(self):
self.friendfeed_request(
"/entry",
post_args={"body": "Testing Tornado Web Server"},
access_token=self.current_user["access_token"],
callback=self.async_callback(self._on_post))
def _on_post(self, new_entry):
if not new_entry:
# Call failed; perhaps missing permission?
self.authorize_redirect()
return
self.finish("Posted a message!")
"""
# Add the OAuth resource request signature if we have credentials
url = "http://friendfeed-api.com/v2" + path
if access_token:
all_args = {}
all_args.update(args)
all_args.update(post_args or {})
consumer_token = self._oauth_consumer_token()
method = "POST" if post_args is not None else "GET"
oauth = self._oauth_request_parameters(
url, access_token, all_args, method=method)
args.update(oauth)
if args: url += "?" + urllib.urlencode(args)
callback = self.async_callback(self._on_friendfeed_request, callback)
http = httpclient.AsyncHTTPClient()
if post_args is not None:
http.fetch(url, method="POST", body=urllib.urlencode(post_args),
callback=callback)
else:
http.fetch(url, callback=callback)
def _on_friendfeed_request(self, callback, response):
if response.error:
log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
return
callback(escape.json_decode(response.body))
def _oauth_consumer_token(self):
self.require_setting("friendfeed_consumer_key", "FriendFeed OAuth")
self.require_setting("friendfeed_consumer_secret", "FriendFeed OAuth")
return dict(
key=self.settings["friendfeed_consumer_key"],
secret=self.settings["friendfeed_consumer_secret"])
def _oauth_get_user(self, access_token, callback):
callback = self.async_callback(self._parse_user_response, callback)
self.friendfeed_request(
"/feedinfo/" + access_token["username"],
include="id,name,description", access_token=access_token,
callback=callback)
def _parse_user_response(self, callback, user):
if user:
user["username"] = user["id"]
callback(user)
class GoogleMixin(OpenIdMixin, OAuthMixin):
"""Google Open ID / OAuth authentication.
No application registration is necessary to use Google for authentication
or to access Google resources on behalf of a user. To authenticate with
Google, redirect with authenticate_redirect(). On return, parse the
response with get_authenticated_user(). We send a dict containing the
values for the user, including 'email', 'name', and 'locale'.
Example usage::
class GoogleHandler(tornado.web.RequestHandler, tornado.auth.GoogleMixin):
@tornado.web.asynchronous
def get(self):
if self.get_argument("openid.mode", None):
self.get_authenticated_user(self.async_callback(self._on_auth))
return
self.authenticate_redirect()
def _on_auth(self, user):
if not user:
raise tornado.web.HTTPError(500, "Google auth failed")
# Save the user with, e.g., set_secure_cookie()
"""
_OPENID_ENDPOINT = "https://www.google.com/accounts/o8/ud"
_OAUTH_ACCESS_TOKEN_URL = "https://www.google.com/accounts/OAuthGetAccessToken"
def authorize_redirect(self, oauth_scope, callback_uri=None,
ax_attrs=["name","email","language","username"]):
"""Authenticates and authorizes for the given Google resource.
Some of the available resources are:
* Gmail Contacts - http://www.google.com/m8/feeds/
* Calendar - http://www.google.com/calendar/feeds/
* Finance - http://finance.google.com/finance/feeds/
You can authorize multiple resources by separating the resource
URLs with a space.
"""
callback_uri = callback_uri or self.request.uri
args = self._openid_args(callback_uri, ax_attrs=ax_attrs,
oauth_scope=oauth_scope)
self.redirect(self._OPENID_ENDPOINT + "?" + urllib.urlencode(args))
def get_authenticated_user(self, callback):
"""Fetches the authenticated user data upon redirect."""
# Look to see if we are doing combined OpenID/OAuth
oauth_ns = ""
for name, values in self.request.arguments.iteritems():
if name.startswith("openid.ns.") and \
values[-1] == u"http://specs.openid.net/extensions/oauth/1.0":
oauth_ns = name[10:]
break
token = self.get_argument("openid." + oauth_ns + ".request_token", "")
if token:
http = httpclient.AsyncHTTPClient()
token = dict(key=token, secret="")
http.fetch(self._oauth_access_token_url(token),
self.async_callback(self._on_access_token, callback))
else:
OpenIdMixin.get_authenticated_user(self, callback)
def _oauth_consumer_token(self):
self.require_setting("google_consumer_key", "Google OAuth")
self.require_setting("google_consumer_secret", "Google OAuth")
return dict(
key=self.settings["google_consumer_key"],
secret=self.settings["google_consumer_secret"])
def _oauth_get_user(self, access_token, callback):
OpenIdMixin.get_authenticated_user(self, callback)
class FacebookMixin(GenericAuth):
"""Facebook Connect authentication.
New applications should consider using `FacebookGraphMixin` below instead
of this class.
To authenticate with Facebook, register your application with
Facebook at http://www.facebook.com/developers/apps.php. Then
copy your API Key and Application Secret to the application settings
'facebook_api_key' and 'facebook_secret'.
When your application is set up, you can use this Mixin like this
to authenticate the user with Facebook::
class FacebookHandler(tornado.web.RequestHandler,
tornado.auth.FacebookMixin):
@tornado.web.asynchronous
def get(self):
if self.get_argument("session", None):
self.get_authenticated_user(self.async_callback(self._on_auth))
return
self.authenticate_redirect()
def _on_auth(self, user):
if not user:
raise tornado.web.HTTPError(500, "Facebook auth failed")
# Save the user using, e.g., set_secure_cookie()
The user object returned by get_authenticated_user() includes the
attributes 'facebook_uid' and 'name' in addition to session attributes
like 'session_key'. You should save the session key with the user; it is
required to make requests on behalf of the user later with
facebook_request().
"""
def authenticate_redirect(self, callback_uri=None, cancel_uri=None,
extended_permissions=None):
"""Authenticates/installs this app for the current user."""
self.require_setting("facebook_api_key", "Facebook Connect")
callback_uri = callback_uri or self.request.uri
args = {
"api_key": self.settings["facebook_api_key"],
"v": "1.0",
"fbconnect": "true",
"display": "page",
"next": urlparse.urljoin(self.request.full_url(), callback_uri),
"return_session": "true",
}
if cancel_uri:
args["cancel_url"] = urlparse.urljoin(
self.request.full_url(), cancel_uri)
if extended_permissions:
if isinstance(extended_permissions, (unicode, bytes_type)):
extended_permissions = [extended_permissions]
args["req_perms"] = ",".join(extended_permissions)
self.redirect("http://www.facebook.com/login.php?" +
urllib.urlencode(args))
def authorize_redirect(self, extended_permissions, callback_uri=None,
cancel_uri=None):
"""Redirects to an authorization request for the given FB resource.
The available resource names are listed at
http://wiki.developers.facebook.com/index.php/Extended_permission.
The most common resource types include:
* publish_stream
* read_stream
* email
* sms
extended_permissions can be a single permission name or a list of
names. To get the session secret and session key, call
get_authenticated_user() just as you would with
authenticate_redirect().
"""
self.authenticate_redirect(callback_uri, cancel_uri,
extended_permissions)
def get_authenticated_user(self, callback):
"""Fetches the authenticated Facebook user.
The authenticated user includes the special Facebook attributes
'session_key' and 'facebook_uid' in addition to the standard
user attributes like 'name'.
"""
self.require_setting("facebook_api_key", "Facebook Connect")
session = escape.json_decode(self.get_argument("session"))
self.facebook_request(
method="facebook.users.getInfo",
callback=self.async_callback(
self._on_get_user_info, callback, session),
session_key=session["session_key"],
uids=session["uid"],
fields="uid,first_name,last_name,name,locale,pic_square," \
"profile_url,username")
def facebook_request(self, method, callback, **args):
"""Makes a Facebook API REST request.
We automatically include the Facebook API key and signature, but
        it is the caller's responsibility to include 'session_key' and any
other required arguments to the method.
The available Facebook methods are documented here:
http://wiki.developers.facebook.com/index.php/API
Here is an example for the stream.get() method::
class MainHandler(tornado.web.RequestHandler,
tornado.auth.FacebookMixin):
@tornado.web.authenticated
@tornado.web.asynchronous
def get(self):
self.facebook_request(
method="stream.get",
callback=self.async_callback(self._on_stream),
session_key=self.current_user["session_key"])
def _on_stream(self, stream):
if stream is None:
# Not authorized to read the stream yet?
self.redirect(self.authorize_redirect("read_stream"))
return
self.render("stream.html", stream=stream)
"""
self.require_setting("facebook_api_key", "Facebook Connect")
self.require_setting("facebook_secret", "Facebook Connect")
if not method.startswith("facebook."):
method = "facebook." + method
args["api_key"] = self.settings["facebook_api_key"]
args["v"] = "1.0"
args["method"] = method
args["call_id"] = str(long(time.time() * 1e6))
args["format"] = "json"
args["sig"] = self._signature(args)
url = "http://api.facebook.com/restserver.php?" + \
urllib.urlencode(args)
http = httpclient.AsyncHTTPClient()
http.fetch(url, callback=self.async_callback(
self._parse_response, callback))
def _on_get_user_info(self, callback, session, users):
if users is None:
callback(None)
return
callback({
"name": users[0]["name"],
"first_name": users[0]["first_name"],
"last_name": users[0]["last_name"],
"uid": users[0]["uid"],
"locale": users[0]["locale"],
"pic_square": users[0]["pic_square"],
"profile_url": users[0]["profile_url"],
"username": users[0].get("username"),
"session_key": session["session_key"],
"session_expires": session.get("expires"),
})
def _parse_response(self, callback, response):
if response.error:
log.warning("HTTP error from Facebook: %s", response.error)
callback(None)
return
try:
json = escape.json_decode(response.body)
except Exception:
log.warning("Invalid JSON from Facebook: %r", response.body)
callback(None)
return
if isinstance(json, dict) and json.get("error_code"):
log.warning("Facebook error: %d: %r", json["error_code"],
json.get("error_msg"))
callback(None)
return
callback(json)
def _signature(self, args):
parts = ["%s=%s" % (n, args[n]) for n in sorted(args.keys())]
body = "".join(parts) + self.settings["facebook_secret"]
if isinstance(body, unicode): body = body.encode("utf-8")
return hashlib.md5(body).hexdigest()
class FacebookGraphMixin(OAuth2Mixin):
"""Facebook authentication using the new Graph API and OAuth2."""
_OAUTH_ACCESS_TOKEN_URL = "https://graph.facebook.com/oauth/access_token?"
_OAUTH_AUTHORIZE_URL = "https://graph.facebook.com/oauth/authorize?"
_OAUTH_NO_CALLBACKS = False
def get_authenticated_user(self, redirect_uri, client_id, client_secret,
code, callback, fields=None):
"""Handles the login for the Facebook user, returning a user object.
Example usage::
class FacebookGraphLoginHandler(LoginHandler, tornado.auth.FacebookGraphMixin):
@tornado.web.asynchronous
def get(self):
if self.get_argument("code", False):
self.get_authenticated_user(
redirect_uri='/auth/facebookgraph/',
client_id=self.settings["facebook_api_key"],
client_secret=self.settings["facebook_secret"],
code=self.get_argument("code"),
callback=self.async_callback(
self._on_login))
return
self.authorize_redirect(redirect_uri='/auth/facebookgraph/',
client_id=self.settings["facebook_api_key"],
extra_params={"scope": "read_stream,offline_access"})
def _on_login(self, user):
log.error(user)
self.finish()
"""
http = httpclient.AsyncHTTPClient()
args = {
"redirect_uri": redirect_uri,
"code": code,
"client_id": client_id,
"client_secret": client_secret,
}
#fields = set(['id', 'name', 'first_name', 'last_name',
# 'locale', 'picture', 'link'])
#if extra_fields: fields.update(extra_fields)
if fields:
fields = fields.split(',')
http.fetch(self._oauth_request_token_url(**args),
self.async_callback(self._on_access_token, redirect_uri, client_id,
client_secret, callback, fields))
def _on_access_token(self, redirect_uri, client_id, client_secret,
callback, fields, response):
if response.error:
log.warning('Facebook auth error: %s' % str(response))
callback(None)
return
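        # The token endpoint replies with form-encoded pairs rather than
        # JSON, hence the query-string parse below.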
args = escape.parse_qs_bytes(escape.native_str(response.body))
session = {
"access_token": args["access_token"][-1],
"expires": args.get("expires")
}
if fields is not None:
self.facebook_request(
path="/me",
callback=self.async_callback(
self._on_get_user_info, callback, session, fields),
access_token=session["access_token"],
fields=",".join(fields)
)
else:
self.facebook_request(
path="/me",
callback=self.async_callback(
self._on_get_user_info, callback, session, fields),
access_token=session["access_token"],
)
def _on_get_user_info(self, callback, session, fields, user):
if user is None:
callback(None)
return
fieldmap = {}
if fields is None:
fieldmap.update(user)
else:
for field in fields:
fieldmap[field] = user.get(field)
fieldmap.update({"access_token": session["access_token"], "session_expires": session.get("expires")})
callback(fieldmap)
def facebook_request(self, path, callback, access_token=None,
post_args=None, **args):
"""Fetches the given relative API path, e.g., "/btaylor/picture"
If the request is a POST, post_args should be provided. Query
string arguments should be given as keyword arguments.
An introduction to the Facebook Graph API can be found at
http://developers.facebook.com/docs/api
Many methods require an OAuth access token which you can obtain
through authorize_redirect() and get_authenticated_user(). The
user returned through that process includes an 'access_token'
attribute that can be used to make authenticated requests via
this method. Example usage::
class MainHandler(tornado.web.RequestHandler,
tornado.auth.FacebookGraphMixin):
@tornado.web.authenticated
@tornado.web.asynchronous
def get(self):
self.facebook_request(
"/me/feed",
post_args={"message": "I am posting from my Tornado application!"},
access_token=self.current_user["access_token"],
callback=self.async_callback(self._on_post))
def _on_post(self, new_entry):
if not new_entry:
# Call failed; perhaps missing permission?
self.authorize_redirect()
return
self.finish("Posted a message!")
"""
url = "https://graph.facebook.com" + path
all_args = {}
if access_token:
all_args["access_token"] = access_token
all_args.update(args)
all_args.update(post_args or {})
if all_args: url += "?" + urllib.urlencode(all_args)
callback = self.async_callback(self._on_facebook_request, callback)
http = httpclient.AsyncHTTPClient()
if post_args is not None:
http.fetch(url, method="POST", body=urllib.urlencode(post_args),
callback=callback)
else:
http.fetch(url, callback=callback)
def _on_facebook_request(self, callback, response):
if response.error:
log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
return
callback(escape.json_decode(response.body))
def _oauth_signature(consumer_token, method, url, parameters={}, token=None):
"""Calculates the HMAC-SHA1 OAuth signature for the given request.
See http://oauth.net/core/1.0/#signing_process
"""
parts = urlparse.urlparse(url)
scheme, netloc, path = parts[:3]
normalized_url = scheme.lower() + "://" + netloc.lower() + path
base_elems = []
base_elems.append(method.upper())
base_elems.append(normalized_url)
base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
for k, v in sorted(parameters.items())))
base_string = "&".join(_oauth_escape(e) for e in base_elems)
key_elems = [consumer_token["secret"]]
key_elems.append(token["secret"] if token else "")
key = "&".join(key_elems)
hash = hmac.new(key, base_string, hashlib.sha1)
return binascii.b2a_base64(hash.digest())[:-1]
def _oauth10a_signature(consumer_token, method, url, parameters={}, token=None):
"""Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request.
See http://oauth.net/core/1.0a/#signing_process
"""
parts = urlparse.urlparse(url)
scheme, netloc, path = parts[:3]
normalized_url = scheme.lower() + "://" + netloc.lower() + path
base_elems = []
base_elems.append(method.upper())
base_elems.append(normalized_url)
base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
for k, v in sorted(parameters.items())))
base_string = "&".join(_oauth_escape(e) for e in base_elems)
key_elems = [urllib.quote(consumer_token["secret"], safe='~')]
key_elems.append(urllib.quote(token["secret"], safe='~') if token else "")
key = "&".join(key_elems)
hash = hmac.new(key, base_string, hashlib.sha1)
return binascii.b2a_base64(hash.digest())[:-1]
def _oauth_escape(val):
if isinstance(val, unicode):
val = val.encode("utf-8")
return urllib.quote(val, safe="~")
def _oauth_parse_response(body):
p = cgi.parse_qs(body, keep_blank_values=False)
token = dict(key=p["oauth_token"][0], secret=p["oauth_token_secret"][0])
# Add the extra parameters the Provider included to the token
special = ("oauth_token", "oauth_token_secret")
token.update((k, p[k][0]) for k in p if k not in special)
return token
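# Hedged example (illustration only): computing an OAuth 1.0 HMAC-SHA1
# signature with the helper above. The consumer token, URL and parameters
# below are made-up values, not real credentials.
def _example_oauth_signature():
    consumer = dict(key="consumer-key", secret="consumer-secret")
    params = {"oauth_nonce": "abc123", "oauth_timestamp": "1300000000"}
    return _oauth_signature(
        consumer, "GET", "http://api.example.com/resource", params)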
| mit | -3,886,845,082,925,986,300 | 39.751506 | 109 | 0.595717 | false |
renebentes/JoomlaPack | lib/extensions/plugin.py | 1 | 2214 | # coding: utf-8
import sublime
import os
import re
st_version = int(sublime.version())
if st_version > 3000:
from JoomlaPack.lib import *
from JoomlaPack.lib.extensions.base import Base
from JoomlaPack.lib.inflector import *
else:
from lib import *
from lib.extensions.base import Base
from lib.inflector import *
class Plugin(Base):
'''
Implements the Joomla's Plugin extension.
'''
def __init__(self, content=None, inflector=English):
Base.__init__(self, inflector)
self.prefix = 'plg_'
self.template_path = 'plugin'
if content is not None:
self.group, self.name = self.inflector.humanize(content,
prefix='plg_') \
.split(' ')
self.fullname = self.inflector.underscore(
self.inflector.variablize(self.prefix +
self.group + ' ' + self.name))
else:
self.fullname = self.inflector.underscore(
Project().get_project_name())
self.group, self.name = self.inflector.humanize(self.fullname,
prefix='plg_') \
.split(' ')
def rename(self):
for root, dirs, files in os.walk(self.path):
for filename in files:
newname = re.sub('{{name}}', self.name,
re.sub('{{group}}', self.group,
re.sub('{{locale}}',
Helper().language(), filename)))
if newname != filename:
os.rename(os.path.join(root, filename),
os.path.join(root, newname))
for root, dirs, files in os.walk(self.path):
for folder in dirs:
newname = folder.replace(
'{{locale}}', Helper().language())
if newname != folder:
os.rename(os.path.join(root, folder),
os.path.join(root, newname))
def __str__(self):
return "JoomlaPack: Joomla Plugin"
| mit | -5,881,219,763,017,948,000 | 33.061538 | 79 | 0.484192 | false |
HenriquePaulo/projeto | backend/test/editar_produto_tests/editar_produto_new_tests.py | 1 | 1576 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from base import GAETestCase
from datetime import datetime, date
from decimal import Decimal
from editar_produto_app.editar_produto_model import Editar_produto
from routes.editar_produtos.new import index, save
from tekton.gae.middleware.redirect import RedirectResponse
class IndexTests(GAETestCase):
def test_success(self):
template_response = index()
self.assert_can_render(template_response)
class SaveTests(GAETestCase):
def test_success(self):
self.assertIsNone(Editar_produto.query().get())
redirect_response = save(titulo='titulo_string', preco='preco_string', descricao='descricao_string', imagem='imagem_string', nome='nome_string')
self.assertIsInstance(redirect_response, RedirectResponse)
saved_editar_produto = Editar_produto.query().get()
self.assertIsNotNone(saved_editar_produto)
self.assertEquals('titulo_string', saved_editar_produto.titulo)
self.assertEquals('preco_string', saved_editar_produto.preco)
self.assertEquals('descricao_string', saved_editar_produto.descricao)
self.assertEquals('imagem_string', saved_editar_produto.imagem)
self.assertEquals('nome_string', saved_editar_produto.nome)
def test_error(self):
template_response = save()
errors = template_response.context['errors']
self.assertSetEqual(set(['titulo', 'preco', 'descricao', 'imagem', 'nome']), set(errors.keys()))
self.assert_can_render(template_response)
| mit | 3,332,256,648,107,032,600 | 45.352941 | 152 | 0.719543 | false |
taohungyang/cloud-custodian | c7n/resources/asg.py | 1 | 60737 | # Copyright 2015-2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
from botocore.client import ClientError
from collections import Counter
from concurrent.futures import as_completed
from datetime import datetime, timedelta
from dateutil import zoneinfo
from dateutil.parser import parse
import logging
import itertools
import time
from c7n.actions import Action, ActionRegistry
from c7n.exceptions import PolicyValidationError
from c7n.filters import (
FilterRegistry, ValueFilter, AgeFilter, Filter,
OPERATORS)
from c7n.filters.offhours import OffHour, OnHour, Time
import c7n.filters.vpc as net_filters
from c7n.manager import resources
from c7n import query
from c7n.tags import TagActionFilter, DEFAULT_TAG, TagCountFilter, TagTrim
from c7n.utils import (
local_session, type_schema, chunks, get_retry, worker)
from .ec2 import deserialize_user_data
log = logging.getLogger('custodian.asg')
filters = FilterRegistry('asg.filters')
actions = ActionRegistry('asg.actions')
filters.register('offhour', OffHour)
filters.register('onhour', OnHour)
filters.register('tag-count', TagCountFilter)
filters.register('marked-for-op', TagActionFilter)
@resources.register('asg')
class ASG(query.QueryResourceManager):
class resource_type(object):
service = 'autoscaling'
type = 'autoScalingGroup'
id = name = 'AutoScalingGroupName'
date = 'CreatedTime'
dimension = 'AutoScalingGroupName'
enum_spec = ('describe_auto_scaling_groups', 'AutoScalingGroups', None)
filter_name = 'AutoScalingGroupNames'
filter_type = 'list'
config_type = 'AWS::AutoScaling::AutoScalingGroup'
default_report_fields = (
'AutoScalingGroupName',
'CreatedTime',
'LaunchConfigurationName',
'count:Instances',
'DesiredCapacity',
'HealthCheckType',
'list:LoadBalancerNames',
)
filter_registry = filters
action_registry = actions
retry = staticmethod(get_retry(('ResourceInUse', 'Throttling',)))
class LaunchConfigFilterBase(object):
"""Mixin base class for querying asg launch configs."""
permissions = ("autoscaling:DescribeLaunchConfigurations",)
configs = None
def initialize(self, asgs):
"""Get launch configs for the set of asgs"""
config_names = set()
skip = []
for a in asgs:
# Per https://github.com/capitalone/cloud-custodian/issues/143
if 'LaunchConfigurationName' not in a:
skip.append(a)
continue
config_names.add(a['LaunchConfigurationName'])
for a in skip:
asgs.remove(a)
self.configs = {}
self.log.debug(
"Querying launch configs for filter %s",
self.__class__.__name__)
lc_resources = self.manager.get_resource_manager('launch-config')
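        # heuristic: for a handful of configs a point lookup is cheaper;
        # past that, one full describe call beats many individual gets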
if len(config_names) < 5:
configs = lc_resources.get_resources(list(config_names))
else:
configs = lc_resources.resources()
self.configs = {
cfg['LaunchConfigurationName']: cfg for cfg in configs
if cfg['LaunchConfigurationName'] in config_names}
@filters.register('security-group')
class SecurityGroupFilter(
net_filters.SecurityGroupFilter, LaunchConfigFilterBase):
RelatedIdsExpression = ""
def get_permissions(self):
return ("autoscaling:DescribeLaunchConfigurations",
"ec2:DescribeSecurityGroups",)
def get_related_ids(self, asgs):
group_ids = set()
for asg in asgs:
cfg = self.configs.get(asg['LaunchConfigurationName'])
group_ids.update(cfg.get('SecurityGroups', ()))
return group_ids
def process(self, asgs, event=None):
self.initialize(asgs)
return super(SecurityGroupFilter, self).process(asgs, event)
@filters.register('subnet')
class SubnetFilter(net_filters.SubnetFilter):
RelatedIdsExpression = ""
def get_related_ids(self, asgs):
subnet_ids = set()
for asg in asgs:
subnet_ids.update(
[sid.strip() for sid in asg.get('VPCZoneIdentifier', '').split(',')])
return subnet_ids
filters.register('network-location', net_filters.NetworkLocation)
@filters.register('launch-config')
class LaunchConfigFilter(ValueFilter, LaunchConfigFilterBase):
"""Filter asg by launch config attributes.
:example:
.. code-block:: yaml
policies:
- name: launch-configs-with-public-address
resource: asg
filters:
- type: launch-config
key: AssociatePublicIpAddress
value: true
"""
schema = type_schema(
'launch-config', rinherit=ValueFilter.schema)
permissions = ("autoscaling:DescribeLaunchConfigurations",)
def process(self, asgs, event=None):
self.initialize(asgs)
return super(LaunchConfigFilter, self).process(asgs, event)
def __call__(self, asg):
# Active launch configs can be deleted..
cfg = self.configs.get(asg['LaunchConfigurationName'])
return self.match(cfg)
class ConfigValidFilter(Filter, LaunchConfigFilterBase):
def get_permissions(self):
        return list(itertools.chain(*[
            self.manager.get_resource_manager(m).get_permissions()
            for m in ('subnet', 'security-group', 'key-pair', 'elb',
                      'app-elb-target-group', 'ebs-snapshot', 'ami')]))
def validate(self):
if self.manager.data.get('mode'):
raise PolicyValidationError(
"invalid-config makes too many queries to be run in lambda")
return self
def initialize(self, asgs):
super(ConfigValidFilter, self).initialize(asgs)
# pylint: disable=attribute-defined-outside-init
self.subnets = self.get_subnets()
self.security_groups = self.get_security_groups()
self.key_pairs = self.get_key_pairs()
self.elbs = self.get_elbs()
self.appelb_target_groups = self.get_appelb_target_groups()
self.snapshots = self.get_snapshots()
self.images, self.image_snaps = self.get_images()
def get_subnets(self):
manager = self.manager.get_resource_manager('subnet')
return set([s['SubnetId'] for s in manager.resources()])
def get_security_groups(self):
manager = self.manager.get_resource_manager('security-group')
return set([s['GroupId'] for s in manager.resources()])
def get_key_pairs(self):
manager = self.manager.get_resource_manager('key-pair')
return set([k['KeyName'] for k in manager.resources()])
def get_elbs(self):
manager = self.manager.get_resource_manager('elb')
return set([e['LoadBalancerName'] for e in manager.resources()])
def get_appelb_target_groups(self):
manager = self.manager.get_resource_manager('app-elb-target-group')
return set([a['TargetGroupArn'] for a in manager.resources()])
def get_images(self):
manager = self.manager.get_resource_manager('ami')
images = set()
image_snaps = set()
image_ids = list({lc['ImageId'] for lc in self.configs.values()})
# Pull account images, we should be able to utilize cached values,
        # drawing down the image population to just images not in the account.
account_images = [
i for i in manager.resources() if i['ImageId'] in image_ids]
account_image_ids = {i['ImageId'] for i in account_images}
image_ids = [image_id for image_id in image_ids
if image_id not in account_image_ids]
# To pull third party images, we explicitly use a describe
# source without any cache.
#
# Can't use a config source since it won't have state for
# third party ami, we auto propagate source normally, so we
# explicitly pull a describe source. Can't use a cache either
        # as they're not in the account.
#
while image_ids:
try:
amis = manager.get_source('describe').get_resources(
image_ids, cache=False)
account_images.extend(amis)
break
except ClientError as e:
msg = e.response['Error']['Message']
if e.response['Error']['Code'] != 'InvalidAMIID.NotFound':
raise
for n in msg[msg.find('[') + 1: msg.find(']')].split(','):
image_ids.remove(n.strip())
for a in account_images:
images.add(a['ImageId'])
# Capture any snapshots, images strongly reference their
# snapshots, and some of these will be third party in the
# case of a third party image.
for bd in a.get('BlockDeviceMappings', ()):
if 'Ebs' not in bd or 'SnapshotId' not in bd['Ebs']:
continue
image_snaps.add(bd['Ebs']['SnapshotId'].strip())
return images, image_snaps
def get_snapshots(self):
manager = self.manager.get_resource_manager('ebs-snapshot')
return set([s['SnapshotId'] for s in manager.resources()])
def process(self, asgs, event=None):
self.initialize(asgs)
return super(ConfigValidFilter, self).process(asgs, event)
def get_asg_errors(self, asg):
errors = []
subnets = asg.get('VPCZoneIdentifier', '').split(',')
for subnet in subnets:
subnet = subnet.strip()
if subnet not in self.subnets:
errors.append(('invalid-subnet', subnet))
for elb in asg['LoadBalancerNames']:
elb = elb.strip()
if elb not in self.elbs:
errors.append(('invalid-elb', elb))
for appelb_target in asg.get('TargetGroupARNs', []):
appelb_target = appelb_target.strip()
if appelb_target not in self.appelb_target_groups:
errors.append(('invalid-appelb-target-group', appelb_target))
cfg_id = asg.get(
'LaunchConfigurationName', asg['AutoScalingGroupName'])
cfg_id = cfg_id.strip()
cfg = self.configs.get(cfg_id)
if cfg is None:
errors.append(('invalid-config', cfg_id))
self.log.debug(
"asg:%s no launch config found" % asg['AutoScalingGroupName'])
asg['Invalid'] = errors
return True
for sg in cfg['SecurityGroups']:
sg = sg.strip()
if sg not in self.security_groups:
errors.append(('invalid-security-group', sg))
if cfg['KeyName'] and cfg['KeyName'].strip() not in self.key_pairs:
errors.append(('invalid-key-pair', cfg['KeyName']))
if cfg['ImageId'].strip() not in self.images:
errors.append(('invalid-image', cfg['ImageId']))
for bd in cfg['BlockDeviceMappings']:
if 'Ebs' not in bd or 'SnapshotId' not in bd['Ebs']:
continue
snapshot_id = bd['Ebs']['SnapshotId'].strip()
if snapshot_id in self.image_snaps:
continue
if snapshot_id not in self.snapshots:
errors.append(('invalid-snapshot', bd['Ebs']['SnapshotId']))
return errors
@filters.register('valid')
class ValidConfigFilter(ConfigValidFilter):
"""Filters autoscale groups to find those that are structurally valid.
This operates as the inverse of the invalid filter for multi-step
workflows.
See details on the invalid filter for a list of checks made.
:example:
    .. code-block:: yaml
policies:
- name: asg-valid-config
resource: asg
filters:
- valid
"""
schema = type_schema('valid')
def __call__(self, asg):
errors = self.get_asg_errors(asg)
return not bool(errors)
@filters.register('invalid')
class InvalidConfigFilter(ConfigValidFilter):
"""Filter autoscale groups to find those that are structurally invalid.
Structurally invalid means that the auto scale group will not be able
    to launch an instance successfully as the configuration has
- invalid subnets
- invalid security groups
- invalid key pair name
- invalid launch config volume snapshots
- invalid amis
- invalid health check elb (slower)
Internally this tries to reuse other resource managers for better
cache utilization.
:example:
    .. code-block:: yaml
policies:
- name: asg-invalid-config
resource: asg
filters:
- invalid
"""
schema = type_schema('invalid')
def __call__(self, asg):
errors = self.get_asg_errors(asg)
if errors:
asg['Invalid'] = errors
return True
@filters.register('not-encrypted')
class NotEncryptedFilter(Filter, LaunchConfigFilterBase):
"""Check if an ASG is configured to have unencrypted volumes.
Checks both the ami snapshots and the launch configuration.
:example:
.. code-block:: yaml
policies:
- name: asg-unencrypted
resource: asg
filters:
- type: not-encrypted
exclude_image: true
"""
schema = type_schema('not-encrypted', exclude_image={'type': 'boolean'})
permissions = (
'ec2:DescribeImages',
'ec2:DescribeSnapshots',
'autoscaling:DescribeLaunchConfigurations')
images = unencrypted_configs = unencrypted_images = None
# TODO: resource-manager, notfound err mgr
def process(self, asgs, event=None):
self.initialize(asgs)
return super(NotEncryptedFilter, self).process(asgs, event)
def __call__(self, asg):
cfg = self.configs.get(asg['LaunchConfigurationName'])
if not cfg:
self.log.warning(
"ASG %s instances: %d has missing config: %s",
asg['AutoScalingGroupName'], len(asg['Instances']),
asg['LaunchConfigurationName'])
return False
unencrypted = []
if (not self.data.get('exclude_image') and cfg['ImageId'] in self.unencrypted_images):
unencrypted.append('Image')
if cfg['LaunchConfigurationName'] in self.unencrypted_configs:
unencrypted.append('LaunchConfig')
if unencrypted:
asg['Unencrypted'] = unencrypted
return bool(unencrypted)
def initialize(self, asgs):
super(NotEncryptedFilter, self).initialize(asgs)
ec2 = local_session(self.manager.session_factory).client('ec2')
self.unencrypted_images = self.get_unencrypted_images(ec2)
self.unencrypted_configs = self.get_unencrypted_configs(ec2)
def _fetch_images(self, ec2, image_ids):
while True:
try:
return ec2.describe_images(ImageIds=list(image_ids))
except ClientError as e:
if e.response['Error']['Code'] == 'InvalidAMIID.NotFound':
msg = e.response['Error']['Message']
e_ami_ids = [
e_ami_id.strip() for e_ami_id
in msg[msg.find("'[") + 2:msg.rfind("]'")].split(',')]
self.log.warning(
"asg:not-encrypted filter image not found %s",
e_ami_ids)
for e_ami_id in e_ami_ids:
image_ids.remove(e_ami_id)
continue
raise
def get_unencrypted_images(self, ec2):
"""retrieve images which have unencrypted snapshots referenced."""
image_ids = set()
for cfg in self.configs.values():
image_ids.add(cfg['ImageId'])
self.log.debug("querying %d images", len(image_ids))
results = self._fetch_images(ec2, image_ids)
self.images = {i['ImageId']: i for i in results['Images']}
unencrypted_images = set()
for i in self.images.values():
for bd in i['BlockDeviceMappings']:
if 'Ebs' in bd and not bd['Ebs'].get('Encrypted'):
unencrypted_images.add(i['ImageId'])
break
return unencrypted_images
def get_unencrypted_configs(self, ec2):
"""retrieve configs that have unencrypted ebs voluems referenced."""
unencrypted_configs = set()
snaps = {}
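        # snaps maps snapshot id -> launch config ids that reference it,
        # so one describe_snapshots pass can flag every affected config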
for cid, c in self.configs.items():
image = self.images.get(c['ImageId'])
# image deregistered/unavailable
if image is not None:
image_block_devs = {
bd['DeviceName']: bd['Ebs']
for bd in image['BlockDeviceMappings'] if 'Ebs' in bd}
else:
image_block_devs = {}
for bd in c['BlockDeviceMappings']:
if 'Ebs' not in bd:
continue
# Launch configs can shadow image devices, images have
# precedence.
if bd['DeviceName'] in image_block_devs:
continue
if 'SnapshotId' in bd['Ebs']:
snaps.setdefault(
bd['Ebs']['SnapshotId'].strip(), []).append(cid)
elif not bd['Ebs'].get('Encrypted'):
unencrypted_configs.add(cid)
if not snaps:
return unencrypted_configs
self.log.debug("querying %d snapshots", len(snaps))
for s in self.get_snapshots(ec2, list(snaps.keys())):
if not s.get('Encrypted'):
unencrypted_configs.update(snaps[s['SnapshotId']])
return unencrypted_configs
def get_snapshots(self, ec2, snap_ids):
"""get snapshots corresponding to id, but tolerant of invalid id's."""
while snap_ids:
try:
result = ec2.describe_snapshots(SnapshotIds=snap_ids)
except ClientError as e:
bad_snap = NotEncryptedFilter.get_bad_snapshot(e)
if bad_snap:
snap_ids.remove(bad_snap)
continue
raise
else:
return result.get('Snapshots', ())
return ()
@staticmethod
def get_bad_snapshot(e):
"""Handle various client side errors when describing snapshots"""
msg = e.response['Error']['Message']
error = e.response['Error']['Code']
e_snap_id = None
if error == 'InvalidSnapshot.NotFound':
e_snap_id = msg[msg.find("'") + 1:msg.rfind("'")]
log.warning("Snapshot not found %s" % e_snap_id)
elif error == 'InvalidSnapshotID.Malformed':
e_snap_id = msg[msg.find('"') + 1:msg.rfind('"')]
log.warning("Snapshot id malformed %s" % e_snap_id)
return e_snap_id
@filters.register('image-age')
class ImageAgeFilter(AgeFilter, LaunchConfigFilterBase):
"""Filter asg by image age (in days).
:example:
.. code-block:: yaml
policies:
- name: asg-older-image
resource: asg
filters:
- type: image-age
days: 90
op: ge
"""
permissions = (
"ec2:DescribeImages",
"autoscaling:DescribeLaunchConfigurations")
date_attribute = "CreationDate"
schema = type_schema(
'image-age',
op={'type': 'string', 'enum': list(OPERATORS.keys())},
days={'type': 'number'})
def process(self, asgs, event=None):
self.initialize(asgs)
return super(ImageAgeFilter, self).process(asgs, event)
def initialize(self, asgs):
super(ImageAgeFilter, self).initialize(asgs)
image_ids = set()
for cfg in self.configs.values():
image_ids.add(cfg['ImageId'])
results = self.manager.get_resource_manager('ami').resources()
self.images = {i['ImageId']: i for i in results}
def get_resource_date(self, i):
cfg = self.configs[i['LaunchConfigurationName']]
ami = self.images.get(cfg['ImageId'], {})
return parse(ami.get(
self.date_attribute, "2000-01-01T01:01:01.000Z"))
@filters.register('image')
class ImageFilter(ValueFilter, LaunchConfigFilterBase):
"""Filter asg by image
:example:
.. code-block:: yaml
policies:
- name: non-windows-asg
resource: asg
filters:
- type: image
key: Platform
value: Windows
op: ne
"""
permissions = (
"ec2:DescribeImages",
"autoscaling:DescribeLaunchConfigurations")
schema = type_schema('image', rinherit=ValueFilter.schema)
def process(self, asgs, event=None):
self.initialize(asgs)
return super(ImageFilter, self).process(asgs, event)
def initialize(self, asgs):
super(ImageFilter, self).initialize(asgs)
image_ids = set()
for cfg in self.configs.values():
image_ids.add(cfg['ImageId'])
results = self.manager.get_resource_manager('ami').resources()
base_image_map = {i['ImageId']: i for i in results}
resources = {i: base_image_map[i] for i in image_ids if i in base_image_map}
missing = list(set(image_ids) - set(resources.keys()))
if missing:
loaded = self.manager.get_resource_manager('ami').get_resources(missing, False)
resources.update({image['ImageId']: image for image in loaded})
self.images = resources
def __call__(self, i):
cfg = self.configs[i['LaunchConfigurationName']]
image = self.images.get(cfg['ImageId'], {})
# Finally, if we have no image...
        if not image:
            self.log.warning(
                "Could not locate image for asg:%s ami:%s" % (
                    i['AutoScalingGroupName'], cfg["ImageId"]))
# Match instead on empty skeleton?
return False
return self.match(image)
@filters.register('vpc-id')
class VpcIdFilter(ValueFilter):
"""Filters ASG based on the VpcId
    This filter is available as a ValueFilter as the vpc-id is not natively
    associated with the results from describing the autoscaling groups.
:example:
.. code-block:: yaml
policies:
- name: asg-vpc-xyz
resource: asg
filters:
- type: vpc-id
value: vpc-12ab34cd
"""
schema = type_schema(
'vpc-id', rinherit=ValueFilter.schema)
schema['properties'].pop('key')
permissions = ('ec2:DescribeSubnets',)
# TODO: annotation
def __init__(self, data, manager=None):
super(VpcIdFilter, self).__init__(data, manager)
self.data['key'] = 'VpcId'
def process(self, asgs, event=None):
subnets = {}
for a in asgs:
subnet_ids = a.get('VPCZoneIdentifier', '')
if not subnet_ids:
continue
subnets.setdefault(subnet_ids.split(',')[0], []).append(a)
subnet_manager = self.manager.get_resource_manager('subnet')
# Invalid subnets on asgs happen, so query all
all_subnets = {s['SubnetId']: s for s in subnet_manager.resources()}
for s, s_asgs in subnets.items():
if s not in all_subnets:
self.log.warning(
"invalid subnet %s for asgs: %s",
s, [a['AutoScalingGroupName'] for a in s_asgs])
continue
for a in s_asgs:
a['VpcId'] = all_subnets[s]['VpcId']
return super(VpcIdFilter, self).process(asgs)
@filters.register('progagated-tags')
@filters.register('propagated-tags')
class PropagatedTagFilter(Filter):
"""Filter ASG based on propagated tags
This filter is designed to find all autoscaling groups that have a list
    of tag keys (provided) that are set to propagate to new instances. Using
    this allows for easy validation that asg tag sets are in place across an
    account for compliance.
:example:
    .. code-block:: yaml
policies:
- name: asg-non-propagated-tags
resource: asg
filters:
- type: propagated-tags
keys: ["ABC", "BCD"]
match: false
propagate: true
"""
schema = type_schema(
'progagated-tags',
aliases=('propagated-tags',),
keys={'type': 'array', 'items': {'type': 'string'}},
match={'type': 'boolean'},
propagate={'type': 'boolean'})
permissions = (
"autoscaling:DescribeLaunchConfigurations",
"autoscaling:DescribeAutoScalingGroups")
def process(self, asgs, event=None):
keys = self.data.get('keys', [])
match = self.data.get('match', True)
results = []
for asg in asgs:
if self.data.get('propagate', True):
tags = [t['Key'] for t in asg.get('Tags', []) if t[
'Key'] in keys and t['PropagateAtLaunch']]
if match and all(k in tags for k in keys):
results.append(asg)
if not match and not all(k in tags for k in keys):
results.append(asg)
else:
tags = [t['Key'] for t in asg.get('Tags', []) if t[
'Key'] in keys and not t['PropagateAtLaunch']]
if match and all(k in tags for k in keys):
results.append(asg)
if not match and not all(k in tags for k in keys):
results.append(asg)
return results
@actions.register('tag-trim')
class GroupTagTrim(TagTrim):
"""Action to trim the number of tags to avoid hitting tag limits
:example:
.. code-block:: yaml
policies:
- name: asg-tag-trim
resource: asg
filters:
- type: tag-count
count: 10
actions:
- type: tag-trim
space: 1
preserve:
- OwnerName
- OwnerContact
"""
max_tag_count = 10
permissions = ('autoscaling:DeleteTags',)
def process_tag_removal(self, resource, candidates):
client = local_session(
self.manager.session_factory).client('autoscaling')
tags = []
for t in candidates:
tags.append(
dict(Key=t, ResourceType='auto-scaling-group',
ResourceId=resource['AutoScalingGroupName']))
client.delete_tags(Tags=tags)
@filters.register('capacity-delta')
class CapacityDelta(Filter):
"""Filter returns ASG that have less instances than desired or required
:example:
.. code-block:: yaml
policies:
- name: asg-capacity-delta
resource: asg
filters:
- capacity-delta
"""
schema = type_schema('capacity-delta')
def process(self, asgs, event=None):
return [a for a in asgs
if len(a['Instances']) < a['DesiredCapacity'] or
len(a['Instances']) < a['MinSize']]
@filters.register('user-data')
class UserDataFilter(ValueFilter, LaunchConfigFilterBase):
"""Filter on ASG's whose launch configs have matching userdata.
Note: It is highly recommended to use regexes with the ?sm flags, since Custodian
uses re.match() and userdata spans multiple lines.
:example:
.. code-block:: yaml
policies:
- name: lc_userdata
resource: asg
filters:
- type: user-data
op: regex
value: (?smi).*password=
actions:
- delete
"""
schema = type_schema('user-data', rinherit=ValueFilter.schema)
batch_size = 50
annotation = 'c7n:user-data'
def get_permissions(self):
return self.manager.get_resource_manager('asg').get_permissions()
def process(self, asgs, event=None):
'''
Get list of autoscaling groups whose launch configs match the user-data filter.
Note: Since this is an autoscaling filter, this won't match unused launch configs.
        :param asgs: List of autoscaling groups
:param event: Event
:return: List of ASG's with matching launch configs
'''
self.data['key'] = '"c7n:user-data"'
results = []
super(UserDataFilter, self).initialize(asgs)
for asg in asgs:
launch_config = self.configs.get(asg['LaunchConfigurationName'])
if self.annotation not in launch_config:
if not launch_config['UserData']:
asg[self.annotation] = None
else:
asg[self.annotation] = deserialize_user_data(
launch_config['UserData'])
if self.match(asg):
results.append(asg)
return results
@actions.register('resize')
class Resize(Action):
"""Action to resize the min/max/desired instances in an ASG
There are several ways to use this action:
1. set min/desired to current running instances
.. code-block:: yaml
policies:
- name: asg-resize
resource: asg
filters:
- capacity-delta
actions:
- type: resize
desired-size: "current"
2. apply a fixed resize of min, max or desired, optionally saving the
previous values to a named tag (for restoring later):
.. code-block:: yaml
policies:
- name: offhours-asg-off
resource: asg
filters:
- type: offhour
offhour: 19
default_tz: bst
actions:
- type: resize
min-size: 0
desired-size: 0
save-options-tag: OffHoursPrevious
3. restore previous values for min/max/desired from a tag:
.. code-block:: yaml
policies:
- name: offhours-asg-on
resource: asg
filters:
- type: onhour
onhour: 8
default_tz: bst
actions:
- type: resize
restore-options-tag: OffHoursPrevious
"""
schema = type_schema(
'resize',
**{
'min-size': {'type': 'integer', 'minimum': 0},
'max-size': {'type': 'integer', 'minimum': 0},
'desired-size': {
"anyOf": [
{'enum': ["current"]},
{'type': 'integer', 'minimum': 0}
]
},
# support previous key name with underscore
'desired_size': {
"anyOf": [
{'enum': ["current"]},
{'type': 'integer', 'minimum': 0}
]
},
'save-options-tag': {'type': 'string'},
'restore-options-tag': {'type': 'string'},
}
)
permissions = (
'autoscaling:UpdateAutoScalingGroup',
'autoscaling:CreateOrUpdateTags'
)
def process(self, asgs):
# ASG parameters to save to/restore from a tag
asg_params = ['MinSize', 'MaxSize', 'DesiredCapacity']
# support previous param desired_size when desired-size is not present
if 'desired_size' in self.data and 'desired-size' not in self.data:
self.data['desired-size'] = self.data['desired_size']
client = local_session(self.manager.session_factory).client(
'autoscaling')
for a in asgs:
tag_map = {t['Key']: t['Value'] for t in a.get('Tags', [])}
update = {}
current_size = len(a['Instances'])
if 'restore-options-tag' in self.data:
# we want to restore all ASG size params from saved data
log.debug('Want to restore ASG %s size from tag %s' %
(a['AutoScalingGroupName'], self.data['restore-options-tag']))
if self.data['restore-options-tag'] in tag_map:
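                    # saved tag value has the form
                    # 'MinSize=N;MaxSize=N;DesiredCapacity=N' (see save path below)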
for field in tag_map[self.data['restore-options-tag']].split(';'):
(param, value) = field.split('=')
if param in asg_params:
update[param] = int(value)
else:
# we want to resize, parse provided params
if 'min-size' in self.data:
update['MinSize'] = self.data['min-size']
if 'max-size' in self.data:
update['MaxSize'] = self.data['max-size']
if 'desired-size' in self.data:
if self.data['desired-size'] == 'current':
update['DesiredCapacity'] = min(current_size, a['DesiredCapacity'])
if 'MinSize' not in update:
# unless we were given a new value for min_size then
# ensure it is at least as low as current_size
update['MinSize'] = min(current_size, a['MinSize'])
elif type(self.data['desired-size']) == int:
update['DesiredCapacity'] = self.data['desired-size']
if update:
log.debug('ASG %s size: current=%d, min=%d, max=%d, desired=%d'
% (a['AutoScalingGroupName'], current_size, a['MinSize'],
a['MaxSize'], a['DesiredCapacity']))
if 'save-options-tag' in self.data:
# save existing ASG params to a tag before changing them
log.debug('Saving ASG %s size to tag %s' %
(a['AutoScalingGroupName'], self.data['save-options-tag']))
tags = [dict(
Key=self.data['save-options-tag'],
PropagateAtLaunch=False,
Value=';'.join({'%s=%d' % (param, a[param]) for param in asg_params}),
ResourceId=a['AutoScalingGroupName'],
ResourceType='auto-scaling-group',
)]
self.manager.retry(client.create_or_update_tags, Tags=tags)
log.debug('Resizing ASG %s with %s' % (a['AutoScalingGroupName'],
str(update)))
self.manager.retry(
client.update_auto_scaling_group,
AutoScalingGroupName=a['AutoScalingGroupName'],
**update)
else:
log.debug('nothing to resize')
@actions.register('remove-tag')
@actions.register('untag')
@actions.register('unmark')
class RemoveTag(Action):
"""Action to remove tag/tags from an ASG
:example:
.. code-block:: yaml
policies:
- name: asg-remove-unnecessary-tags
resource: asg
filters:
- "tag:UnnecessaryTag": present
actions:
- type: remove-tag
key: UnnecessaryTag
"""
schema = type_schema(
'remove-tag',
aliases=('untag', 'unmark'),
key={'type': 'string'})
permissions = ('autoscaling:DeleteTags',)
batch_size = 1
def process(self, asgs):
error = False
key = self.data.get('key', DEFAULT_TAG)
with self.executor_factory(max_workers=3) as w:
futures = {}
for asg_set in chunks(asgs, self.batch_size):
futures[w.submit(self.process_asg_set, asg_set, key)] = asg_set
for f in as_completed(futures):
asg_set = futures[f]
if f.exception():
error = f.exception()
self.log.exception(
"Exception untagging asg:%s tag:%s error:%s" % (
", ".join([a['AutoScalingGroupName']
for a in asg_set]),
self.data.get('key', DEFAULT_TAG),
f.exception()))
if error:
raise error
def process_asg_set(self, asgs, key):
session = local_session(self.manager.session_factory)
client = session.client('autoscaling')
tags = [dict(
Key=key, ResourceType='auto-scaling-group',
ResourceId=a['AutoScalingGroupName']) for a in asgs]
self.manager.retry(client.delete_tags, Tags=tags)
@actions.register('tag')
@actions.register('mark')
class Tag(Action):
"""Action to add a tag to an ASG
The *propagate* parameter can be used to specify that the tag being added
will need to be propagated down to each ASG instance associated or simply
to the ASG itself.
:example:
.. code-block:: yaml
policies:
- name: asg-add-owner-tag
resource: asg
filters:
- "tag:OwnerName": absent
actions:
- type: tag
key: OwnerName
value: OwnerName
propagate: true
"""
schema = type_schema(
'tag',
key={'type': 'string'},
value={'type': 'string'},
# Backwards compatibility
tag={'type': 'string'},
msg={'type': 'string'},
propagate={'type': 'boolean'},
aliases=('mark',)
)
permissions = ('autoscaling:CreateOrUpdateTags',)
batch_size = 1
def process(self, asgs):
key = self.data.get('key', self.data.get('tag', DEFAULT_TAG))
value = self.data.get(
'value', self.data.get(
'msg', 'AutoScaleGroup does not meet policy guidelines'))
return self.tag(asgs, key, value)
def tag(self, asgs, key, value):
error = None
with self.executor_factory(max_workers=3) as w:
futures = {}
for asg_set in chunks(asgs, self.batch_size):
futures[w.submit(
self.process_asg_set, asg_set, key, value)] = asg_set
for f in as_completed(futures):
asg_set = futures[f]
                if f.exception():
                    error = f.exception()
                    self.log.exception(
                        "Exception tagging tag:%s error:%s asg:%s" % (
                            self.data.get('key', DEFAULT_TAG),
                            f.exception(),
                            ", ".join([a['AutoScalingGroupName']
                                       for a in asg_set])))
if error:
raise error
def process_asg_set(self, asgs, key, value):
session = local_session(self.manager.session_factory)
client = session.client('autoscaling')
propagate = self.data.get('propagate_launch', True)
tags = [
dict(Key=key, ResourceType='auto-scaling-group', Value=value,
PropagateAtLaunch=propagate,
ResourceId=a['AutoScalingGroupName']) for a in asgs]
self.manager.retry(client.create_or_update_tags, Tags=tags)
@actions.register('propagate-tags')
class PropagateTags(Action):
"""Propagate tags to an asg instances.
In AWS changing an asg tag does not propagate to instances.
This action exists to do that, and can also trim older tags
not present on the asg anymore that are present on instances.
:example:
.. code-block:: yaml
policies:
- name: asg-propagate-required
resource: asg
filters:
- "tag:OwnerName": present
actions:
- type: propagate-tags
tags:
- OwnerName
"""
schema = type_schema(
'propagate-tags',
tags={'type': 'array', 'items': {'type': 'string'}},
trim={'type': 'boolean'})
permissions = ('ec2:DeleteTags', 'ec2:CreateTags')
def validate(self):
if not isinstance(self.data.get('tags', []), (list, tuple)):
raise ValueError("No tags specified")
return self
def process(self, asgs):
if not asgs:
return
if self.data.get('trim', False):
self.instance_map = self.get_instance_map(asgs)
with self.executor_factory(max_workers=10) as w:
instance_count = sum(list(w.map(self.process_asg, asgs)))
self.log.info("Applied tags to %d instances" % instance_count)
def process_asg(self, asg):
client = local_session(self.manager.session_factory).client('ec2')
instance_ids = [i['InstanceId'] for i in asg['Instances']]
tag_map = {t['Key']: t['Value'] for t in asg.get('Tags', [])
if t['PropagateAtLaunch'] and not t['Key'].startswith('aws:')}
if self.data.get('tags'):
tag_map = {
k: v for k, v in tag_map.items()
if k in self.data['tags']}
tag_set = set(tag_map)
if self.data.get('trim', False):
instances = [self.instance_map[i] for i in instance_ids]
self.prune_instance_tags(client, asg, tag_set, instances)
if not self.manager.config.dryrun:
client.create_tags(
Resources=instance_ids,
Tags=[{'Key': k, 'Value': v} for k, v in tag_map.items()])
return len(instance_ids)
def prune_instance_tags(self, client, asg, tag_set, instances):
"""Remove tags present on all asg instances which are not present
on the asg.
"""
instance_tags = Counter()
instance_count = len(instances)
remove_tags = []
extra_tags = []
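        # extra_tags: keys present on only a subset of instances (uneven
        # population); they are pruned below along with tags gone from the asg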
for i in instances:
instance_tags.update([
t['Key'] for t in i['Tags']
if not t['Key'].startswith('aws:')])
for k, v in instance_tags.items():
if not v >= instance_count:
extra_tags.append(k)
continue
if k not in tag_set:
remove_tags.append(k)
if remove_tags:
log.debug("Pruning asg:%s instances:%d of old tags: %s" % (
asg['AutoScalingGroupName'], instance_count, remove_tags))
if extra_tags:
log.debug("Asg: %s has uneven tags population: %s" % (
asg['AutoScalingGroupName'], instance_tags))
# Remove orphan tags
remove_tags.extend(extra_tags)
if not self.manager.config.dryrun:
client.delete_tags(
Resources=[i['InstanceId'] for i in instances],
Tags=[{'Key': t} for t in remove_tags])
def get_instance_map(self, asgs):
instance_ids = [
i['InstanceId'] for i in
list(itertools.chain(*[
g['Instances']
for g in asgs if g['Instances']]))]
if not instance_ids:
return {}
return {i['InstanceId']: i for i in
self.manager.get_resource_manager(
'ec2').get_resources(instance_ids)}
@actions.register('rename-tag')
class RenameTag(Action):
"""Rename a tag on an AutoScaleGroup.
:example:
.. code-block:: yaml
policies:
- name: asg-rename-owner-tag
resource: asg
filters:
- "tag:OwnerNames": present
actions:
- type: rename-tag
propagate: true
source: OwnerNames
dest: OwnerName
"""
schema = type_schema(
'rename-tag', required=['source', 'dest'],
propagate={'type': 'boolean'},
source={'type': 'string'},
dest={'type': 'string'})
def get_permissions(self):
permissions = (
'autoscaling:CreateOrUpdateTags',
'autoscaling:DeleteTags')
if self.data.get('propagate', True):
permissions += ('ec2:CreateTags', 'ec2:DeleteTags')
return permissions
def process(self, asgs):
source = self.data.get('source')
dest = self.data.get('dest')
count = len(asgs)
filtered = []
for a in asgs:
for t in a.get('Tags'):
if t['Key'] == source:
filtered.append(a)
break
asgs = filtered
self.log.info("Filtered from %d asgs to %d", count, len(asgs))
self.log.info(
"Renaming %s to %s on %d asgs", source, dest, len(filtered))
with self.executor_factory(max_workers=3) as w:
list(w.map(self.process_asg, asgs))
def process_asg(self, asg):
"""Move source tag to destination tag.
Check tag count on asg
        Create new tag
Delete old tag
Check tag count on instance
Create new tag
Delete old tag
"""
source_tag = self.data.get('source')
tag_map = {t['Key']: t for t in asg.get('Tags', [])}
source = tag_map[source_tag]
destination_tag = self.data.get('dest')
propagate = self.data.get('propagate', True)
client = local_session(
self.manager.session_factory).client('autoscaling')
# technically safer to create first, but running into
# max tags constraints, otherwise.
#
# delete_first = len([t for t in tag_map if not t.startswith('aws:')])
client.delete_tags(Tags=[
{'ResourceId': asg['AutoScalingGroupName'],
'ResourceType': 'auto-scaling-group',
'Key': source_tag,
'Value': source['Value']}])
client.create_or_update_tags(Tags=[
{'ResourceId': asg['AutoScalingGroupName'],
'ResourceType': 'auto-scaling-group',
'PropagateAtLaunch': propagate,
'Key': destination_tag,
'Value': source['Value']}])
if propagate:
self.propagate_instance_tag(source, destination_tag, asg)
def propagate_instance_tag(self, source, destination_tag, asg):
client = local_session(self.manager.session_factory).client('ec2')
client.delete_tags(
Resources=[i['InstanceId'] for i in asg['Instances']],
Tags=[{"Key": source['Key']}])
client.create_tags(
Resources=[i['InstanceId'] for i in asg['Instances']],
Tags=[{'Key': destination_tag, 'Value': source['Value']}])
@actions.register('mark-for-op')
class MarkForOp(Tag):
"""Action to create a delayed action for a later date
:example:
.. code-block:: yaml
policies:
- name: asg-suspend-schedule
resource: asg
filters:
- type: value
key: MinSize
value: 2
actions:
- type: mark-for-op
tag: custodian_suspend
message: "Suspending: {op}@{action_date}"
op: suspend
days: 7
"""
schema = type_schema(
'mark-for-op',
op={'enum': ['suspend', 'resume', 'delete']},
key={'type': 'string'},
tag={'type': 'string'},
message={'type': 'string'},
days={'type': 'number', 'minimum': 0},
hours={'type': 'number', 'minimum': 0})
default_template = (
'AutoScaleGroup does not meet org policy: {op}@{action_date}')
def validate(self):
self.tz = zoneinfo.gettz(
Time.TZ_ALIASES.get(self.data.get('tz', 'utc')))
if not self.tz:
raise PolicyValidationError(
"Invalid timezone specified %s on %s" % (self.tz, self.manager.data))
return self
def process(self, asgs):
self.tz = zoneinfo.gettz(
Time.TZ_ALIASES.get(self.data.get('tz', 'utc')))
msg_tmpl = self.data.get('message', self.default_template)
key = self.data.get('key', self.data.get('tag', DEFAULT_TAG))
op = self.data.get('op', 'suspend')
days = self.data.get('days', 0)
hours = self.data.get('hours', 0)
action_date = self._generate_timestamp(days, hours)
try:
msg = msg_tmpl.format(
op=op, action_date=action_date)
except Exception:
self.log.warning("invalid template %s" % msg_tmpl)
msg = self.default_template.format(
op=op, action_date=action_date)
self.log.info("Tagging %d asgs for %s on %s" % (
len(asgs), op, action_date))
self.tag(asgs, key, msg)
def _generate_timestamp(self, days, hours):
n = datetime.now(tz=self.tz)
if days == hours == 0:
# maintains default value of days being 4 if nothing is provided
days = 4
action_date = (n + timedelta(days=days, hours=hours))
if hours > 0:
action_date_string = action_date.strftime('%Y/%m/%d %H%M %Z')
else:
action_date_string = action_date.strftime('%Y/%m/%d')
return action_date_string
@actions.register('suspend')
class Suspend(Action):
"""Action to suspend ASG processes and instances
AWS ASG suspend/resume and process docs https://goo.gl/XYtKQ8
:example:
.. code-block:: yaml
policies:
- name: asg-suspend-processes
resource: asg
filters:
- "tag:SuspendTag": present
actions:
- type: suspend
"""
permissions = ("autoscaling:SuspendProcesses", "ec2:StopInstances")
ASG_PROCESSES = [
"Launch",
"Terminate",
"HealthCheck",
"ReplaceUnhealthy",
"AZRebalance",
"AlarmNotification",
"ScheduledActions",
"AddToLoadBalancer"]
schema = type_schema(
'suspend',
exclude={
'type': 'array',
'title': 'ASG Processes to not suspend',
'items': {'enum': ASG_PROCESSES}})
ASG_PROCESSES = set(ASG_PROCESSES)
def process(self, asgs):
with self.executor_factory(max_workers=3) as w:
list(w.map(self.process_asg, asgs))
def process_asg(self, asg):
"""Multistep process to stop an asg aprori of setup
- suspend processes
- stop instances
"""
session = local_session(self.manager.session_factory)
asg_client = session.client('autoscaling')
processes = list(self.ASG_PROCESSES.difference(
self.data.get('exclude', ())))
try:
self.manager.retry(
asg_client.suspend_processes,
ScalingProcesses=processes,
AutoScalingGroupName=asg['AutoScalingGroupName'])
except ClientError as e:
if e.response['Error']['Code'] == 'ValidationError':
return
raise
ec2_client = session.client('ec2')
try:
instance_ids = [i['InstanceId'] for i in asg['Instances']]
if not instance_ids:
return
retry = get_retry((
'RequestLimitExceeded', 'Client.RequestLimitExceeded'))
retry(ec2_client.stop_instances, InstanceIds=instance_ids)
except ClientError as e:
if e.response['Error']['Code'] in (
'InvalidInstanceID.NotFound',
'IncorrectInstanceState'):
log.warning("Erroring stopping asg instances %s %s" % (
asg['AutoScalingGroupName'], e))
return
raise
@actions.register('resume')
class Resume(Action):
"""Resume a suspended autoscale group and its instances
Parameter 'delay' is the amount of time (in seconds) to wait between
resuming each instance within the ASG (default value: 30)
:example:
.. code-block:: yaml
policies:
- name: asg-resume-processes
resource: asg
filters:
- "tag:Resume": present
actions:
- type: resume
delay: 300
"""
schema = type_schema('resume', delay={'type': 'number'})
permissions = ("autoscaling:ResumeProcesses", "ec2:StartInstances")
def process(self, asgs):
original_count = len(asgs)
asgs = [a for a in asgs if a['SuspendedProcesses']]
self.delay = self.data.get('delay', 30)
self.log.debug("Filtered from %d to %d suspended asgs",
original_count, len(asgs))
with self.executor_factory(max_workers=3) as w:
futures = {}
for a in asgs:
futures[w.submit(self.resume_asg_instances, a)] = a
for f in as_completed(futures):
if f.exception():
log.error("Traceback resume asg:%s instances error:%s" % (
futures[f]['AutoScalingGroupName'],
f.exception()))
continue
log.debug("Sleeping for asg health check grace")
time.sleep(self.delay)
with self.executor_factory(max_workers=3) as w:
futures = {}
for a in asgs:
futures[w.submit(self.resume_asg, a)] = a
for f in as_completed(futures):
if f.exception():
log.error("Traceback resume asg:%s error:%s" % (
futures[f]['AutoScalingGroupName'],
f.exception()))
def resume_asg_instances(self, asg):
"""Resume asg instances.
"""
session = local_session(self.manager.session_factory)
ec2_client = session.client('ec2')
instance_ids = [i['InstanceId'] for i in asg['Instances']]
if not instance_ids:
return
retry = get_retry((
'RequestLimitExceeded', 'Client.RequestLimitExceeded'))
retry(ec2_client.start_instances, InstanceIds=instance_ids)
def resume_asg(self, asg):
"""Resume asg processes.
"""
session = local_session(self.manager.session_factory)
asg_client = session.client('autoscaling')
self.manager.retry(
asg_client.resume_processes,
AutoScalingGroupName=asg['AutoScalingGroupName'])
@actions.register('delete')
class Delete(Action):
"""Action to delete an ASG
The 'force' parameter is needed when deleting an ASG that has instances
attached to it.
:example:
.. code-block:: yaml
policies:
- name: asg-unencrypted
resource: asg
filters:
- type: not-encrypted
exclude_image: true
actions:
- type: delete
force: true
"""
schema = type_schema('delete', force={'type': 'boolean'})
permissions = ("autoscaling:DeleteAutoScalingGroup",)
def process(self, asgs):
with self.executor_factory(max_workers=3) as w:
list(w.map(self.process_asg, asgs))
@worker
def process_asg(self, asg):
force_delete = self.data.get('force', False)
if force_delete:
log.info('Forcing deletion of Auto Scaling group %s',
asg['AutoScalingGroupName'])
session = local_session(self.manager.session_factory)
asg_client = session.client('autoscaling')
try:
self.manager.retry(
asg_client.delete_auto_scaling_group,
AutoScalingGroupName=asg['AutoScalingGroupName'],
ForceDelete=force_delete)
except ClientError as e:
if e.response['Error']['Code'] == 'ValidationError':
log.warning("Erroring deleting asg %s %s",
asg['AutoScalingGroupName'], e)
return
raise
@resources.register('launch-config')
class LaunchConfig(query.QueryResourceManager):
class resource_type(object):
service = 'autoscaling'
type = 'launchConfiguration'
id = name = 'LaunchConfigurationName'
date = 'CreatedTime'
dimension = None
enum_spec = (
'describe_launch_configurations', 'LaunchConfigurations', None)
filter_name = 'LaunchConfigurationNames'
filter_type = 'list'
config_type = 'AWS::AutoScaling::LaunchConfiguration'
retry = staticmethod(get_retry(('Throttling',)))
def get_source(self, source_type):
if source_type == 'describe':
return DescribeLaunchConfig(self)
elif source_type == 'config':
return query.ConfigSource(self)
raise ValueError('invalid source %s' % source_type)
class DescribeLaunchConfig(query.DescribeSource):
def augment(self, resources):
return resources
@LaunchConfig.filter_registry.register('age')
class LaunchConfigAge(AgeFilter):
"""Filter ASG launch configuration by age (in days)
:example:
.. code-block:: yaml
policies:
- name: asg-launch-config-old
resource: launch-config
filters:
- type: age
days: 90
op: ge
"""
date_attribute = "CreatedTime"
schema = type_schema(
'age',
op={'type': 'string', 'enum': list(OPERATORS.keys())},
days={'type': 'number'})
@LaunchConfig.filter_registry.register('unused')
class UnusedLaunchConfig(Filter):
"""Filters all launch configurations that are not in use but exist
:example:
.. code-block:: yaml
policies:
- name: asg-unused-launch-config
resource: launch-config
filters:
- unused
"""
schema = type_schema('unused')
def get_permissions(self):
return self.manager.get_resource_manager('asg').get_permissions()
def process(self, configs, event=None):
asgs = self.manager.get_resource_manager('asg').resources()
self.used = set([
a.get('LaunchConfigurationName', a['AutoScalingGroupName'])
for a in asgs])
return super(UnusedLaunchConfig, self).process(configs)
def __call__(self, config):
return config['LaunchConfigurationName'] not in self.used
@LaunchConfig.action_registry.register('delete')
class LaunchConfigDelete(Action):
"""Filters all unused launch configurations
:example:
.. code-block:: yaml
policies:
- name: asg-unused-launch-config-delete
resource: launch-config
filters:
- unused
actions:
- delete
"""
schema = type_schema('delete')
permissions = ("autoscaling:DeleteLaunchConfiguration",)
def process(self, configs):
with self.executor_factory(max_workers=2) as w:
list(w.map(self.process_config, configs))
@worker
def process_config(self, config):
session = local_session(self.manager.session_factory)
client = session.client('autoscaling')
try:
client.delete_launch_configuration(
LaunchConfigurationName=config[
'LaunchConfigurationName'])
except ClientError as e:
# Catch already deleted
if e.response['Error']['Code'] == 'ValidationError':
return
raise
| apache-2.0 | 2,157,153,835,286,534,400 | 32.988248 | 94 | 0.553369 | false |
freeman-lab/dask | dask/array/random.py | 1 | 11940 | from __future__ import absolute_import, division, print_function
import numpy as np
from itertools import product
from .core import normalize_chunks, Array, names
def doc_wraps(func):
""" Copy docstring from one function to another """
def _(func2):
func2.__doc__ = func.__doc__.replace('>>>', '>>').replace('...', '..')
return func2
return _
class RandomState(object):
"""
Mersenne Twister pseudo-random number generator
    This object contains state to deterministically generate pseudo-random
    numbers from a variety of probability distributions. It is identical to
``np.random.RandomState`` except that all functions also take a ``chunks=``
keyword argument.
Examples
--------
>>> import dask.array as da
>>> state = da.random.RandomState(1234) # a seed
>>> x = state.normal(10, 0.1, size=3, chunks=(2,))
>>> x.compute()
array([ 9.95487579, 10.02999135, 10.08498441])
See Also:
np.random.RandomState
"""
def __init__(self, seed=None):
self._numpy_state = np.random.RandomState(seed)
def _wrap(self, func, *args, **kwargs):
size = kwargs.pop('size')
chunks = kwargs.pop('chunks')
if not isinstance(size, (tuple, list)):
size = (size,)
chunks = normalize_chunks(chunks, size)
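        # chunks now holds explicit per-dimension chunk tuples tiling `size`
        # (shorthand specs like (2,) were expanded by normalize_chunks)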
name = next(names)
# Get dtype
kw = kwargs.copy()
kw['size'] = (0,)
dtype = func(np.random.RandomState(), *args, **kw).dtype
# Build graph
keys = product([name], *[range(len(bd)) for bd in chunks])
sizes = product(*chunks)
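        # draw an independent 32-bit seed from the parent state for every
        # chunk, so a fixed top-level seed reproduces the same array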
vals = ((_apply_random,
func.__name__,
self._numpy_state.randint(np.iinfo(np.int32).max),
size, args, kwargs)
for size in sizes)
dsk = dict(zip(keys, vals))
return Array(dsk, name, chunks, dtype=dtype)
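    # Illustrative use of a chunked wrapper (mirrors the class docstring):
    #
    #   >>> state = RandomState(1234)
    #   >>> x = state.uniform(0, 1, size=(4, 4), chunks=(2, 2))
    #   >>> x.compute()  # doctest: +SKIP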
@doc_wraps(np.random.RandomState.beta)
def beta(self, a, b, size=None, chunks=None):
return self._wrap(np.random.RandomState.beta, a, b,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.binomial)
def binomial(self, n, p, size=None, chunks=None):
return self._wrap(np.random.RandomState.binomial, n, p,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.chisquare)
def chisquare(self, df, size=None, chunks=None):
return self._wrap(np.random.RandomState.chisquare, df,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.choice)
def choice(self, a, size=None, replace=True, p=None, chunks=None):
        return self._wrap(np.random.RandomState.choice, a,
                          size=size, replace=replace, p=p, chunks=chunks)
# @doc_wraps(np.random.RandomState.dirichlet)
# def dirichlet(self, alpha, size=None, chunks=None):
@doc_wraps(np.random.RandomState.exponential)
def exponential(self, scale=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.exponential, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.f)
def f(self, dfnum, dfden, size=None, chunks=None):
return self._wrap(np.random.RandomState.f, dfnum, dfden,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.gamma)
    def gamma(self, shape, scale=1.0, size=None, chunks=None):
        return self._wrap(np.random.RandomState.gamma, shape, scale,
                          size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.geometric)
def geometric(self, p, size=None, chunks=None):
return self._wrap(np.random.RandomState.geometric, p,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.gumbel)
def gumbel(self, loc=0.0, scale=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.gumbel, loc, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.hypergeometric)
def hypergeometric(self, ngood, nbad, nsample, size=None, chunks=None):
return self._wrap(np.random.RandomState.hypergeometric,
ngood, nbad, nsample,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.laplace)
def laplace(self, loc=0.0, scale=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.laplace, loc, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.logistic)
def logistic(self, loc=0.0, scale=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.logistic, loc, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.lognormal)
def lognormal(self, mean=0.0, sigma=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.lognormal, mean, sigma,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.logseries)
def logseries(self, p, size=None, chunks=None):
return self._wrap(np.random.RandomState.logseries, p,
size=size, chunks=chunks)
# multinomial
@doc_wraps(np.random.RandomState.negative_binomial)
def negative_binomial(self, n, p, size=None, chunks=None):
return self._wrap(np.random.RandomState.negative_binomial, n, p,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.noncentral_chisquare)
def noncentral_chisquare(self, df, nonc, size=None, chunks=None):
return self._wrap(np.random.RandomState.noncentral_chisquare, df, nonc,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.noncentral_f)
def noncentral_f(self, dfnum, dfden, nonc, size=None, chunks=None):
return self._wrap(np.random.RandomState.noncentral_f,
dfnum, dfden, nonc,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.normal)
def normal(self, loc=0.0, scale=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.normal, loc, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.pareto)
def pareto(self, a, size=None, chunks=None):
return self._wrap(np.random.RandomState.pareto, a,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.poisson)
def poisson(self, lam=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.poisson, lam,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.power)
def power(self, a, size=None, chunks=None):
return self._wrap(np.random.RandomState.power, a,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.randint)
def randint(self, low, high=None, size=None, chunks=None):
return self._wrap(np.random.RandomState.randint, low, high,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.random_integers)
def random_integers(self, low, high=None, size=None, chunks=None):
return self._wrap(np.random.RandomState.random_integers, low, high,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.random_sample)
def random_sample(self, size=None, chunks=None):
return self._wrap(np.random.RandomState.random_sample,
size=size, chunks=chunks)
random = random_sample
@doc_wraps(np.random.RandomState.rayleigh)
def rayleigh(self, scale=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.rayleigh, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.standard_cauchy)
def standard_cauchy(self, size=None, chunks=None):
return self._wrap(np.random.RandomState.standard_cauchy,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.standard_exponential)
def standard_exponential(self, size=None, chunks=None):
return self._wrap(np.random.RandomState.standard_exponential,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.standard_gamma)
def standard_gamma(self, shape, size=None, chunks=None):
return self._wrap(np.random.RandomState.standard_gamma, shape,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.standard_normal)
def standard_normal(self, size=None, chunks=None):
return self._wrap(np.random.RandomState.standard_normal,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.standard_t)
def standard_t(self, df, size=None, chunks=None):
return self._wrap(np.random.RandomState.standard_t, df,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.tomaxint)
def tomaxint(self, size=None, chunks=None):
return self._wrap(np.random.RandomState.tomaxint,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.triangular)
def triangular(self, left, mode, right, size=None, chunks=None):
return self._wrap(np.random.RandomState.triangular, left, mode, right,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.uniform)
def uniform(self, low=0.0, high=1.0, size=None, chunks=None):
return self._wrap(np.random.RandomState.uniform, low, high,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.vonmises)
def vonmises(self, mu, kappa, size=None, chunks=None):
return self._wrap(np.random.RandomState.vonmises, mu, kappa,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.wald)
def wald(self, mean, scale, size=None, chunks=None):
return self._wrap(np.random.RandomState.wald, mean, scale,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.weibull)
def weibull(self, a, size=None, chunks=None):
return self._wrap(np.random.RandomState.weibull, a,
size=size, chunks=chunks)
@doc_wraps(np.random.RandomState.zipf)
def zipf(self, a, size=None, chunks=None):
return self._wrap(np.random.RandomState.zipf, a,
size=size, chunks=chunks)
def _apply_random(func, seed, size, args, kwargs):
""" Apply RandomState method with seed
>>> _apply_random('normal', 123, 3, (10, 1.0), {})
array([ 8.9143694 , 10.99734545, 10.2829785 ])
"""
state = np.random.RandomState(seed)
func = getattr(state, func)
return func(*args, size=size, **kwargs)
_state = RandomState()
beta = _state.beta
binomial = _state.binomial
chisquare = _state.chisquare
exponential = _state.exponential
f = _state.f
gamma = _state.gamma
geometric = _state.geometric
gumbel = _state.gumbel
hypergeometric = _state.hypergeometric
laplace = _state.laplace
logistic = _state.logistic
lognormal = _state.lognormal
logseries = _state.logseries
negative_binomial = _state.negative_binomial
noncentral_chisquare = _state.noncentral_chisquare
noncentral_f = _state.noncentral_f
normal = _state.normal
pareto = _state.pareto
poisson = _state.poisson
power = _state.power
rayleigh = _state.rayleigh
random_sample = _state.random_sample
random = random_sample
triangular = _state.triangular
uniform = _state.uniform
vonmises = _state.vonmises
wald = _state.wald
weibull = _state.weibull
zipf = _state.zipf
"""
Standard distributions
"""
standard_cauchy = _state.standard_cauchy
standard_exponential = _state.standard_exponential
standard_gamma = _state.standard_gamma
standard_normal = _state.standard_normal
standard_t = _state.standard_t
| bsd-3-clause | 7,957,553,145,970,565,000 | 37.146965 | 79 | 0.63258 | false |
nlgcoin/guldencoin-official | test/functional/mempool_spend_coinbase.py | 2 | 2317 | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test spending coinbase transactions.
The coinbase transaction in block N can appear in block
N+100... so is valid in the mempool when the best block
height is N+99.
This test makes sure coinbase spends that will be mature
in the next block are accepted into the memory pool,
but less mature coinbase spends are NOT.
"""
from test_framework.test_framework import GuldenTestFramework
from test_framework.blocktools import create_raw_transaction
from test_framework.util import assert_equal, assert_raises_rpc_error
class MempoolSpendCoinbaseTest(GuldenTestFramework):
def set_test_params(self):
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
chain_height = self.nodes[0].getblockcount()
assert_equal(chain_height, 200)
node0_address = self.nodes[0].getnewaddress()
# Coinbase at height chain_height-100+1 ok in mempool, should
        # get mined. Coinbase at height chain_height-100+2 is
        # too immature to spend.
b = [self.nodes[0].getblockhash(n) for n in range(101, 103)]
coinbase_txids = [self.nodes[0].getblock(h)['tx'][0] for h in b]
spends_raw = [create_raw_transaction(self.nodes[0], txid, node0_address, amount=49.99) for txid in coinbase_txids]
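        # amount=49.99 spends the (assumed) 50-coin regtest coinbase output,
        # leaving the 0.01 remainder as the transaction fee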
spend_101_id = self.nodes[0].sendrawtransaction(spends_raw[0])
# coinbase at height 102 should be too immature to spend
        assert_raises_rpc_error(-26, "bad-txns-premature-spend-of-coinbase", self.nodes[0].sendrawtransaction, spends_raw[1])
# mempool should have just spend_101:
assert_equal(self.nodes[0].getrawmempool(), [ spend_101_id ])
# mine a block, spend_101 should get confirmed
self.nodes[0].generate(1)
assert_equal(set(self.nodes[0].getrawmempool()), set())
# ... and now height 102 can be spent:
spend_102_id = self.nodes[0].sendrawtransaction(spends_raw[1])
assert_equal(self.nodes[0].getrawmempool(), [ spend_102_id ])
if __name__ == '__main__':
MempoolSpendCoinbaseTest().main()
| mit | -8,024,148,556,039,803,000 | 40.375 | 124 | 0.694001 | false |
Tomasuh/Tomasuh.github.io | files/cyclic/dbcommands.py | 1 | 1472 | import sqlite3
import os.path
import datetime
class the_db:
def __init__(self):
exists = os.path.exists('./cyclic.db')
self.conn = sqlite3.connect('cyclic.db')
self.c = self.conn.cursor()
if exists:
return
# If it's a new instance of the db we need to generate the layout
sql = '''CREATE TABLE posts (key text PRIMARY KEY,
title text,
user text,
date integer,
size integer,
syntax text,
expire integer,
scrape_url text,
full_url text)'''
self.c.execute(sql)
self.conn.commit()
def post_exists(self, key):
sql = '''SELECT COUNT(*) FROM posts WHERE key=?'''
self.c.execute(sql, (key,))
return self.c.fetchone()[0]==1
def add_post(self, data):
if self.post_exists(data["key"]):
print "Post exists %s" % data["key"]
return
sql = '''INSERT INTO posts(key, title, user,date, size, syntax, expire, scrape_url, full_url)
VALUES (?,?,?,?,?,?,?,?,?)'''
param = (data["key"], \
data["title"], \
data["user"], \
data["date"], \
data["size"], \
data["syntax"], \
data["expire"], \
data["scrape_url"], \
data["full_url"])
self.c.execute(sql, param)
self.conn.commit()
def fetch_posts(self):
sql = '''SELECT * FROM posts'''
self.c.execute(sql)
rows = self.c.fetchall()
n = 0
while n < len(rows):
tmp = list(rows[n])
tmp[3] = datetime.datetime.fromtimestamp(tmp[3])
tmp[6] = datetime.datetime.fromtimestamp(tmp[6])
rows[n] = tmp
n += 1
return rows
| mit | -3,592,289,148,472,909,300 | 18.626667 | 95 | 0.605978 | false |
cloudmesh/book | cloud-clusters/bin/todo.py | 1 | 1641 | #!/usr/bin/env python
from __future__ import print_function
import sys
import os
import fnmatch
import glob
from pprint import pprint
def recursive_glob(rootdir='.', pattern='*.md'):
"""Search recursively for files matching a specified pattern.
Adapted from http://stackoverflow.com/questions/2186525/use-a-glob-to-find-files-recursively-in-python
"""
matches = []
for root, dirnames, filenames in os.walk(rootdir):
for filename in fnmatch.filter(filenames, pattern):
matches.append(os.path.join(root, filename))
return matches
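# Minimal usage sketch (hypothetical result paths), mirroring the call made
# just below:
#   >>> recursive_glob(rootdir="chapters", pattern="*.md")
#   ['chapters/intro.md', 'chapters/cloud/aws.md', ...]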
files = recursive_glob(rootdir="chapters")
def extract_todos(filename):
if 'todo.md' in filename:
return
    with open(filename, 'r') as f:
        content = f.readlines()
    count = 1
    for line in content:
        line = line.strip()
        # path = filename.replace("chapters/", "")
        path = os.path.basename(filename).replace(".md", "")
        link = '[' + path + '](https://github.com/cloudmesh/book/edit/master/cloud-clusters/' + filename + ')' + '{style="font-size:50%"}'
        if "todo" in line:
            print("|", count, "|", link, "|", line, "|")
        if "TODO" in line:
            line = line.replace("\\TODO{", "")
            line = line.replace("}", "")
            line = line.replace("TODO:", "")
            line = line.replace("TODO", "")
            print("|", count, "|", link, "|", line, "|")
        count = count + 1
#print("# TODO")
print()
print('<div class="smalltable">')
print("| Line | Path | Description |")
print("| - | ---- | -------------- |")
for file in files:
extract_todos(file)
print('</div>')
print()
| apache-2.0 | -738,105,093,781,443,800 | 24.246154 | 138 | 0.582572 | false |
mlcommons/training | object_detection/pytorch/maskrcnn_benchmark/utils/registry.py | 1 | 1994 | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
def _register_generic(module_dict, module_name, module):
assert module_name not in module_dict
module_dict[module_name] = module
class Registry(dict):
    '''
    A helper class for managing module registration; it extends a dictionary
    and provides a register function.
    E.g. creating a registry:
        some_registry = Registry({"default": default_module})
    There are two ways of registering new modules:
    1): the normal way is just calling the register function:
        def foo():
            ...
        some_registry.register("foo_module", foo)
    2): used as a decorator when declaring the module:
        @some_registry.register("foo_module")
        @some_registry.register("foo_module_nickname")
        def foo():
            ...
    Access of a module is just like using a dictionary, e.g.:
        f = some_registry["foo_module"]
    '''
def __init__(self, *args, **kwargs):
super(Registry, self).__init__(*args, **kwargs)
def register(self, module_name, module=None):
# used as function call
if module is not None:
_register_generic(self, module_name, module)
return
# used as decorator
def register_fn(fn):
_register_generic(self, module_name, fn)
return fn
return register_fn
| apache-2.0 | -7,268,464,664,019,282,000 | 33.37931 | 76 | 0.662487 | false |
raiden-network/raiden | tools/pip-compile-wrapper.py | 1 | 13013 | #!/usr/bin/env python
"""
Helper utility to compile / upgrade requirements files from templates.
This only manages dependencies between requirements sources.
The actual compiling is delegated to ``pip-compile`` from the ``pip-tools` package.
NOTE: This utility *must only* use stdlib imports in order to be runnable even
before the dev requirements are installed.
"""
import os
import re
import shlex
import subprocess
import sys
from argparse import ArgumentParser
from enum import Enum
from itertools import chain, groupby, repeat
from operator import itemgetter
from pathlib import Path
from shutil import which
from typing import Dict, Iterable, Iterator, List, Optional, Set, Tuple
# Regex taken from https://www.python.org/dev/peps/pep-0508/#names
# The trailing `$` is intentionally left out since we're dealing with complete requirement lines,
# not just bare package names here. Since regex matching is greedy by default this shouldn't cause
# any problems for valid package names.
REQUIREMENT_RE = re.compile(r"^([A-Z0-9][A-Z0-9._-]*[A-Z0-9])", re.IGNORECASE)
REQUIREMENTS_SOURCE_DEV = "requirements-dev.in"
SCRIPT_NAME = os.environ.get("_SCRIPT_NAME", sys.argv[0])
REQUIREMENTS_DIR = Path(__file__).parent.parent.joinpath("requirements").resolve()
SOURCE_PATHS: Dict[str, Path] = {
path.relative_to(REQUIREMENTS_DIR).stem: path.resolve()
for path in REQUIREMENTS_DIR.glob("*.in")
}
TARGET_PATHS: Dict[str, Path] = {
name: REQUIREMENTS_DIR.joinpath(name).with_suffix(".txt") for name in SOURCE_PATHS.keys()
}
SOURCE_DEPENDENCIES: Dict[str, Set[str]] = {}
class TargetType(Enum):
SOURCE = 1
TARGET = 2
ALL = 3
def _resolve_source_dependencies() -> None:
"""Determine direct dependencies between requirements files
Dependencies of the form ``-r <other-file>`` are recognized.
"""
for source_name, source_path in SOURCE_PATHS.items():
source_path = source_path.resolve()
SOURCE_DEPENDENCIES[source_name] = set()
target_dir: Path = source_path.parent
with source_path.open("rt") as target_file:
for line in target_file:
line = line.strip()
if line.startswith("-r"):
required = (
target_dir.joinpath(line.lstrip("-r").strip())
.resolve()
.relative_to(REQUIREMENTS_DIR)
.stem
)
SOURCE_DEPENDENCIES[source_name].add(required)
_resolve_source_dependencies()
def _run_pip_compile(
source_name: str,
upgrade_all: bool = False,
upgrade_packages: Optional[Set[str]] = None,
verbose: bool = False,
dry_run: bool = False,
pre: bool = False,
) -> None:
"""Run pip-compile with the given parameters
This automatically makes sure that packages listed in ``upgrade_packages`` are only passed
for requirement files that already contain this package either in the source or the target.
This is necessary since pip-compile will otherwise unconditionally add that package to the
output.
"""
assert_msg = "Only one of `upgrade_all` or `upgrade_packages` may be given."
assert not (upgrade_all and upgrade_packages), assert_msg
pip_compile_exe = which("pip-compile")
if not pip_compile_exe:
raise RuntimeError("pip-compile missing. This shouldn't happen.")
if not upgrade_packages:
upgrade_packages = set()
working_path = Path.cwd()
source_path = SOURCE_PATHS[source_name]
target_path = TARGET_PATHS[source_name]
upgrade_packages_cmd: List[str] = []
if upgrade_packages:
packages_in_target = {
package_name
for package_name, _ in _get_requirement_packages(source_name, TargetType.ALL)
}
upgrade_packages_cmd = list(
chain.from_iterable(
zip(repeat("--upgrade-package"), upgrade_packages.intersection(packages_in_target))
)
)
upgrade_all_cmd: List[str] = []
if upgrade_all:
upgrade_all_cmd = ["--upgrade"]
dry_run_cmd = ["--dry-run"] if dry_run else []
pre_cmd = ["--pre"] if pre else []
# We use a relative path for the source file because of
# https://github.com/raiden-network/raiden/pull/5987#discussion_r392145782 and
# https://github.com/jazzband/pip-tools/issues/1084
command = [
pip_compile_exe,
"--verbose" if verbose else "--quiet",
*dry_run_cmd,
*pre_cmd,
"--no-emit-index-url",
*upgrade_packages_cmd,
*upgrade_all_cmd,
"--output-file",
str(target_path),
str(source_path.relative_to(working_path)),
]
print(f"Compiling {source_path.name}...", end="", flush=True)
if verbose:
print(f"\nRunning command: {' '.join(shlex.quote(c) for c in command)}")
env = os.environ.copy()
env[
"CUSTOM_COMPILE_COMMAND"
] = "'requirements/deps compile' (for details see requirements/README)"
process = subprocess.run(
command, capture_output=(not verbose), cwd=str(source_path.parent), env=env
)
if process.returncode == 0:
print("\b\b Success.")
return
print("\b\b Error!")
if not verbose:
print(process.stdout.decode())
print(process.stderr.decode())
process.check_returncode()
def _resolve_deps(source_names: Iterable[str]) -> List[str]:
"""Partially order source_names based on their dependencies
Raises an Exception if not possible.
The resulting list has the following property: Each entry does not depend on a later entry.
"""
requirements = {
source: dependencies.intersection(source_names)
for source, dependencies in SOURCE_DEPENDENCIES.items()
if source in source_names
}
solution: List[str] = []
while requirements:
satisfied = {source for source, targets in requirements.items() if not targets}
if not satisfied:
raise RuntimeError(f"Missing dependencies or circular dependency in: {requirements}")
for source in satisfied:
del requirements[source]
for dependencies in requirements.values():
dependencies -= satisfied
solution.extend(satisfied)
return solution
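# Illustrative walk-through with hypothetical file names: given
# SOURCE_DEPENDENCIES of {'base': set(), 'dev': {'base'}, 'docs': {'base'}},
# _resolve_deps(['base', 'dev', 'docs']) returns ['base', 'dev', 'docs'].
# 'base' is emitted first because it needs nothing; 'dev' and 'docs' may
# appear in either order since neither depends on the other.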
def _get_requirement_packages(
source_name: str, where: TargetType = TargetType.SOURCE
) -> Iterator[Tuple[str, str]]:
if where is TargetType.SOURCE:
source_paths = [SOURCE_PATHS.get(source_name)]
elif where is TargetType.TARGET:
source_paths = [TARGET_PATHS.get(source_name)]
elif where is TargetType.ALL:
source_paths = [path.get(source_name) for path in [SOURCE_PATHS, TARGET_PATHS]]
else:
raise ValueError("Invalid 'where'")
filtered_source_paths = [source_path for source_path in source_paths if source_path]
if not filtered_source_paths or not all(path.exists() for path in filtered_source_paths):
        # nothing to scan; stop here so the loop below never opens a missing file
        return
for source_path in filtered_source_paths:
with source_path.open("rt") as source_file:
for line in source_file:
line, *_ = line.strip().partition("#")
line = line.strip()
if not line or line.startswith("-"):
continue
match = REQUIREMENT_RE.search(line)
if match:
yield match.group(1), line
def _get_sources_for_packages(package_names: Set[str], where: TargetType) -> Dict[str, Set[str]]:
"""Return source and / or target files concerned by packages"""
package_to_source = [
(package_name, source_name)
for source_name in SOURCE_PATHS.keys()
for package_name, _ in _get_requirement_packages(source_name, where)
if package_name in package_names
]
return {
key: {source_name for _, source_name in group}
for key, group in groupby(sorted(package_to_source, key=itemgetter(0)), key=itemgetter(0))
}
def _get_requirement_package(source_name: str, target_package_name: str) -> Optional[str]:
for package_name, req_line in _get_requirement_packages(source_name):
if package_name == target_package_name:
return req_line
return None
def _ensure_pip_tools() -> None:
if not which("pip-compile"):
print("pip-tools not available.")
pip_tools_req = _get_requirement_package(
REQUIREMENTS_SOURCE_DEV.replace(".in", ""), "pip-tools"
)
if not pip_tools_req:
raise RuntimeError(f"Package 'pip-tools' not found in {REQUIREMENTS_SOURCE_DEV}")
print(f"Installing {pip_tools_req}...", end="", flush=True)
process = subprocess.run(
[sys.executable, "-m", "pip", "install", pip_tools_req],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
process.check_returncode()
print("\b\b Done.")
def compile_source(
upgrade_all: bool = False, verbose: bool = False, dry_run: bool = False
) -> None:
for source_name in _resolve_deps(SOURCE_PATHS.keys()):
_run_pip_compile(source_name, upgrade_all=upgrade_all, verbose=verbose, dry_run=dry_run)
def upgrade_source(
upgrade_package_names: Set[str],
verbose: bool = False,
dry_run: bool = False,
pre: bool = False,
) -> None:
packages_to_sources = _get_sources_for_packages(upgrade_package_names, TargetType.ALL)
_newline = "\n - "
missing_packages = upgrade_package_names - packages_to_sources.keys()
if missing_packages:
print(
"Some of the given packages were not found in either source or target files.\n"
"Please check that the packages are spelled correctly.\n"
"If any of these packages were newly added to any of the source files you need to "
f"run '{SCRIPT_NAME} compile' first.\n"
f"Missing package(s):\n - {_newline.join(missing_packages)}"
)
sys.exit(1)
grouped_packages_to_sources = [
(set(package_name for package_name, _ in group), key)
for key, group in groupby(
sorted(packages_to_sources.items(), key=itemgetter(1)), key=itemgetter(1)
)
]
for package_names, source_names in grouped_packages_to_sources:
print(f"Upgrading package(s):\n - {_newline.join(package_names)}")
for source_name in _resolve_deps(source_names):
_run_pip_compile(
source_name,
upgrade_packages=package_names,
verbose=verbose,
dry_run=dry_run,
pre=pre,
)
def main() -> None:
parser = ArgumentParser(prog=SCRIPT_NAME)
parser.add_argument("-v", "--verbose", action="store_true", default=False)
parser.add_argument("-n", "--dry-run", action="store_true", default=False)
commands = parser.add_subparsers(title="Sub-commands", required=True, dest="command")
commands.add_parser(
"compile",
help=(
"Compile source files. "
"Keep current versions unless changed requirements force newer versions."
),
)
upgrade_parser = commands.add_parser(
"upgrade",
help=(
"Compile source files and upgrade package versions. "
"Optionally specify package names to upgrade only those."
),
)
upgrade_parser.add_argument(
"--pre",
action="store_true",
default=False,
help="Use pre-release versions of packages if available.",
)
upgrade_parser.add_argument("packages", metavar="package", nargs="*")
parsed = parser.parse_args()
_ensure_pip_tools()
if parsed.command == "compile":
compile_source(verbose=parsed.verbose, dry_run=parsed.dry_run)
elif parsed.command == "upgrade":
packages = set(parsed.packages)
if not packages:
# This is a standalone script which is not using gevent
resp = input( # pylint: disable=gevent-input-forbidden
"Are you sure you want to upgrade ALL packages? [y/N] "
)
if resp.lower() != "y":
print("Aborting")
sys.exit(1)
compile_source(upgrade_all=True, verbose=parsed.verbose, dry_run=parsed.dry_run)
else:
if parsed.pre:
print(
"Warning: Using the '--pre' option can cause unintended upgrades to "
"prerelease versions of unrelated packages. This is due to constraints in the "
"underlying tools (pip-compile / pip) that don't currently allow constraining "
"pre-releases to only specific packages.\n"
"Please carefully inspect the generated output files!"
)
upgrade_source(
packages, verbose=parsed.verbose, dry_run=parsed.dry_run, pre=parsed.pre
)
if __name__ == "__main__":
main()
| mit | 8,479,760,281,170,723,000 | 35.656338 | 99 | 0.621763 | false |
jfterpstra/bluebottle | bluebottle/bb_donations/tests/test_api.py | 1 | 29699 | import json
from mock import patch
from bluebottle.donations.models import Donation
from bluebottle.orders.models import Order
from bluebottle.test.utils import BluebottleTestCase, SessionTestMixin
from django.conf import settings
from bluebottle.bb_orders.views import ManageOrderDetail
from django.core.urlresolvers import reverse
from bluebottle.clients import properties
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from bluebottle.test.factory_models.projects import ProjectFactory
from bluebottle.test.factory_models.orders import OrderFactory
from bluebottle.test.factory_models.donations import DonationFactory
from bluebottle.test.factory_models.fundraisers import FundraiserFactory
from bluebottle.test.factory_models.rewards import RewardFactory
from bluebottle.utils.utils import StatusDefinition
from rest_framework import status
class DonationApiTestCase(BluebottleTestCase, SessionTestMixin):
def setUp(self):
super(DonationApiTestCase, self).setUp()
self.create_session()
self.user1 = BlueBottleUserFactory.create()
self.user1_token = "JWT {0}".format(self.user1.get_jwt_token())
self.init_projects()
self.project1 = ProjectFactory.create(amount_asked=5000)
self.project1.set_status('campaign')
self.project2 = ProjectFactory.create(amount_asked=3750)
self.project2.set_status('campaign')
self.manage_order_list_url = reverse('manage-order-list')
self.manage_donation_list_url = reverse('manage-donation-list')
self.user = BlueBottleUserFactory.create()
self.user_token = "JWT {0}".format(self.user.get_jwt_token())
self.user2 = BlueBottleUserFactory.create(is_co_financer=True)
self.user2_token = "JWT {0}".format(self.user2.get_jwt_token())
self.project = ProjectFactory.create()
self.order = OrderFactory.create(user=self.user)
# Mock the ManageOrderDetail check_status_psp function which will request status_check at PSP
@patch.object(ManageOrderDetail, 'check_status_psp')
class TestDonationPermissions(DonationApiTestCase):
def test_user_is_order_owner(self, mock_check_status_psp):
""" Test that a user that is owner of the order can post a new donation """
donation1 = {
"project": self.project.slug,
"order": self.order.id,
"amount": 35
}
self.assertEqual(Donation.objects.count(), 0)
response = self.client.post(reverse('manage-donation-list'), donation1,
token=self.user_token)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Donation.objects.count(), 1)
def test_user_is_not_order_owner(self, mock_check_status_psp):
""" Test that a user who is not owner of an order cannot create a new donation """
donation1 = {
"project": self.project.slug,
"order": self.order.id,
"amount": 35
}
self.assertEqual(Donation.objects.count(), 0)
response = self.client.post(reverse('manage-donation-list'), donation1,
token=self.user2_token)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(Donation.objects.count(), 0)
def test_order_status_not_new(self, mock_check_status_psp):
""" Test that a non-new order status produces a forbidden response """
order = OrderFactory.create(user=self.user,
status=StatusDefinition.SUCCESS)
donation1 = {
"project": self.project.slug,
"order": order.id,
"amount": 35
}
self.assertEqual(Donation.objects.count(), 0)
response = self.client.post(reverse('manage-donation-list'), donation1,
token=self.user_token)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(Donation.objects.count(), 0)
def test_order_status_new(self, mock_check_status_psp):
""" Test that a new order status produces a 201 created response """
order = OrderFactory.create(user=self.user,
status=StatusDefinition.CREATED)
donation1 = {
"project": self.project.slug,
"order": order.id,
"amount": 35
}
self.assertEqual(Donation.objects.count(), 0)
response = self.client.post(reverse('manage-donation-list'), donation1,
token=self.user_token)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Donation.objects.count(), 1)
def test_donation_update_not_same_owner(self, mock_check_status_psp):
""" Test that an update to a donation where the user is not the owner produces a 403"""
donation = DonationFactory(order=self.order, amount=35)
updated_donation = {
"project": self.project.slug,
"order": self.order.id,
"amount": 50
}
self.assertEqual(Donation.objects.count(), 1)
response = self.client.put(reverse('manage-donation-detail',
kwargs={'pk': donation.id}),
updated_donation,
token=self.user2_token)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(Donation.objects.count(), 1)
def test_donation_update_same_owner(self, mock_check_status_psp):
""" Test that an update to a donation where the user is the owner produces a 200 OK"""
donation = DonationFactory(order=self.order, amount=35)
updated_donation = {
"project": self.project.slug,
"order": self.order.id,
"amount": 50
}
self.assertEqual(Donation.objects.count(), 1)
response = self.client.put(reverse('manage-donation-detail',
kwargs={'pk': donation.id}),
updated_donation,
token=self.user_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(Donation.objects.count(), 1)
def test_donation_update_order_not_new(self, mock_check_status_psp):
""" Test that an update to a donation where the order does not have status CREATED produces 403 FORBIDDEN"""
order = OrderFactory.create(user=self.user,
status=StatusDefinition.SUCCESS)
donation = DonationFactory(order=order, amount=35)
updated_donation = {
"project": self.project.slug,
"order": order.id,
"amount": 50
}
self.assertEqual(Donation.objects.count(), 1)
response = self.client.put(reverse('manage-donation-detail',
kwargs={'pk': donation.id}),
updated_donation,
token=self.user_token)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(Donation.objects.count(), 1)
def test_donation_update_order_new(self, mock_check_status_psp):
""" Test that an update to a donation where the order does has status CREATED produces 200 OK response"""
order = OrderFactory.create(user=self.user,
status=StatusDefinition.CREATED)
donation = DonationFactory(order=order, amount=35)
updated_donation = {
"project": self.project.slug,
"order": order.id,
"amount": 50
}
self.assertEqual(Donation.objects.count(), 1)
response = self.client.put(reverse('manage-donation-detail',
kwargs={'pk': donation.id}),
updated_donation,
token=self.user_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(Donation.objects.count(), 1)
# Mock the ManageOrderDetail check_status_psp function which will request status_check at PSP
@patch.object(ManageOrderDetail, 'check_status_psp')
class TestCreateDonation(DonationApiTestCase):
    def test_create_fundraiser_donation(self, check_status_psp):
        """
        Test a donation made through a fundraiser in the current donation
        flow, where there is just one donation that can't be deleted.
        """
# Create an order
response = self.client.post(self.manage_order_list_url, {},
token=self.user1_token)
order_id = response.data['id']
fundraiser = FundraiserFactory.create(amount=100)
donation1 = {
"fundraiser": fundraiser.pk,
"project": fundraiser.project.slug,
"order": order_id,
"amount": 50
}
response = self.client.post(self.manage_donation_list_url, donation1,
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.data['status'], 'created')
# Check that the order total is equal to the donation amount
order_url = "{0}{1}".format(self.manage_order_list_url, order_id)
response = self.client.get(order_url, token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['total'], u'50.00')
    def test_create_single_donation(self, check_status_psp):
        """
        Test donation in the current donation flow where we have just one
        donation that can't be deleted.
        """
# Create an order
response = self.client.post(self.manage_order_list_url, {},
token=self.user1_token)
order_id = response.data['id']
donation1 = {
"project": self.project1.slug,
"order": order_id,
"amount": 35
}
response = self.client.post(self.manage_donation_list_url, donation1,
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.data['status'], 'created')
# Check that the order total is equal to the donation amount
order_url = "{0}{1}".format(self.manage_order_list_url, order_id)
response = self.client.get(order_url, token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['total'], u'35.00')
def test_crud_multiple_donations(self, check_status_psp):
"""
        Test more advanced modifications to donations and orders that aren't
        currently supported by our front-end.
"""
# Create an order
response = self.client.post(self.manage_order_list_url, {},
token=self.user1_token)
order_id = response.data['id']
donation1 = {
"project": self.project1.slug,
"order": order_id,
"amount": 35
}
response = self.client.post(self.manage_donation_list_url, donation1,
token=self.user1_token)
donation_id = response.data['id']
# Check that the order total is equal to the donation amount
order_url = "{0}{1}".format(self.manage_order_list_url, order_id)
response = self.client.get(order_url, token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['total'], u'35.00')
# Check that this user can change the amount
donation_url = "{0}{1}".format(self.manage_donation_list_url,
donation_id)
donation1['amount'] = 50
response = self.client.put(donation_url, donation1,
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
# Check that the order total is equal to the increased donation amount
order_url = "{0}{1}".format(self.manage_order_list_url, order_id)
response = self.client.get(order_url, token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['total'], u'50.00')
# Add another donation
donation2 = {
"project": self.project2.slug,
"order": order_id,
"amount": 47
}
response = self.client.post(self.manage_donation_list_url, donation2,
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.data['status'], 'created')
# Check that the order total is equal to the two donations
order_url = "{0}{1}".format(self.manage_order_list_url, order_id)
response = self.client.get(order_url, token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['donations']), 2)
self.assertEqual(response.data['total'], u'97.00')
# remove the first donation
response = self.client.delete(donation_url, token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
# Check that the order total is equal to second donation
order_url = "{0}{1}".format(self.manage_order_list_url, order_id)
response = self.client.get(order_url, token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['donations']), 1)
self.assertEqual(response.data['total'], u'47.00')
# Set order to status 'locked'
order = Order.objects.get(id=order_id)
order.locked()
order.save()
donation3 = {
"project": self.project1.slug,
"order": order_id,
"amount": 70
}
# Should not be able to add more donations to this order now.
response = self.client.post(self.manage_donation_list_url, donation3,
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # Check that this user can't change the amount of a donation
donation1['amount'] = 5
response = self.client.put(donation_url, donation1,
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
class TestAnonymousAuthenticatedDonationCreate(DonationApiTestCase):
def test_create_anonymous_donation(self):
donation_url = reverse('manage-donation-list')
# create a new anonymous donation
response = self.client.post(donation_url, {'order': self.order.pk,
'project': self.project.slug,
'amount': 50,
'anonymous': True},
token=self.user_token)
self.assertEqual(response.status_code, 201)
# retrieve the donation just created
donation_id = response.data['id']
donation_url = reverse('manage-donation-detail',
kwargs={'pk': donation_id})
response = self.client.get(donation_url, token=self.user_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
# Check if the anonymous is set to True
self.assertEqual(True, response.data['anonymous'])
# Set the order to success
self.order.locked()
self.order.save()
self.order.success()
self.order.save()
# retrieve the donation through public API
donation_url = reverse('donation-detail', kwargs={'pk': donation_id})
response = self.client.get(donation_url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
# Check that user is NOT shown in public API
self.assertEqual(None, response.data['user'])
class TestUnauthenticatedDonationCreate(DonationApiTestCase):
def setUp(self):
super(TestUnauthenticatedDonationCreate, self).setUp()
self.order_anon = OrderFactory.create()
s = self.session
s['new_order_id'] = self.order_anon.pk
s.save()
def test_create_anonymous_donation(self):
donation_url = reverse('manage-donation-list')
# create a new anonymous donation
response = self.client.post(donation_url, {'order': self.order_anon.pk,
'project': self.project.slug,
'amount': 50,
'anonymous': True})
self.assertEqual(response.status_code, 201)
@patch.object(ManageOrderDetail, 'check_status_psp')
class TestProjectDonationList(DonationApiTestCase):
"""
Test that the project donations list only works for the project owner
"""
def setUp(self):
super(TestProjectDonationList, self).setUp()
self.project3 = ProjectFactory.create(amount_asked=5000,
owner=self.user1)
self.project3.set_status('campaign')
order = OrderFactory.create(user=self.user1, status=StatusDefinition.SUCCESS)
DonationFactory.create(amount=1000, project=self.project3,
order=order)
self.project_donation_list_url = reverse('project-donation-list')
def test_project_donation_list(self, check_status_psp):
response = self.client.get(self.project_donation_list_url,
{'project': self.project3.slug})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 1)
donation = response.data['results'][0]
self.assertEqual(donation['amount'], u'1000.00')
self.assertEqual(donation['project']['title'], self.project3.title)
def test_successful_project_donation_list(self, check_status_psp):
setattr(properties, 'SHOW_DONATION_AMOUNTS', True)
# Unsuccessful donations should not be shown
order = OrderFactory.create(user=self.user2)
reward = RewardFactory.create(project=self.project3)
DonationFactory.create(amount=2000, project=self.project3, reward=reward,
order=order)
response = self.client.get(self.project_donation_list_url,
{'project': self.project3.slug},
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 1,
'Only the successful donation should be returned')
self.assertIn('amount', response.data['results'][0])
self.assertIn('reward', response.data['results'][0])
def test_project_donation_list_without_amounts(self, check_status_psp):
setattr(properties, 'SHOW_DONATION_AMOUNTS', False)
reward = RewardFactory.create(project=self.project3)
order = OrderFactory.create(user=self.user2)
DonationFactory.create(amount=2000, project=self.project3, reward=reward,
order=order)
response = self.client.get(self.project_donation_list_url,
{'project': self.project3.slug},
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 1,
'Only the successful donation should be returned')
self.assertNotIn('amount', response.data['results'][0])
self.assertNotIn('reward', response.data['results'][0])
def test_successful_project_donation_list_paged(self, check_status_psp):
for i in range(30):
order = OrderFactory.create(user=self.user1, status=StatusDefinition.SUCCESS)
DonationFactory.create(amount=2000, project=self.project3,
order=order)
response = self.client.get(self.project_donation_list_url,
                                   {'project': self.project3.slug},
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 31,
'All the donations should be returned')
self.assertEqual(len(response.data['results']), 20)
def test_project_donation_list_co_financing(self, check_status_psp):
order = OrderFactory.create(user=self.user2, status=StatusDefinition.SUCCESS)
DonationFactory.create(amount=1500, project=self.project3,
order=order)
anonymous_order = OrderFactory.create(status=StatusDefinition.SUCCESS)
DonationFactory.create(amount=1000, project=self.project3,
order=anonymous_order, anonymous=True)
response = self.client.get(self.project_donation_list_url,
{'project': self.project3.slug, 'co_financing': 'true'},
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 1,
'Only donations by co-financers should be returned')
self.assertEqual(response.data['results'][0]['amount'], u'1500.00')
def test_project_donation_list_co_financing_is_false(self, check_status_psp):
# Co_financing order and donation
order = OrderFactory.create(user=self.user2, status=StatusDefinition.SUCCESS)
DonationFactory.create(amount=1500, project=self.project3,
order=order)
# Anonymous order and donation
anonymous_order = OrderFactory.create(status=StatusDefinition.SUCCESS)
        DonationFactory.create(amount=1500, project=self.project3,
                               order=anonymous_order, anonymous=True)
response = self.client.get(self.project_donation_list_url,
{'project': self.project3.slug, 'co_financing': 'false'},
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 2,
'Only donations and anonymous donations should be returned')
self.assertEqual(response.data['results'][0]['amount'], u'1500.00')
self.assertEqual(response.data['results'][1]['amount'], u'1000.00')
def test_project_donation_list_co_financing_is_unspecified(self, check_status_psp):
# Co_financing order and donation
order = OrderFactory.create(user=self.user2, status=StatusDefinition.SUCCESS)
DonationFactory.create(amount=1500, project=self.project3,
order=order)
# Anonymous order and donation
anonymous_order = OrderFactory.create(status=StatusDefinition.SUCCESS)
DonationFactory.create(amount=1500, project=self.project3,
order=anonymous_order, anonymous=True)
response = self.client.get(self.project_donation_list_url,
{'project': self.project3.slug},
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 2,
'Donations and anonymous donations should be returned')
self.assertEqual(response.data['results'][0]['amount'], u'1500.00')
self.assertEqual(response.data['results'][1]['amount'], u'1000.00')
@patch.object(ManageOrderDetail, 'check_status_psp')
class TestMyProjectDonationList(DonationApiTestCase):
"""
Test that the project donations list only works for the project owner
"""
def setUp(self):
super(TestMyProjectDonationList, self).setUp()
self.project3 = ProjectFactory.create(amount_asked=5000,
owner=self.user1)
self.project3.set_status('campaign')
# User 2 makes a donation
order = OrderFactory.create(user=self.user2)
DonationFactory.create(amount=1000, project=self.project3,
order=order)
order.locked()
order.save()
order.success()
order.save()
self.project_donation_list_url = reverse('my-project-donation-list')
def tearDown(self):
super(TestMyProjectDonationList, self).tearDown()
Order.objects.all().delete()
def test_my_project_donation_list(self, check_status_psp):
response = self.client.get(self.project_donation_list_url,
{'project': self.project3.slug},
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['results']), 1)
donation = response.data['results'][0]
self.assertEqual(donation['amount'], u'1000.00')
self.assertEqual(donation['project']['title'], self.project3.title)
def test_successful_my_project_donation_list(self, check_status_psp):
# Unsuccessful donations should not be shown
order = OrderFactory.create(user=self.user2)
DonationFactory.create(amount=2000, project=self.project3,
order=order)
response = self.client.get(self.project_donation_list_url,
{'project': self.project3.slug},
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['results']), 1,
'Only the successful donation should be returned')
def test_my_project_donation_list_unauthorized(self, check_status_psp):
response = self.client.get(self.project_donation_list_url,
{'project': self.project3.slug},
token=self.user2_token)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
@patch.object(ManageOrderDetail, 'check_status_psp')
class TestMyFundraiserDonationList(DonationApiTestCase):
"""
Test that the fundraiser donations list only works for the fundraiser owner
"""
def setUp(self):
super(TestMyFundraiserDonationList, self).setUp()
self.project4 = ProjectFactory.create(amount_asked=5000,
owner=self.user1)
self.project4.set_status('campaign')
self.fundraiser = FundraiserFactory.create(amount=4000,
owner=self.user1,
project=self.project4)
# User 2 makes a donation
order = OrderFactory.create(user=self.user2)
DonationFactory.create(amount=1000, project=self.project4,
fundraiser=self.fundraiser,
order=order)
order.locked()
order.save()
order.success()
order.save()
self.fundraiser_donation_list_url = reverse(
'my-fundraiser-donation-list')
def test_my_fundraiser_donation_list(self, check_status_psp):
response = self.client.get(self.fundraiser_donation_list_url,
{'fundraiser': self.fundraiser.pk},
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
donation = response.data[0]
self.assertEqual(donation['amount'], u'1000.00')
self.assertEqual(donation['project']['title'], self.project4.title)
self.assertEqual(donation['fundraiser'], self.fundraiser.pk)
def test_successful_my_fundraiser_donation_list(self, check_status_psp):
# Unsuccessful donations should not be shown
order = OrderFactory.create(user=self.user2)
DonationFactory.create(amount=2000, project=self.project4,
fundraiser=self.fundraiser,
order=order)
response = self.client.get(self.fundraiser_donation_list_url,
{'fundraiser': self.fundraiser.pk},
token=self.user1_token)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1,
'Only the successful donation should be returned')
def test_my_fundraiser_donation_list_unauthorized(self, check_status_psp):
response = self.client.get(self.fundraiser_donation_list_url,
                                   {'fundraiser': self.fundraiser.pk},
token=self.user2_token)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
| bsd-3-clause | -1,832,299,947,430,814,500 | 42.229985 | 116 | 0.603152 | false |
Netflix-Skunkworks/swag-client | swag_client/cli.py | 1 | 11229 | import logging
import os
import time
import simplejson as json
import boto3
import click
import click_log
from tabulate import tabulate
from swag_client.backend import SWAGManager
from swag_client.__about__ import __version__
from swag_client.migrations import run_migration
from swag_client.util import parse_swag_config_options
from swag_client.exceptions import InvalidSWAGDataException
log = logging.getLogger('swag_client')
click_log.basic_config(log)
class CommaList(click.ParamType):
name = 'commalist'
def convert(self, value, param, ctx):
return value.split(',')
def create_swag_from_ctx(ctx):
"""Creates SWAG client from the current context."""
swag_opts = {}
if ctx.type == 'file':
swag_opts = {
'swag.type': 'file',
'swag.data_dir': ctx.data_dir,
'swag.data_file': ctx.data_file
}
elif ctx.type == 's3':
swag_opts = {
'swag.type': 's3',
'swag.bucket_name': ctx.bucket_name,
'swag.data_file': ctx.data_file,
'swag.region': ctx.region
}
elif ctx.type == 'dynamodb':
swag_opts = {
'swag.type': 'dynamodb',
'swag.region': ctx.region
}
return SWAGManager(**parse_swag_config_options(swag_opts))
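# Example of the mapping (values are illustrative): selecting the s3 backend
# with --bucket-name my-swag --data-file accounts.json --region us-east-1
# yields swag_opts of
#   {'swag.type': 's3', 'swag.bucket_name': 'my-swag',
#    'swag.data_file': 'accounts.json', 'swag.region': 'us-east-1'}
# which parse_swag_config_options() turns into the kwargs for SWAGManager.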
class AppContext(object):
def __init__(self):
self.namespace = None
self.region = None
self.type = None
self.data_dir = None
self.data_file = None
self.bucket_name = None
self.dry_run = None
pass_context = click.make_pass_decorator(AppContext, ensure=True)
@click.group()
@click.option('--namespace', default='accounts')
@click.option('--dry-run', type=bool, default=False, is_flag=True, help='Run command without persisting anything.')
@click_log.simple_verbosity_option(log)
@click.version_option(version=__version__)
@pass_context
def cli(ctx, namespace, dry_run):
if not ctx.namespace:
ctx.namespace = namespace
if not ctx.dry_run:
ctx.dry_run = dry_run
@cli.group()
@click.option('--region', default='us-east-1', help='Region the table is located in.')
@pass_context
def dynamodb(ctx, region):
if not ctx.region:
ctx.region = region
ctx.type = 'dynamodb'
@cli.group()
@click.option('--data-dir', help='Directory to store data.', default=os.getcwd())
@click.option('--data-file')
@pass_context
def file(ctx, data_dir, data_file):
"""Use the File SWAG Backend"""
    if not ctx.data_file:
ctx.data_file = data_file
if not ctx.data_dir:
ctx.data_dir = data_dir
ctx.type = 'file'
@cli.group()
@click.option('--bucket-name', help='Name of the bucket you wish to operate on.')
@click.option('--data-file', help='Key name of the file to operate on.')
@click.option('--region', default='us-east-1', help='Region the bucket is located in.')
@pass_context
def s3(ctx, bucket_name, data_file, region):
"""Use the S3 SWAG backend."""
if not ctx.data_file:
ctx.data_file = data_file
if not ctx.bucket_name:
ctx.bucket_name = bucket_name
if not ctx.region:
ctx.region = region
ctx.type = 's3'
@cli.command()
@pass_context
def list(ctx):
"""List SWAG account info."""
if ctx.namespace != 'accounts':
click.echo(
click.style('Only account data is available for listing.', fg='red')
)
return
swag = create_swag_from_ctx(ctx)
accounts = swag.get_all()
_table = [[result['name'], result.get('id')] for result in accounts]
click.echo(
tabulate(_table, headers=["Account Name", "Account Number"])
)
@cli.command()
@click.option('--name', help='Name of the service to list.')
@pass_context
def list_service(ctx, name):
"""Retrieve accounts pertaining to named service."""
swag = create_swag_from_ctx(ctx)
accounts = swag.get_service_enabled(name)
_table = [[result['name'], result.get('id')] for result in accounts]
click.echo(
tabulate(_table, headers=["Account Name", "Account Number"])
)
@cli.command()
@click.option('--start-version', default=1, help='Starting version.')
@click.option('--end-version', default=2, help='Ending version.')
@pass_context
def migrate(ctx, start_version, end_version):
"""Transition from one SWAG schema to another."""
if ctx.type == 'file':
if ctx.data_file:
file_path = ctx.data_file
else:
file_path = os.path.join(ctx.data_file, ctx.namespace + '.json')
    # todo make this more like alembic and determine/load versions automatically
with open(file_path, 'r') as f:
data = json.loads(f.read())
data = run_migration(data, start_version, end_version)
with open(file_path, 'w') as f:
f.write(json.dumps(data))
@cli.command()
@pass_context
def propagate(ctx):
"""Transfers SWAG data from one backend to another"""
data = []
if ctx.type == 'file':
if ctx.data_file:
file_path = ctx.data_file
else:
file_path = os.path.join(ctx.data_dir, ctx.namespace + '.json')
with open(file_path, 'r') as f:
data = json.loads(f.read())
swag_opts = {
'swag.type': 'dynamodb'
}
swag = SWAGManager(**parse_swag_config_options(swag_opts))
for item in data:
time.sleep(2)
swag.create(item, dry_run=ctx.dry_run)
@cli.command()
@pass_context
@click.argument('data', type=click.File())
def create(ctx, data):
"""Create a new SWAG item."""
swag = create_swag_from_ctx(ctx)
data = json.loads(data.read())
for account in data:
swag.create(account, dry_run=ctx.dry_run)
@cli.command()
@pass_context
@click.argument('data', type=click.File())
def update(ctx, data):
"""Updates a given record."""
swag = create_swag_from_ctx(ctx)
data = json.loads(data.read())
for account in data:
swag.update(account, dry_run=ctx.dry_run)
@cli.command()
@pass_context
@click.argument('name')
@click.option('--path', type=str, default='', help='JMESPath string to filter accounts to be targeted. Default is all accounts.')
@click.option('--regions', type=CommaList(), default='all',
help='AWS regions that should be configured. These are comma delimited (e.g. us-east-1, us-west-2, eu-west-1). Default: all')
@click.option('--disabled', type=bool, default=False, is_flag=True, help='Service should be marked as enabled.')
def deploy_service(ctx, path, name, regions, disabled):
"""Deploys a new service JSON to multiple accounts. NAME is the service name you wish to deploy."""
enabled = False if disabled else True
swag = create_swag_from_ctx(ctx)
accounts = swag.get_all(search_filter=path)
log.debug('Searching for accounts. Found: {} JMESPath: `{}`'.format(len(accounts), path))
for a in accounts:
try:
if not swag.get_service(name, search_filter="[?id=='{id}']".format(id=a['id'])):
log.info('Found an account to update. AccountName: {name} AccountNumber: {number}'.format(name=a['name'], number=a['id']))
status = []
for region in regions:
status.append(
{
'enabled': enabled,
'region': region
}
)
a['services'].append(
{
'name': name,
'status': status
}
)
swag.update(a, dry_run=ctx.dry_run)
        except InvalidSWAGDataException:
log.warning('Found a data quality issue. AccountName: {name} AccountNumber: {number}'.format(name=a['name'], number=a['id']))
log.info('Service has been deployed to all matching accounts.')
@cli.command()
@pass_context
@click.argument('data', type=click.File())
def seed_aws_data(ctx, data):
"""Seeds SWAG from a list of known AWS accounts."""
swag = create_swag_from_ctx(ctx)
for k, v in json.loads(data.read()).items():
for account in v['accounts']:
data = {
'description': 'This is an AWS owned account used for {}'.format(k),
'id': account['account_id'],
'contacts': [],
'owner': 'aws',
'provider': 'aws',
'sensitive': False,
'email': '[email protected]',
'name': k + '-' + account['region']
}
click.echo(click.style(
'Seeded Account. AccountName: {}'.format(data['name']), fg='green')
)
swag.create(data, dry_run=ctx.dry_run)
@cli.command()
@pass_context
@click.option('--owner', type=str, required=True, help='The owner for the account schema.')
def seed_aws_organization(ctx, owner):
"""Seeds SWAG from an AWS organziation."""
swag = create_swag_from_ctx(ctx)
accounts = swag.get_all()
_ids = [result.get('id') for result in accounts]
client = boto3.client('organizations')
paginator = client.get_paginator('list_accounts')
response_iterator = paginator.paginate()
count = 0
for response in response_iterator:
for account in response['Accounts']:
if account['Id'] in _ids:
click.echo(click.style(
'Ignoring Duplicate Account. AccountId: {} already exists in SWAG'.format(account['Id']), fg='yellow')
)
continue
if account['Status'] == 'SUSPENDED':
status = 'deprecated'
else:
status = 'created'
data = {
'id': account['Id'],
'name': account['Name'],
'description': 'Account imported from AWS organization.',
'email': account['Email'],
'owner': owner,
'provider': 'aws',
'contacts': [],
'sensitive': False,
'status': [{'region': 'all', 'status': status}]
}
click.echo(click.style(
'Seeded Account. AccountName: {}'.format(data['name']), fg='green')
)
count += 1
swag.create(data, dry_run=ctx.dry_run)
click.echo('Seeded {} accounts to SWAG.'.format(count))
# todo perhaps there is a better way of dynamically adding subcommands?
file.add_command(list)
file.add_command(migrate)
file.add_command(propagate)
file.add_command(create)
file.add_command(seed_aws_data)
file.add_command(seed_aws_organization)
file.add_command(update)
file.add_command(deploy_service)
file.add_command(list_service)
dynamodb.add_command(list)
dynamodb.add_command(create)
dynamodb.add_command(update)
dynamodb.add_command(seed_aws_data)
dynamodb.add_command(seed_aws_organization)
dynamodb.add_command(deploy_service)
dynamodb.add_command(list_service)
s3.add_command(list)
s3.add_command(create)
s3.add_command(update)
s3.add_command(seed_aws_data)
s3.add_command(seed_aws_organization)
s3.add_command(deploy_service)
s3.add_command(list_service)
| apache-2.0 | -4,546,113,567,798,324,700 | 29.680328 | 139 | 0.596669 | false |
carolFrohlich/nipype | nipype/interfaces/tests/test_base.py | 2 | 27078 | # -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
from __future__ import print_function, unicode_literals
from future import standard_library
standard_library.install_aliases()
from builtins import open, str, bytes
import os
import tempfile
import shutil
import warnings
import simplejson as json
from nipype.testing import (assert_equal, assert_not_equal, assert_raises,
assert_true, assert_false, with_setup, package_check,
skipif, example_data)
import nipype.interfaces.base as nib
from nipype.utils.filemanip import split_filename
from nipype.interfaces.base import Undefined, config
from traits.testing.nose_tools import skip
import traits.api as traits
def test_bunch():
b = nib.Bunch()
yield assert_equal, b.__dict__, {}
b = nib.Bunch(a=1, b=[2, 3])
yield assert_equal, b.__dict__, {'a': 1, 'b': [2, 3]}
def test_bunch_attribute():
b = nib.Bunch(a=1, b=[2, 3], c=None)
yield assert_equal, b.a, 1
yield assert_equal, b.b, [2, 3]
yield assert_equal, b.c, None
def test_bunch_repr():
b = nib.Bunch(b=2, c=3, a=dict(n=1, m=2))
yield assert_equal, repr(b), "Bunch(a={'m': 2, 'n': 1}, b=2, c=3)"
def test_bunch_methods():
b = nib.Bunch(a=2)
b.update(a=3)
newb = b.dictcopy()
yield assert_equal, b.a, 3
yield assert_equal, b.get('a'), 3
yield assert_equal, b.get('badkey', 'otherthing'), 'otherthing'
yield assert_not_equal, b, newb
yield assert_equal, type(dict()), type(newb)
yield assert_equal, newb['a'], 3
def test_bunch_hash():
# NOTE: Since the path to the json file is included in the Bunch,
# the hash will be unique to each machine.
pth = os.path.split(os.path.abspath(__file__))[0]
json_pth = os.path.join(pth, 'realign_json.json')
b = nib.Bunch(infile=json_pth,
otherthing='blue',
yat=True)
newbdict, bhash = b._get_bunch_hash()
yield assert_equal, bhash, 'ddcc7b4ec5675df8cf317a48bd1857fa'
# Make sure the hash stored in the json file for `infile` is correct.
jshash = nib.md5()
with open(json_pth, 'r') as fp:
jshash.update(fp.read().encode('utf-8'))
yield assert_equal, newbdict['infile'][0][1], jshash.hexdigest()
yield assert_equal, newbdict['yat'], True
# create a temp file
# global tmp_infile, tmp_dir
# tmp_infile = None
# tmp_dir = None
def setup_file():
# global tmp_infile, tmp_dir
tmp_dir = tempfile.mkdtemp()
tmp_infile = os.path.join(tmp_dir, 'foo.txt')
with open(tmp_infile, 'w') as fp:
fp.writelines(['123456789'])
return tmp_infile
def teardown_file(tmp_dir):
shutil.rmtree(tmp_dir)
def test_TraitedSpec():
yield assert_true, nib.TraitedSpec().get_hashval()
yield assert_equal, nib.TraitedSpec().__repr__(), '\n\n'
class spec(nib.TraitedSpec):
foo = nib.traits.Int
goo = nib.traits.Float(usedefault=True)
yield assert_equal, spec().foo, Undefined
yield assert_equal, spec().goo, 0.0
specfunc = lambda x: spec(hoo=x)
yield assert_raises, nib.traits.TraitError, specfunc, 1
infields = spec(foo=1)
hashval = ([('foo', 1), ('goo', '0.0000000000')], 'e89433b8c9141aa0fda2f8f4d662c047')
yield assert_equal, infields.get_hashval(), hashval
# yield assert_equal, infields.hashval[1], hashval[1]
yield assert_equal, infields.__repr__(), '\nfoo = 1\ngoo = 0.0\n'
@skip
def test_TraitedSpec_dynamic():
from pickle import dumps, loads
a = nib.BaseTraitedSpec()
a.add_trait('foo', nib.traits.Int)
a.foo = 1
assign_a = lambda: setattr(a, 'foo', 'a')
yield assert_raises, Exception, assign_a
pkld_a = dumps(a)
unpkld_a = loads(pkld_a)
assign_a_again = lambda: setattr(unpkld_a, 'foo', 'a')
yield assert_raises, Exception, assign_a_again
def test_TraitedSpec_logic():
class spec3(nib.TraitedSpec):
_xor_inputs = ('foo', 'bar')
foo = nib.traits.Int(xor=_xor_inputs,
desc='foo or bar, not both')
bar = nib.traits.Int(xor=_xor_inputs,
desc='bar or foo, not both')
kung = nib.traits.Float(requires=('foo',),
position=0,
desc='kung foo')
class out3(nib.TraitedSpec):
output = nib.traits.Int
class MyInterface(nib.BaseInterface):
input_spec = spec3
output_spec = out3
myif = MyInterface()
    # setting 'kung' before its required input 'foo' only emits a warning in
    # nipype, so the original assert_raises here was a no-op (setattr was
    # called immediately and None was passed as the callable); check for the
    # warning instead
    with warnings.catch_warnings(record=True) as w:
        warnings.filterwarnings('always', '', UserWarning)
        myif.inputs.kung = 10.0
    yield assert_true, len(w) > 0, 'setting kung without foo should warn'
myif.inputs.foo = 1
yield assert_equal, myif.inputs.foo, 1
set_bar = lambda: setattr(myif.inputs, 'bar', 1)
yield assert_raises, IOError, set_bar
yield assert_equal, myif.inputs.foo, 1
myif.inputs.kung = 2
yield assert_equal, myif.inputs.kung, 2.0
def test_deprecation():
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec1(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='0.1')
spec_instance = DeprecationSpec1()
set_foo = lambda: setattr(spec_instance, 'foo', 1)
yield assert_raises, nib.TraitError, set_foo
yield assert_equal, len(w), 0, 'no warnings, just errors'
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec1numeric(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='0.1')
spec_instance = DeprecationSpec1numeric()
set_foo = lambda: setattr(spec_instance, 'foo', 1)
yield assert_raises, nib.TraitError, set_foo
yield assert_equal, len(w), 0, 'no warnings, just errors'
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec2(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='100', new_name='bar')
spec_instance = DeprecationSpec2()
set_foo = lambda: setattr(spec_instance, 'foo', 1)
yield assert_raises, nib.TraitError, set_foo
yield assert_equal, len(w), 0, 'no warnings, just errors'
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec3(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='1000', new_name='bar')
bar = nib.traits.Int()
spec_instance = DeprecationSpec3()
not_raised = True
try:
spec_instance.foo = 1
except nib.TraitError:
not_raised = False
yield assert_true, not_raised
yield assert_equal, len(w), 1, 'deprecated warning 1 %s' % [w1.message for w1 in w]
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
        class DeprecationSpec4(nib.TraitedSpec):
            foo = nib.traits.Int(deprecated='1000', new_name='bar')
            bar = nib.traits.Int()
        spec_instance = DeprecationSpec4()
not_raised = True
try:
spec_instance.foo = 1
except nib.TraitError:
not_raised = False
yield assert_true, not_raised
yield assert_equal, spec_instance.foo, Undefined
yield assert_equal, spec_instance.bar, 1
yield assert_equal, len(w), 1, 'deprecated warning 2 %s' % [w1.message for w1 in w]
def test_namesource():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec2(nib.CommandLineInputSpec):
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=2)
doo = nib.File(exists=True, argstr="%s", position=1)
goo = traits.Int(argstr="%d", position=4)
poo = nib.File(name_source=['goo'], hash_files=False, argstr="%s", position=3)
class TestName(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec2
testobj = TestName()
testobj.inputs.doo = tmp_infile
testobj.inputs.goo = 99
yield assert_true, '%s_generated' % nme in testobj.cmdline
testobj.inputs.moo = "my_%s_template"
yield assert_true, 'my_%s_template' % nme in testobj.cmdline
os.chdir(pwd)
teardown_file(tmpd)
def test_chained_namesource():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec2(nib.CommandLineInputSpec):
doo = nib.File(exists=True, argstr="%s", position=1)
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=2, name_template='%s_mootpl')
poo = nib.File(name_source=['moo'], hash_files=False,
argstr="%s", position=3)
class TestName(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec2
testobj = TestName()
testobj.inputs.doo = tmp_infile
res = testobj.cmdline
yield assert_true, '%s' % tmp_infile in res
yield assert_true, '%s_mootpl ' % nme in res
yield assert_true, '%s_mootpl_generated' % nme in res
os.chdir(pwd)
teardown_file(tmpd)
def test_cycle_namesource1():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec3(nib.CommandLineInputSpec):
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=1, name_template='%s_mootpl')
poo = nib.File(name_source=['moo'], hash_files=False,
argstr="%s", position=2)
doo = nib.File(name_source=['poo'], hash_files=False,
argstr="%s", position=3)
class TestCycle(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec3
# Check that an exception is raised
to0 = TestCycle()
not_raised = True
try:
to0.cmdline
except nib.NipypeInterfaceError:
not_raised = False
yield assert_false, not_raised
os.chdir(pwd)
teardown_file(tmpd)
def test_cycle_namesource2():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec3(nib.CommandLineInputSpec):
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=1, name_template='%s_mootpl')
poo = nib.File(name_source=['moo'], hash_files=False,
argstr="%s", position=2)
doo = nib.File(name_source=['poo'], hash_files=False,
argstr="%s", position=3)
class TestCycle(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec3
# Check that loop can be broken by setting one of the inputs
to1 = TestCycle()
to1.inputs.poo = tmp_infile
    not_raised = True
    res = ''
    try:
        res = to1.cmdline
    except nib.NipypeInterfaceError:
        not_raised = False
yield assert_true, not_raised
yield assert_true, '%s' % tmp_infile in res
yield assert_true, '%s_generated' % nme in res
yield assert_true, '%s_generated_mootpl' % nme in res
os.chdir(pwd)
teardown_file(tmpd)
def checknose():
"""check version of nose for known incompatability"""
mod = __import__('nose')
if mod.__versioninfo__[1] <= 11:
return 0
else:
return 1
@skipif(checknose)
def test_TraitedSpec_withFile():
tmp_infile = setup_file()
tmpd, nme = os.path.split(tmp_infile)
yield assert_true, os.path.exists(tmp_infile)
class spec2(nib.TraitedSpec):
moo = nib.File(exists=True)
doo = nib.traits.List(nib.File(exists=True))
infields = spec2(moo=tmp_infile, doo=[tmp_infile])
hashval = infields.get_hashval(hash_method='content')
yield assert_equal, hashval[1], 'a00e9ee24f5bfa9545a515b7a759886b'
teardown_file(tmpd)
@skipif(checknose)
def test_TraitedSpec_withNoFileHashing():
tmp_infile = setup_file()
tmpd, nme = os.path.split(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
yield assert_true, os.path.exists(tmp_infile)
class spec2(nib.TraitedSpec):
moo = nib.File(exists=True, hash_files=False)
doo = nib.traits.List(nib.File(exists=True))
infields = spec2(moo=nme, doo=[tmp_infile])
hashval = infields.get_hashval(hash_method='content')
yield assert_equal, hashval[1], '8da4669ff5d72f670a46ea3e7a203215'
class spec3(nib.TraitedSpec):
moo = nib.File(exists=True, name_source="doo")
doo = nib.traits.List(nib.File(exists=True))
infields = spec3(moo=nme, doo=[tmp_infile])
hashval1 = infields.get_hashval(hash_method='content')
class spec4(nib.TraitedSpec):
moo = nib.File(exists=True)
doo = nib.traits.List(nib.File(exists=True))
infields = spec4(moo=nme, doo=[tmp_infile])
hashval2 = infields.get_hashval(hash_method='content')
yield assert_not_equal, hashval1[1], hashval2[1]
os.chdir(pwd)
teardown_file(tmpd)
def test_Interface():
yield assert_equal, nib.Interface.input_spec, None
yield assert_equal, nib.Interface.output_spec, None
yield assert_raises, NotImplementedError, nib.Interface
yield assert_raises, NotImplementedError, nib.Interface.help
yield assert_raises, NotImplementedError, nib.Interface._inputs_help
yield assert_raises, NotImplementedError, nib.Interface._outputs_help
yield assert_raises, NotImplementedError, nib.Interface._outputs
class DerivedInterface(nib.Interface):
def __init__(self):
pass
nif = DerivedInterface()
yield assert_raises, NotImplementedError, nif.run
yield assert_raises, NotImplementedError, nif.aggregate_outputs
yield assert_raises, NotImplementedError, nif._list_outputs
yield assert_raises, NotImplementedError, nif._get_filecopy_info
def test_BaseInterface():
yield assert_equal, nib.BaseInterface.help(), None
yield assert_equal, nib.BaseInterface._get_filecopy_info(), []
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
goo = nib.traits.Int(desc='a random int', mandatory=True)
moo = nib.traits.Int(desc='a random int', mandatory=False)
hoo = nib.traits.Int(desc='a random int', usedefault=True)
zoo = nib.File(desc='a file', copyfile=False)
woo = nib.File(desc='a file', copyfile=True)
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class DerivedInterface(nib.BaseInterface):
input_spec = InputSpec
yield assert_equal, DerivedInterface.help(), None
yield assert_true, 'moo' in ''.join(DerivedInterface._inputs_help())
yield assert_equal, DerivedInterface()._outputs(), None
yield assert_equal, DerivedInterface._get_filecopy_info()[0]['key'], 'woo'
yield assert_true, DerivedInterface._get_filecopy_info()[0]['copy']
yield assert_equal, DerivedInterface._get_filecopy_info()[1]['key'], 'zoo'
yield assert_false, DerivedInterface._get_filecopy_info()[1]['copy']
yield assert_equal, DerivedInterface().inputs.foo, Undefined
yield assert_raises, ValueError, DerivedInterface()._check_mandatory_inputs
yield assert_equal, DerivedInterface(goo=1)._check_mandatory_inputs(), None
yield assert_raises, ValueError, DerivedInterface().run
yield assert_raises, NotImplementedError, DerivedInterface(goo=1).run
class DerivedInterface2(DerivedInterface):
output_spec = OutputSpec
def _run_interface(self, runtime):
return runtime
yield assert_equal, DerivedInterface2.help(), None
yield assert_equal, DerivedInterface2()._outputs().foo, Undefined
yield assert_raises, NotImplementedError, DerivedInterface2(goo=1).run
nib.BaseInterface.input_spec = None
yield assert_raises, Exception, nib.BaseInterface
def test_BaseInterface_load_save_inputs():
tmp_dir = tempfile.mkdtemp()
tmp_json = os.path.join(tmp_dir, 'settings.json')
class InputSpec(nib.TraitedSpec):
input1 = nib.traits.Int()
input2 = nib.traits.Float()
input3 = nib.traits.Bool()
input4 = nib.traits.Str()
class DerivedInterface(nib.BaseInterface):
input_spec = InputSpec
def __init__(self, **inputs):
super(DerivedInterface, self).__init__(**inputs)
inputs_dict = {'input1': 12, 'input3': True,
'input4': 'some string'}
bif = DerivedInterface(**inputs_dict)
bif.save_inputs_to_json(tmp_json)
bif2 = DerivedInterface()
bif2.load_inputs_from_json(tmp_json)
yield assert_equal, bif2.inputs.get_traitsfree(), inputs_dict
bif3 = DerivedInterface(from_file=tmp_json)
yield assert_equal, bif3.inputs.get_traitsfree(), inputs_dict
inputs_dict2 = inputs_dict.copy()
inputs_dict2.update({'input4': 'some other string'})
bif4 = DerivedInterface(from_file=tmp_json, input4=inputs_dict2['input4'])
yield assert_equal, bif4.inputs.get_traitsfree(), inputs_dict2
bif5 = DerivedInterface(input4=inputs_dict2['input4'])
bif5.load_inputs_from_json(tmp_json, overwrite=False)
yield assert_equal, bif5.inputs.get_traitsfree(), inputs_dict2
bif6 = DerivedInterface(input4=inputs_dict2['input4'])
bif6.load_inputs_from_json(tmp_json)
yield assert_equal, bif6.inputs.get_traitsfree(), inputs_dict
# test get hashval in a complex interface
from nipype.interfaces.ants import Registration
settings = example_data(example_data('smri_ants_registration_settings.json'))
with open(settings) as setf:
data_dict = json.load(setf)
tsthash = Registration()
tsthash.load_inputs_from_json(settings)
yield assert_equal, {}, check_dict(data_dict, tsthash.inputs.get_traitsfree())
tsthash2 = Registration(from_file=settings)
yield assert_equal, {}, check_dict(data_dict, tsthash2.inputs.get_traitsfree())
_, hashvalue = tsthash.inputs.get_hashval(hash_method='timestamp')
yield assert_equal, 'ec5755e07287e04a4b409e03b77a517c', hashvalue
def test_input_version():
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
obj = DerivedInterface1()
yield assert_not_raises, obj._check_version_requirements, obj.inputs
config.set('execution', 'stop_on_unknown_version', True)
yield assert_raises, Exception, obj._check_version_requirements, obj.inputs
config.set_default_config()
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.8'
obj = DerivedInterface1()
obj.inputs.foo = 1
yield assert_raises, Exception, obj._check_version_requirements
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.10'
obj = DerivedInterface1()
yield assert_not_raises, obj._check_version_requirements, obj.inputs
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.9'
obj = DerivedInterface1()
obj.inputs.foo = 1
    yield assert_not_raises, obj._check_version_requirements, obj.inputs
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', max_ver='0.7')
class DerivedInterface2(nib.BaseInterface):
input_spec = InputSpec
_version = '0.8'
obj = DerivedInterface2()
obj.inputs.foo = 1
yield assert_raises, Exception, obj._check_version_requirements
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', max_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.9'
obj = DerivedInterface1()
obj.inputs.foo = 1
    yield assert_not_raises, obj._check_version_requirements, obj.inputs
def test_output_version():
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
output_spec = OutputSpec
_version = '0.10'
obj = DerivedInterface1()
yield assert_equal, obj._check_version_requirements(obj._outputs()), []
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.11')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
output_spec = OutputSpec
_version = '0.10'
obj = DerivedInterface1()
yield assert_equal, obj._check_version_requirements(obj._outputs()), ['foo']
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.11')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
output_spec = OutputSpec
_version = '0.10'
def _run_interface(self, runtime):
return runtime
def _list_outputs(self):
return {'foo': 1}
obj = DerivedInterface1()
yield assert_raises, KeyError, obj.run
def test_Commandline():
yield assert_raises, Exception, nib.CommandLine
ci = nib.CommandLine(command='which')
yield assert_equal, ci.cmd, 'which'
yield assert_equal, ci.inputs.args, Undefined
ci2 = nib.CommandLine(command='which', args='ls')
yield assert_equal, ci2.cmdline, 'which ls'
ci3 = nib.CommandLine(command='echo')
ci3.inputs.environ = {'MYENV': 'foo'}
res = ci3.run()
yield assert_equal, res.runtime.environ['MYENV'], 'foo'
yield assert_equal, res.outputs, None
class CommandLineInputSpec1(nib.CommandLineInputSpec):
foo = nib.Str(argstr='%s', desc='a str')
goo = nib.traits.Bool(argstr='-g', desc='a bool', position=0)
hoo = nib.traits.List(argstr='-l %s', desc='a list')
moo = nib.traits.List(argstr='-i %d...', desc='a repeated list',
position=-1)
noo = nib.traits.Int(argstr='-x %d', desc='an int')
roo = nib.traits.Str(desc='not on command line')
soo = nib.traits.Bool(argstr="-soo")
nib.CommandLine.input_spec = CommandLineInputSpec1
ci4 = nib.CommandLine(command='cmd')
ci4.inputs.foo = 'foo'
ci4.inputs.goo = True
ci4.inputs.hoo = ['a', 'b']
ci4.inputs.moo = [1, 2, 3]
ci4.inputs.noo = 0
ci4.inputs.roo = 'hello'
ci4.inputs.soo = False
cmd = ci4._parse_inputs()
yield assert_equal, cmd[0], '-g'
yield assert_equal, cmd[-1], '-i 1 -i 2 -i 3'
yield assert_true, 'hello' not in ' '.join(cmd)
yield assert_true, '-soo' not in ' '.join(cmd)
ci4.inputs.soo = True
cmd = ci4._parse_inputs()
yield assert_true, '-soo' in ' '.join(cmd)
class CommandLineInputSpec2(nib.CommandLineInputSpec):
foo = nib.File(argstr='%s', desc='a str', genfile=True)
nib.CommandLine.input_spec = CommandLineInputSpec2
ci5 = nib.CommandLine(command='cmd')
yield assert_raises, NotImplementedError, ci5._parse_inputs
class DerivedClass(nib.CommandLine):
input_spec = CommandLineInputSpec2
def _gen_filename(self, name):
return 'filename'
ci6 = DerivedClass(command='cmd')
yield assert_equal, ci6._parse_inputs()[0], 'filename'
nib.CommandLine.input_spec = nib.CommandLineInputSpec
def test_Commandline_environ():
from nipype import config
config.set_default_config()
ci3 = nib.CommandLine(command='echo')
res = ci3.run()
yield assert_equal, res.runtime.environ['DISPLAY'], ':1'
config.set('execution', 'display_variable', ':3')
res = ci3.run()
yield assert_false, 'DISPLAY' in ci3.inputs.environ
yield assert_equal, res.runtime.environ['DISPLAY'], ':3'
ci3.inputs.environ = {'DISPLAY': ':2'}
res = ci3.run()
yield assert_equal, res.runtime.environ['DISPLAY'], ':2'
def test_CommandLine_output():
tmp_infile = setup_file()
tmpd, name = os.path.split(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
yield assert_true, os.path.exists(tmp_infile)
ci = nib.CommandLine(command='ls -l')
ci.inputs.terminal_output = 'allatonce'
res = ci.run()
yield assert_equal, res.runtime.merged, ''
yield assert_true, name in res.runtime.stdout
ci = nib.CommandLine(command='ls -l')
ci.inputs.terminal_output = 'file'
res = ci.run()
yield assert_true, 'stdout.nipype' in res.runtime.stdout
yield assert_true, isinstance(res.runtime.stdout, (str, bytes))
ci = nib.CommandLine(command='ls -l')
ci.inputs.terminal_output = 'none'
res = ci.run()
yield assert_equal, res.runtime.stdout, ''
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_true, 'stdout.nipype' in res.runtime.stdout
os.chdir(pwd)
teardown_file(tmpd)
def test_global_CommandLine_output():
tmp_infile = setup_file()
tmpd, name = os.path.split(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_true, name in res.runtime.stdout
yield assert_true, os.path.exists(tmp_infile)
nib.CommandLine.set_default_terminal_output('allatonce')
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_equal, res.runtime.merged, ''
yield assert_true, name in res.runtime.stdout
nib.CommandLine.set_default_terminal_output('file')
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_true, 'stdout.nipype' in res.runtime.stdout
nib.CommandLine.set_default_terminal_output('none')
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_equal, res.runtime.stdout, ''
os.chdir(pwd)
teardown_file(tmpd)
def assert_not_raises(fn, *args, **kwargs):
fn(*args, **kwargs)
return True
def check_dict(ref_dict, tst_dict):
"""Compare dictionaries of inputs and and those loaded from json files"""
def to_list(x):
if isinstance(x, tuple):
x = list(x)
if isinstance(x, list):
for i, xel in enumerate(x):
x[i] = to_list(xel)
return x
failed_dict = {}
for key, value in list(ref_dict.items()):
newval = to_list(tst_dict[key])
if newval != value:
failed_dict[key] = (value, newval)
return failed_dict
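# Illustrative example (not part of the original test module): check_dict
# reports only the mismatching keys, after normalising tuples in the test
# dict to lists, e.g.
#
#     check_dict({'a': [2, 3]}, {'a': (2, 3)})  ->  {}
#     check_dict({'a': 1}, {'a': 2})            ->  {'a': (1, 2)}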
| bsd-3-clause | 3,826,922,991,494,386,700 | 33.89433 | 91 | 0.642699 | false |
Atothendrew/SpartanTokenService | SpartanTokenService.py | 1 | 3313 | #!/usr/bin/python
import re
import json
import random
import urllib
import requests
from requests import Request, Session
from requests.cookies import RequestsCookieJar
MS_LOGIN = "https://login.live.com/login.srf?id=2"
WAYPOINT_GATEWAY = "https://www.halowaypoint.com/oauth/signin?returnUrl=https%3a%2f%2fwww.halowaypoint.com%2fen-us&locale=en-US"
WAYPOINT_REGISTER_URL = "https://settings.svc.halowaypoint.com/RegisterClientService.svc/spartantoken/wlid?_={0}"
SPARTAN_TOKEN_GENERATOR = "https://app.halowaypoint.com/oauth/spartanToken"
URL_TO_SCRAPE = "https://login.live.com/oauth20_authorize.srf?client_id=000000004C0BD2F1&scope=xbox.basic+xbox.offline_access&response_type=code&redirect_uri=https://www.halowaypoint.com/oauth/callback&state=https%253a%252f%252fwww.halowaypoint.com%252fen-us&locale=en-US&display=touch"
URL_TO_POST = "https://login.live.com/ppsecure/post.srf?client_id=000000004C0BD2F1&scope=xbox.basic+xbox.offline_access&response_type=code&redirect_uri=https://www.halowaypoint.com/oauth/callback&state=https%253a%252f%252fwww.halowaypoint.com%252fen-us&locale=en-US&display=touch&bk=1383096785"
EMAIL = "PLACE_EMAIL_HERE"
PASSWORD = "PLACE_PASSWORD_HERE"
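# Usage sketch (illustrative; get_spartan_token is defined below): with the
# placeholders above replaced by real Live credentials, a token is fetched as
#
#     token = get_spartan_token()
#     if token is not None:
#         print token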
def get_spartan_token():
# Get the First Cookies
cookie_container = RequestsCookieJar()
first_response = requests.get(URL_TO_SCRAPE)
body = first_response.text.encode('utf-8', 'ignore')
for cookie in first_response.cookies: cookie_container.set_cookie(cookie)
# Get the PPFT
ppft_regex = re.compile("name=\"PPFT\".*?value=\"(.*?)\"")
ppft_match = re.findall(ppft_regex, body)
assert len(ppft_match) == 1
ppft = ppft_match[0]
# Prepare the login to Xbox
ppsx = "Pass"
query = "PPFT={ppft}&login={email}&passwd={password}&LoginOptions=3&NewUser=1&PPSX={ppsx}&type=11&i3={random}&m1=1680&m2=1050&m3=0&i12=1&i17=0&i18=__MobileLogin|1".format(
ppft = ppft, email = urllib.quote(EMAIL), password = PASSWORD, ppsx = ppsx, random = random.randint(15000, 50000))
headers = {"Content-Type": "application/x-www-form-urlencoded", "Host": "login.live.com", "Expect": "100-continue", "Connection": "Keep-Alive"}
# Stream the login to xbox
s = Session()
login_request = Request('POST', URL_TO_POST, headers = headers, data = query, cookies = cookie_container)
prepped = s.prepare_request(login_request)
login_response = s.send(prepped, stream = True, allow_redirects = False)
for cookie in login_response.cookies: cookie_container.set_cookie(cookie)
if "Location" not in login_response.headers: return None
next_location = login_response.headers['Location']
# Get Waypoint Cookies and Headers
waypoint_response = requests.get(next_location, allow_redirects = False)
if "WebAuth" not in waypoint_response.cookies: return None
for cookie in waypoint_response.cookies: cookie_container.set_cookie(cookie)
# Get the Spartan Token
headers = {"UserAgent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.52 Safari/537.17"}
token_response = requests.get(SPARTAN_TOKEN_GENERATOR, headers = headers, cookies = cookie_container)
spartan_token = token_response.text
spartan_token = json.loads(spartan_token)["SpartanToken"]
return spartan_token | mit | 1,194,989,679,917,223,700 | 53.327869 | 294 | 0.735587 | false |
sacharya/nova | nova/tests/api/openstack/compute/plugins/v3/test_user_data.py | 1 | 9909 | # Copyright 2012 OpenStack Foundation
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import datetime
import uuid
from oslo.config import cfg
import webob
from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import servers
from nova.api.openstack.compute.plugins.v3 import user_data
from nova.compute import api as compute_api
from nova.compute import flavors
from nova import db
from nova.network import manager
from nova.openstack.common import jsonutils
from nova.openstack.common import rpc
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests.image import fake
CONF = cfg.CONF
FAKE_UUID = fakes.FAKE_UUID
def fake_gen_uuid():
return FAKE_UUID
def return_security_group(context, instance_id, security_group_id):
pass
class ServersControllerCreateTest(test.TestCase):
def setUp(self):
"""Shared implementation for tests below that create instance."""
super(ServersControllerCreateTest, self).setUp()
self.flags(verbose=True,
enable_instance_password=True)
self.instance_cache_num = 0
self.instance_cache_by_id = {}
self.instance_cache_by_uuid = {}
ext_info = plugins.LoadedExtensionInfo()
self.controller = servers.ServersController(extension_info=ext_info)
CONF.set_override('extensions_blacklist', 'os-user-data',
'osapi_v3')
self.no_user_data_controller = servers.ServersController(
extension_info=ext_info)
def instance_create(context, inst):
inst_type = flavors.get_flavor_by_flavor_id(3)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
def_image_ref = 'http://localhost/images/%s' % image_uuid
self.instance_cache_num += 1
instance = fake_instance.fake_db_instance(**{
'id': self.instance_cache_num,
'display_name': inst['display_name'] or 'test',
'uuid': FAKE_UUID,
'instance_type': dict(inst_type),
'access_ip_v4': '1.2.3.4',
'access_ip_v6': 'fead::1234',
'image_ref': inst.get('image_ref', def_image_ref),
'user_id': 'fake',
'project_id': 'fake',
'reservation_id': inst['reservation_id'],
"created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
"updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
user_data.ATTRIBUTE_NAME: None,
"progress": 0,
"fixed_ips": [],
"task_state": "",
"vm_state": "",
"root_device_name": inst.get('root_device_name', 'vda'),
})
self.instance_cache_by_id[instance['id']] = instance
self.instance_cache_by_uuid[instance['uuid']] = instance
return instance
def instance_get(context, instance_id):
"""Stub for compute/api create() pulling in instance after
scheduling
"""
return self.instance_cache_by_id[instance_id]
def instance_update(context, uuid, values):
instance = self.instance_cache_by_uuid[uuid]
instance.update(values)
return instance
def server_update(context, instance_uuid, params):
inst = self.instance_cache_by_uuid[instance_uuid]
inst.update(params)
return (inst, inst)
def fake_method(*args, **kwargs):
pass
def project_get_networks(context, user_id):
return dict(id='1', host='localhost')
def queue_get_for(context, *args):
return 'network_topic'
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_key_pair_funcs(self.stubs)
fake.stub_out_image_service(self.stubs)
fakes.stub_out_nw_api(self.stubs)
self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
self.stubs.Set(db, 'instance_add_security_group',
return_security_group)
self.stubs.Set(db, 'project_get_networks',
project_get_networks)
self.stubs.Set(db, 'instance_create', instance_create)
self.stubs.Set(db, 'instance_system_metadata_update',
fake_method)
self.stubs.Set(db, 'instance_get', instance_get)
self.stubs.Set(db, 'instance_update', instance_update)
self.stubs.Set(rpc, 'cast', fake_method)
self.stubs.Set(db, 'instance_update_and_get_original',
server_update)
self.stubs.Set(rpc, 'queue_get_for', queue_get_for)
self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip',
fake_method)
def _test_create_extra(self, params, no_image=False,
override_controller=None):
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
server = dict(name='server_test', image_ref=image_uuid, flavor_ref=2)
if no_image:
server.pop('image_ref', None)
server.update(params)
body = dict(server=server)
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
if override_controller:
server = override_controller.create(req, body).obj['server']
else:
server = self.controller.create(req, body).obj['server']
def test_create_instance_with_user_data_disabled(self):
params = {user_data.ATTRIBUTE_NAME: base64.b64encode('fake')}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('user_data', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(
params,
override_controller=self.no_user_data_controller)
def test_create_instance_with_user_data_enabled(self):
params = {user_data.ATTRIBUTE_NAME: base64.b64encode('fake')}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertIn('user_data', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_user_data(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/flavors/3'
value = "A random string"
body = {
'server': {
'name': 'user_data_test',
'image_ref': image_href,
'flavor_ref': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
user_data.ATTRIBUTE_NAME: base64.b64encode(value),
},
}
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_bad_user_data(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/flavors/3'
value = "A random string"
body = {
'server': {
'name': 'user_data_test',
'image_ref': image_href,
'flavor_ref': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
user_data.ATTRIBUTE_NAME: value,
},
}
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
class TestServerCreateRequestXMLDeserializer(test.TestCase):
def setUp(self):
super(TestServerCreateRequestXMLDeserializer, self).setUp()
ext_info = plugins.LoadedExtensionInfo()
controller = servers.ServersController(extension_info=ext_info)
self.deserializer = servers.CreateDeserializer(controller)
def test_request_with_user_data(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v3"
xmlns:%(alias)s="%(namespace)s"
name="user_data_test"
image_ref="1"
flavor_ref="1"
%(alias)s:user_data="IyEvYmluL2Jhc2gKL2Jpbi9"/>""" % {
'alias': user_data.ALIAS,
'namespace': user_data.UserData.namespace}
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "user_data_test",
"image_ref": "1",
"flavor_ref": "1",
user_data.ATTRIBUTE_NAME: "IyEvYmluL2Jhc2gKL2Jpbi9"
},
}
self.assertEqual(request['body'], expected)
| apache-2.0 | -7,509,084,529,474,616,000 | 36.392453 | 78 | 0.58795 | false |
mvanveen/catcher | catcher.py | 1 | 1373 | import traceback
TRACE_STACK = []
class Trace(object):
def __init__(self, exception, stack=None):
if not isinstance(exception, Exception):
raise ValueError("Expected an Exception object as first argument")
if not stack:
stack = traceback.extract_stack()
# pop off current frame and initial catch frame
#stack.pop()
#stack.pop()
# TODO: try to grab exception if it's not passed in explicitly
self._exception = exception
self._stack = stack
@property
def exception(self):
return self._exception
@property
def stack(self):
return self._stack
def __str__(self):
return ''.join(
traceback.format_list(self.stack) +
traceback.format_exception_only(
type(self.exception),
self.exception
)
).strip()
def __repr__(self):
return '<Trace (%s)>' % (
str(type(self.exception)).replace('exceptions.', ''),
)
def catch(e):
TRACE_STACK.append(Trace(e))
def dump(exception_type=None, lineno=None, module=None):
  # NOTE: the filter arguments are accepted but currently unused; every
  # recorded trace is returned.
  return TRACE_STACK
def clear():
  # rebind the module-level trace list
  global TRACE_STACK
  TRACE_STACK = []
if __name__ == '__main__':
import random
for i in range(20):
try:
random.randint(0,5) / 0
except Exception, e:
catch(e)
print str(dump()[0])
| mit | -2,296,891,424,965,171,500 | 21.508197 | 71 | 0.576839 | false |
horacioMartinez/dakara-client | tools/protocol_generator/generator/gendefs_js.py | 1 | 7898 |
class Packet:
def __init__(self, name, args):
self.name = name
self.args = args
def get_header_fmt(self):
return """
function {name} (buffer) {{
this.id = {base_name}ID.{name} /* {packet_id} */;
if (buffer){{
buffer.ReadByte(); /* PacketID */
{ctor_fields_bytequeue}
}}
this.serialize = function(buffer) {{
buffer.WriteByte({base_name}ID.{name}); /* PacketID: {packet_id} */
{serialize_fields}
buffer.flush();
}};
this.dispatch = function (d){{
d.handle{name}(this);
}};
}}
"""
def get_builder_fmt(self):
return """
Build{name}({header_fields_signature}) {{
var e = new {name}();
{items_assign_build}
return e;
}}
"""
def get_parametros_fmt(self):
return """
{parametros_fields}
handler.handle{name}( {parametros_args} );
"""
def get_argumentosHandler_fmt(self):
return """{parametros_args}
"""
# def get_handler_fmt(self):
# return """
#{items_assign_build}
#"""
def get_handler_fmt(self):
return """
send{name}({header_fields_signature}) {{
p = this.protocolo.Build{name}({header_fields_signature} );
p.serialize(this.byteQueue);
}}
"""
def get_ctor_fields_bytequeue_fmt(self, is_array):
if is_array:
return " var i; this.{arg_name}= []; for (i=0; i<{array_size}; ++i) this.{arg_name}[i] = buffer.{type_reader_name}();\n"
else:
return " this.{arg_name} = buffer.{type_reader_name}();\n"
def get_parametros_fields_fmt(self, is_array):
if is_array:
return " var i; var {arg_name}= []; for (i=0; i<{array_size}; ++i) {arg_name}[i] = buffer.{type_reader_name}();\n"
else:
return " var {arg_name} = buffer.{type_reader_name}();\n"
def get_parametros_args_fmt(self, is_array):
if is_array:
return "{arg_name},"
else:
return "{arg_name},"
def get_serialize_fields_fmt(self, is_array):
if is_array:
return " var i; for (i=0; i<{array_size}; ++i) buffer.{type_writer_name}(this.{arg_name}[i]);\n"
else:
return " buffer.{type_writer_name}(this.{arg_name});\n"
class PacketGMHeader(Packet):
def __init__(self, name, args):
Packet.__init__(self, name, args)
def get_header_fmt(self):
return """
function {name} (buffer) {{
this.id = {base_name}ID.{name} /* {packet_id} */;
if (buffer){{
buffer.ReadByte(); /* PacketID */
{ctor_fields_bytequeue}
}}
this.serialize = function(buffer) {{
{serialize_fields}
}};
this.dispatch = function (d){{
d.handle{name}(this);
}};
}}
"""
class PacketGMCommand(Packet):
def __init__(self, name, args):
self.name = name
self.args = args
def get_header_fmt(self):
return """
function {name} (buffer) {{
this.id = {base_name}ID.{name} /* {packet_id} */;
if (buffer){{
buffer.ReadByte(); /* PacketID */
{ctor_fields_bytequeue}
}}
this.serialize = function(buffer) {{
buffer.WriteByte(ClientPacketID_GMCommands);
buffer.WriteByte({base_name}ID.{name}); /* PacketID: {packet_id} */
{serialize_fields}
buffer.flush();
}};
this.dispatch = function (d){{
d.handle{name}(this);
}};
}}
"""
class PacketWithCount(Packet):
def __init__(self, name, args, reader_type):
Packet.__init__(self, name, args)
self.reader_type = reader_type
def get_header_fmt(self):
return """
function {name} (buffer) {{
this.id = {base_name}ID.{name} /* {packet_id} */;
this.Items = [];
if (buffer) {{
buffer.ReadByte(); /* PacketID */
var Count = buffer.__COUNTREADER__();
var i;
for (i=0; i<Count; ++i) {{
var e = {{
{ctor_fields_bytequeue}
}};
this.Items.push(e);
}}
}}
""".replace("__COUNTREADER__", TYPE_TO_READER_NAME[self.reader_type]) + """
this.serialize = function(buffer) {{
buffer.WriteByte({base_name}ID.{name}); /* PacketID: {packet_id} */
var Count = Items.length;
buffer.__COUNTWRITER__(Count);
var i;
for (i=0; i<Count; ++i) {{
e = Items[i];
{serialize_fields}
buffer.flush();
}}
}};
this.dispatch = function (d){{
d.handle{name}(this);
}};
this.addItem = function({header_fields_signature}) {{
var e = {{
{items_assign_e}
}};
this.Items.push(e);
}}
}}""".replace("__COUNTWRITER__", TYPE_TO_WRITER_NAME[self.reader_type])
def get_handler_fmt(self):
return """ /*ACA*/
var e = {{
{items_assign_e}
}};
this.Items.push(e);
}}"""
def get_parametros_fmt(self):
return """
/* Packet con count! */
var Items = [];
var Count = buffer.__COUNTREADER__();
var i;
for (i=0; i<Count; ++i) {{
var e = {{
{ctor_fields_bytequeue}
}};
Items.push(e);
}}
handler.handle{name}(Items);
""".replace("__COUNTREADER__", TYPE_TO_READER_NAME[self.reader_type])
def get_argumentosHandler_fmt(self):
return """Items
"""
# def get_handler_fmt(self):
# return """
#{items_assign_build}
#"""
def get_ctor_fields_bytequeue_fmt(self, is_array):
if is_array:
return " {{ var i; e.{arg_name} = []; for (i=0; i<{array_size}; ++i) e.{arg_name}[i] = buffer.{type_reader_name}(); }}\n"
else:
return " {arg_name} : buffer.{type_reader_name}(),\n"
def get_serialize_fields_fmt(self, is_array):
if is_array:
return " {{ var i; for (i=0; i<{array_size}; ++i) buffer.{type_writer_name}(e.{arg_name}[i]); }}\n"
else:
return " buffer.{type_writer_name}(e.{arg_name});\n"
TYPE_UNICODE_STRING = 0
TYPE_UNICODE_STRING_FIXED = 1
TYPE_BINARY_STRING = 2
TYPE_BINARY_STRING_FIXED = 3
TYPE_I8 = 4
TYPE_I16 = 5
TYPE_I32 = 6
TYPE_SINGLE = 7 # Float
TYPE_DOUBLE = 8 # Double
TYPE_BOOL = 9
TYPE_ARRAY = (1 << 8)
TYPE_TO_STR = {
TYPE_UNICODE_STRING: 'var',
TYPE_UNICODE_STRING_FIXED: 'var',
TYPE_BINARY_STRING: 'var',
TYPE_BINARY_STRING_FIXED: 'var',
TYPE_I8: 'var',
TYPE_I16: 'var',
TYPE_I32: 'var',
TYPE_SINGLE: 'var',
TYPE_DOUBLE: 'var',
TYPE_BOOL: 'var',
}
TYPE_TO_SIGNATURE_STR = {
TYPE_UNICODE_STRING: '',
TYPE_UNICODE_STRING_FIXED: '',
TYPE_BINARY_STRING: '',
TYPE_BINARY_STRING_FIXED: '',
TYPE_I8: '',
TYPE_I16: '',
TYPE_I32: '',
TYPE_SINGLE: '',
TYPE_DOUBLE: '',
TYPE_BOOL: '',
}
TYPE_TO_READER_NAME = {
TYPE_UNICODE_STRING: 'ReadUnicodeString',
#TYPE_UNICODE_STRING_FIXED: '',
#TYPE_BINARY_STRING: '',
#TYPE_BINARY_STRING_FIXED: 'ReadBinaryFixed',
TYPE_I8: 'ReadByte',
TYPE_I16: 'ReadInteger',
TYPE_I32: 'ReadLong',
TYPE_SINGLE: 'ReadSingle',
TYPE_DOUBLE: 'ReadDouble',
TYPE_BOOL: 'ReadBoolean',
}
TYPE_TO_WRITER_NAME = {
TYPE_UNICODE_STRING: 'WriteUnicodeString',
#TYPE_UNICODE_STRING_FIXED: '',
#TYPE_BINARY_STRING: '',
#TYPE_BINARY_STRING_FIXED: 'ReadBinaryFixed',
TYPE_I8: 'WriteByte',
TYPE_I16: 'WriteInteger',
TYPE_I32: 'WriteLong',
TYPE_SINGLE: 'WriteSingle',
TYPE_DOUBLE: 'WriteDouble',
TYPE_BOOL: 'WriteBoolean',
}
TYPE_SIZE = {
TYPE_UNICODE_STRING: 2,
#TYPE_UNICODE_STRING_FIXED: 0,
TYPE_BINARY_STRING: 2,
#TYPE_BINARY_STRING_FIXED: 0,
TYPE_I8: 1,
TYPE_I16: 2,
TYPE_I32: 4,
TYPE_SINGLE: 4,
TYPE_DOUBLE: 8,
TYPE_BOOL: 1,
}
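# Example (illustrative): a field declared as TYPE_I16 renders with
# TYPE_TO_STR -> 'var', reads via buffer.ReadInteger(), writes via
# buffer.WriteInteger(), and contributes TYPE_SIZE[TYPE_I16] == 2 bytes.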
| mit | -5,359,951,213,729,725,000 | 24.726384 | 144 | 0.530767 | false |
Jeff-Tian/mybnb | Python27/Lib/test/test_hmac.py | 2 | 17810 | # coding: utf-8
import hmac
import hashlib
import unittest
import warnings
from test import test_support
class TestVectorsTestCase(unittest.TestCase):
def test_md5_vectors(self):
# Test the HMAC module against test vectors from the RFC.
def md5test(key, data, digest):
h = hmac.HMAC(key, data)
self.assertEqual(h.hexdigest().upper(), digest.upper())
md5test(chr(0x0b) * 16,
"Hi There",
"9294727A3638BB1C13F48EF8158BFC9D")
md5test("Jefe",
"what do ya want for nothing?",
"750c783e6ab0b503eaa86e310a5db738")
md5test(chr(0xAA)*16,
chr(0xDD)*50,
"56be34521d144c88dbb8c733f0e8b3f6")
md5test("".join([chr(i) for i in range(1, 26)]),
chr(0xCD) * 50,
"697eaf0aca3a3aea3a75164746ffaa79")
md5test(chr(0x0C) * 16,
"Test With Truncation",
"56461ef2342edc00f9bab995690efd4c")
md5test(chr(0xAA) * 80,
"Test Using Larger Than Block-Size Key - Hash Key First",
"6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd")
md5test(chr(0xAA) * 80,
("Test Using Larger Than Block-Size Key "
"and Larger Than One Block-Size Data"),
"6f630fad67cda0ee1fb1f562db3aa53e")
def test_sha_vectors(self):
def shatest(key, data, digest):
h = hmac.HMAC(key, data, digestmod=hashlib.sha1)
self.assertEqual(h.hexdigest().upper(), digest.upper())
shatest(chr(0x0b) * 20,
"Hi There",
"b617318655057264e28bc0b6fb378c8ef146be00")
shatest("Jefe",
"what do ya want for nothing?",
"effcdf6ae5eb2fa2d27416d5f184df9c259a7c79")
shatest(chr(0xAA)*20,
chr(0xDD)*50,
"125d7342b9ac11cd91a39af48aa17b4f63f175d3")
shatest("".join([chr(i) for i in range(1, 26)]),
chr(0xCD) * 50,
"4c9007f4026250c6bc8414f9bf50c86c2d7235da")
shatest(chr(0x0C) * 20,
"Test With Truncation",
"4c1a03424b55e07fe7f27be1d58bb9324a9a5a04")
shatest(chr(0xAA) * 80,
"Test Using Larger Than Block-Size Key - Hash Key First",
"aa4ae5e15272d00e95705637ce8a3b55ed402112")
shatest(chr(0xAA) * 80,
("Test Using Larger Than Block-Size Key "
"and Larger Than One Block-Size Data"),
"e8e99d0f45237d786d6bbaa7965c7808bbff1a91")
def _rfc4231_test_cases(self, hashfunc):
def hmactest(key, data, hexdigests):
h = hmac.HMAC(key, data, digestmod=hashfunc)
self.assertEqual(h.hexdigest().lower(), hexdigests[hashfunc])
# 4.2. Test Case 1
hmactest(key = '\x0b'*20,
data = 'Hi There',
hexdigests = {
hashlib.sha224: '896fb1128abbdf196832107cd49df33f'
'47b4b1169912ba4f53684b22',
hashlib.sha256: 'b0344c61d8db38535ca8afceaf0bf12b'
'881dc200c9833da726e9376c2e32cff7',
hashlib.sha384: 'afd03944d84895626b0825f4ab46907f'
'15f9dadbe4101ec682aa034c7cebc59c'
'faea9ea9076ede7f4af152e8b2fa9cb6',
hashlib.sha512: '87aa7cdea5ef619d4ff0b4241a1d6cb0'
'2379f4e2ce4ec2787ad0b30545e17cde'
'daa833b7d6b8a702038b274eaea3f4e4'
'be9d914eeb61f1702e696c203a126854',
})
# 4.3. Test Case 2
hmactest(key = 'Jefe',
data = 'what do ya want for nothing?',
hexdigests = {
hashlib.sha224: 'a30e01098bc6dbbf45690f3a7e9e6d0f'
'8bbea2a39e6148008fd05e44',
hashlib.sha256: '5bdcc146bf60754e6a042426089575c7'
'5a003f089d2739839dec58b964ec3843',
hashlib.sha384: 'af45d2e376484031617f78d2b58a6b1b'
'9c7ef464f5a01b47e42ec3736322445e'
'8e2240ca5e69e2c78b3239ecfab21649',
hashlib.sha512: '164b7a7bfcf819e2e395fbe73b56e0a3'
'87bd64222e831fd610270cd7ea250554'
'9758bf75c05a994a6d034f65f8f0e6fd'
'caeab1a34d4a6b4b636e070a38bce737',
})
# 4.4. Test Case 3
hmactest(key = '\xaa'*20,
data = '\xdd'*50,
hexdigests = {
hashlib.sha224: '7fb3cb3588c6c1f6ffa9694d7d6ad264'
'9365b0c1f65d69d1ec8333ea',
hashlib.sha256: '773ea91e36800e46854db8ebd09181a7'
'2959098b3ef8c122d9635514ced565fe',
hashlib.sha384: '88062608d3e6ad8a0aa2ace014c8a86f'
'0aa635d947ac9febe83ef4e55966144b'
'2a5ab39dc13814b94e3ab6e101a34f27',
hashlib.sha512: 'fa73b0089d56a284efb0f0756c890be9'
'b1b5dbdd8ee81a3655f83e33b2279d39'
'bf3e848279a722c806b485a47e67c807'
'b946a337bee8942674278859e13292fb',
})
# 4.5. Test Case 4
hmactest(key = ''.join([chr(x) for x in xrange(0x01, 0x19+1)]),
data = '\xcd'*50,
hexdigests = {
hashlib.sha224: '6c11506874013cac6a2abc1bb382627c'
'ec6a90d86efc012de7afec5a',
hashlib.sha256: '82558a389a443c0ea4cc819899f2083a'
'85f0faa3e578f8077a2e3ff46729665b',
hashlib.sha384: '3e8a69b7783c25851933ab6290af6ca7'
'7a9981480850009cc5577c6e1f573b4e'
'6801dd23c4a7d679ccf8a386c674cffb',
hashlib.sha512: 'b0ba465637458c6990e5a8c5f61d4af7'
'e576d97ff94b872de76f8050361ee3db'
'a91ca5c11aa25eb4d679275cc5788063'
'a5f19741120c4f2de2adebeb10a298dd',
})
# 4.7. Test Case 6
hmactest(key = '\xaa'*131,
data = 'Test Using Larger Than Block-Siz'
'e Key - Hash Key First',
hexdigests = {
hashlib.sha224: '95e9a0db962095adaebe9b2d6f0dbce2'
'd499f112f2d2b7273fa6870e',
hashlib.sha256: '60e431591ee0b67f0d8a26aacbf5b77f'
'8e0bc6213728c5140546040f0ee37f54',
hashlib.sha384: '4ece084485813e9088d2c63a041bc5b4'
'4f9ef1012a2b588f3cd11f05033ac4c6'
'0c2ef6ab4030fe8296248df163f44952',
hashlib.sha512: '80b24263c7c1a3ebb71493c1dd7be8b4'
'9b46d1f41b4aeec1121b013783f8f352'
'6b56d037e05f2598bd0fd2215d6a1e52'
'95e64f73f63f0aec8b915a985d786598',
})
# 4.8. Test Case 7
hmactest(key = '\xaa'*131,
data = 'This is a test using a larger th'
'an block-size key and a larger t'
'han block-size data. The key nee'
'ds to be hashed before being use'
'd by the HMAC algorithm.',
hexdigests = {
hashlib.sha224: '3a854166ac5d9f023f54d517d0b39dbd'
'946770db9c2b95c9f6f565d1',
hashlib.sha256: '9b09ffa71b942fcb27635fbcd5b0e944'
'bfdc63644f0713938a7f51535c3a35e2',
hashlib.sha384: '6617178e941f020d351e2f254e8fd32c'
'602420feb0b8fb9adccebb82461e99c5'
'a678cc31e799176d3860e6110c46523e',
hashlib.sha512: 'e37b6a775dc87dbaa4dfa9f96e5e3ffd'
'debd71f8867289865df5a32d20cdc944'
'b6022cac3c4982b10d5eeb55c3e4de15'
'134676fb6de0446065c97440fa8c6a58',
})
def test_sha224_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha224)
def test_sha256_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha256)
def test_sha384_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha384)
def test_sha512_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha512)
def test_legacy_block_size_warnings(self):
class MockCrazyHash(object):
"""Ain't no block_size attribute here."""
def __init__(self, *args):
self._x = hashlib.sha1(*args)
self.digest_size = self._x.digest_size
def update(self, v):
self._x.update(v)
def digest(self):
return self._x.digest()
with warnings.catch_warnings():
warnings.simplefilter('error', RuntimeWarning)
with self.assertRaises(RuntimeWarning):
hmac.HMAC('a', 'b', digestmod=MockCrazyHash)
self.fail('Expected warning about missing block_size')
MockCrazyHash.block_size = 1
with self.assertRaises(RuntimeWarning):
hmac.HMAC('a', 'b', digestmod=MockCrazyHash)
self.fail('Expected warning about small block_size')
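        # Note (illustrative): HMAC only needs digestmod to provide the
        # digest_size/block_size attributes plus the update()/digest()
        # protocol, which is why the minimal MockCrazyHash wrapper above is
        # enough to exercise the block_size warnings.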
class ConstructorTestCase(unittest.TestCase):
def test_normal(self):
# Standard constructor call.
        try:
            h = hmac.HMAC("key")
        except:
            self.fail("Standard constructor call raised exception.")
def test_withtext(self):
# Constructor call with text.
try:
h = hmac.HMAC("key", "hash this!")
except:
self.fail("Constructor call with text argument raised exception.")
def test_withmodule(self):
# Constructor call with text and digest module.
try:
h = hmac.HMAC("key", "", hashlib.sha1)
except:
self.fail("Constructor call with hashlib.sha1 raised exception.")
class SanityTestCase(unittest.TestCase):
def test_default_is_md5(self):
# Testing if HMAC defaults to MD5 algorithm.
# NOTE: this whitebox test depends on the hmac class internals
h = hmac.HMAC("key")
self.assertTrue(h.digest_cons == hashlib.md5)
def test_exercise_all_methods(self):
# Exercising all methods once.
# This must not raise any exceptions
try:
h = hmac.HMAC("my secret key")
h.update("compute the hash of this text!")
dig = h.digest()
dig = h.hexdigest()
h2 = h.copy()
except:
self.fail("Exception raised during normal usage of HMAC class.")
class CopyTestCase(unittest.TestCase):
def test_attributes(self):
# Testing if attributes are of same type.
h1 = hmac.HMAC("key")
h2 = h1.copy()
self.assertTrue(h1.digest_cons == h2.digest_cons,
"digest constructors don't match.")
self.assertTrue(type(h1.inner) == type(h2.inner),
"Types of inner don't match.")
self.assertTrue(type(h1.outer) == type(h2.outer),
"Types of outer don't match.")
def test_realcopy(self):
# Testing if the copy method created a real copy.
h1 = hmac.HMAC("key")
h2 = h1.copy()
# Using id() in case somebody has overridden __cmp__.
self.assertTrue(id(h1) != id(h2), "No real copy of the HMAC instance.")
self.assertTrue(id(h1.inner) != id(h2.inner),
"No real copy of the attribute 'inner'.")
self.assertTrue(id(h1.outer) != id(h2.outer),
"No real copy of the attribute 'outer'.")
def test_equality(self):
# Testing if the copy has the same digests.
h1 = hmac.HMAC("key")
h1.update("some random text")
h2 = h1.copy()
self.assertTrue(h1.digest() == h2.digest(),
"Digest of copy doesn't match original digest.")
self.assertTrue(h1.hexdigest() == h2.hexdigest(),
"Hexdigest of copy doesn't match original hexdigest.")
class CompareDigestTestCase(unittest.TestCase):
def test_compare_digest(self):
# Testing input type exception handling
a, b = 100, 200
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = 100, b"foobar"
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = b"foobar", 200
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = u"foobar", b"foobar"
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = b"foobar", u"foobar"
self.assertRaises(TypeError, hmac.compare_digest, a, b)
# Testing bytes of different lengths
a, b = b"foobar", b"foo"
self.assertFalse(hmac.compare_digest(a, b))
a, b = b"\xde\xad\xbe\xef", b"\xde\xad"
self.assertFalse(hmac.compare_digest(a, b))
# Testing bytes of same lengths, different values
a, b = b"foobar", b"foobaz"
self.assertFalse(hmac.compare_digest(a, b))
a, b = b"\xde\xad\xbe\xef", b"\xab\xad\x1d\xea"
self.assertFalse(hmac.compare_digest(a, b))
# Testing bytes of same lengths, same values
a, b = b"foobar", b"foobar"
self.assertTrue(hmac.compare_digest(a, b))
a, b = b"\xde\xad\xbe\xef", b"\xde\xad\xbe\xef"
self.assertTrue(hmac.compare_digest(a, b))
# Testing bytearrays of same lengths, same values
a, b = bytearray(b"foobar"), bytearray(b"foobar")
self.assertTrue(hmac.compare_digest(a, b))
        # Testing bytearrays of different lengths
a, b = bytearray(b"foobar"), bytearray(b"foo")
self.assertFalse(hmac.compare_digest(a, b))
# Testing bytearrays of same lengths, different values
a, b = bytearray(b"foobar"), bytearray(b"foobaz")
self.assertFalse(hmac.compare_digest(a, b))
# Testing byte and bytearray of same lengths, same values
a, b = bytearray(b"foobar"), b"foobar"
self.assertTrue(hmac.compare_digest(a, b))
self.assertTrue(hmac.compare_digest(b, a))
        # Testing byte and bytearray of different lengths
a, b = bytearray(b"foobar"), b"foo"
self.assertFalse(hmac.compare_digest(a, b))
self.assertFalse(hmac.compare_digest(b, a))
# Testing byte and bytearray of same lengths, different values
a, b = bytearray(b"foobar"), b"foobaz"
self.assertFalse(hmac.compare_digest(a, b))
self.assertFalse(hmac.compare_digest(b, a))
# Testing str of same lengths
a, b = "foobar", "foobar"
self.assertTrue(hmac.compare_digest(a, b))
        # Testing str of different lengths
a, b = "foo", "foobar"
self.assertFalse(hmac.compare_digest(a, b))
        # Testing str of same lengths, different values
a, b = "foobar", "foobaz"
self.assertFalse(hmac.compare_digest(a, b))
# Testing error cases
a, b = u"foobar", b"foobar"
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = b"foobar", u"foobar"
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = b"foobar", 1
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = 100, 200
self.assertRaises(TypeError, hmac.compare_digest, a, b)
a, b = "fooä", "fooä"
self.assertTrue(hmac.compare_digest(a, b))
with test_support.check_py3k_warnings():
# subclasses are supported by ignore __eq__
class mystr(str):
def __eq__(self, other):
return False
a, b = mystr("foobar"), mystr("foobar")
self.assertTrue(hmac.compare_digest(a, b))
a, b = mystr("foobar"), "foobar"
self.assertTrue(hmac.compare_digest(a, b))
a, b = mystr("foobar"), mystr("foobaz")
self.assertFalse(hmac.compare_digest(a, b))
with test_support.check_py3k_warnings():
class mybytes(bytes):
def __eq__(self, other):
return False
a, b = mybytes(b"foobar"), mybytes(b"foobar")
self.assertTrue(hmac.compare_digest(a, b))
a, b = mybytes(b"foobar"), b"foobar"
self.assertTrue(hmac.compare_digest(a, b))
a, b = mybytes(b"foobar"), mybytes(b"foobaz")
self.assertFalse(hmac.compare_digest(a, b))
def test_main():
test_support.run_unittest(
TestVectorsTestCase,
ConstructorTestCase,
SanityTestCase,
CopyTestCase,
CompareDigestTestCase,
)
if __name__ == "__main__":
test_main()
| apache-2.0 | 178,129,449,049,034,270 | 39.607477 | 79 | 0.537287 | false |
DummyDivision/Tsune | cardimporter/importer.py | 1 | 2077 | from django.db import transaction
from guardian.shortcuts import assign_perm
from sourcereader import AnkiCardSourceReader
from ankiconverter import ankiTupeToTsuneDict
from cardbox.card_model import Card, Deck
class AnkiImporter():
@transaction.commit_on_success
def importCollection(self,pathToApkg,user):
with AnkiCardSourceReader(pathToApkg) as ankireader:
self._createCollection(ankireader, user)
def _createDeck(self,deck):
names = deck['name'].split("::")
if len(names) > 1:
return Deck.objects.create(title=names[0], description="-".join(names[1:]))
return Deck.objects.create(title=names[0], description=names[0])
def _checkIfCreateDeck(self,current_cardlist):
return len(current_cardlist) > 0
def _convertAnkiCardsToTsuneCards(self,cardlist,deck):
tsuneCards = []
for card in cardlist:
tsuneDict = ankiTupeToTsuneDict(card)
convertedcard = self._createCardObjectFromTsuneDict(tsuneDict,deck)
tsuneCards.append(convertedcard)
return tsuneCards
def _addAllCardsToDeck(self,cardlist,deck):
return self._convertAnkiCardsToTsuneCards(cardlist,deck)
def _createCardObjectFromTsuneDict(self,tsuneDict,deck):
return Card.objects.create(deck=deck,front=tsuneDict["front"], back=tsuneDict["back"])
def _createCollection(self,ankireader, user):
deckdict = ankireader.getDictOfAllDecks()
for deck_id in deckdict.keys():
current_cardlist=ankireader.getAllCardsForDeck(deck_id)
if self._checkIfCreateDeck(current_cardlist):
deck = self._createDeck(deckdict[deck_id])
tsunecards = self._addAllCardsToDeck(current_cardlist,deck)
deck.save()
[card.save() for card in tsunecards]
self._assignPerms(user,deck)
def _assignPerms(self,user,deck):
assign_perm('view_deck',user,deck)
assign_perm('change_deck',user,deck)
assign_perm('delete_deck',user,deck)
| mit | -4,638,345,130,590,870,000 | 36.763636 | 94 | 0.676938 | false |
philanthropy-u/edx-platform | openedx/features/job_board/migrations/0001_initial.py | 1 | 2109 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.21 on 2020-03-31 10:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
import django_countries.fields
import model_utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Job',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('title', models.CharField(max_length=255)),
('company', models.CharField(max_length=255)),
('type', models.CharField(choices=[(b'remote', b'Remote'), (b'onsite', b'Onsite')], max_length=255)),
('compensation', models.CharField(choices=[(b'volunteer', b'Volunteer'), (b'hourly', b'Hourly'), (b'salaried', b'Salaried')], max_length=255)),
('hours', models.CharField(choices=[(b'fulltime', b'Full Time'), (b'parttime', b'Part Time'), (b'freelance', b'Freelance')], max_length=255)),
('city', models.CharField(max_length=255)),
('country', django_countries.fields.CountryField(max_length=2)),
('description', models.TextField()),
('function', models.TextField(blank=True, null=True)),
('responsibilities', models.TextField(blank=True, null=True)),
('website_link', models.URLField(blank=True, max_length=255, null=True)),
('contact_email', models.EmailField(max_length=255)),
('logo', models.ImageField(blank=True, null=True, upload_to=b'job-board/uploaded-logos/')),
],
options={
'abstract': False,
},
),
]
| agpl-3.0 | -7,991,687,555,766,982,000 | 48.046512 | 159 | 0.599336 | false |
makinacorpus/geonode | src/GeoNodePy/geonode/settings.py | 1 | 8876 | # -*- coding: utf-8 -*-
# Django settings for GeoNode project.
from urllib import urlencode
import os
_ = lambda x: x
DEBUG = True
SITENAME = "GeoNode"
SITEURL = "http://localhost:8000/"
TEMPLATE_DEBUG = DEBUG
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = os.path.join(PROJECT_ROOT,"..","..","..","development.db")
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Not used with sqlite3.
DATABASE_PORT = '' # Not used with sqlite3.
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en'
LANGUAGES = (
('en', 'English'),
('es', 'Español'),
('it', 'Italiano'),
('fr', 'Français'),
)
SITE_ID = 1
# Setting a custom test runner to avoid running the tests for some problematic 3rd party apps
TEST_RUNNER='django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
'--verbosity=2',
'--cover-erase',
'--nocapture',
'--with-coverage',
'--cover-package=geonode',
'--cover-inclusive',
'--cover-tests',
'--detailed-errors',
'--with-xunit',
# This is very beautiful/usable but requires: pip install rudolf
# '--with-color',
# The settings below are useful while debugging test failures or errors
# '--failed',
# '--pdb-failures',
# '--stop',
# '--pdb',
]
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, "site_media", "media")
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = "/site_media/media/"
# Absolute path to the directory that holds static files like app media.
# Example: "/home/media/media.lawrence.com/apps/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, "site_media", "static")
# URL that handles the static files like app media.
# Example: "http://media.lawrence.com"
STATIC_URL = "/media/"
# Additional directories which hold static files
STATICFILES_DIRS = [
os.path.join(PROJECT_ROOT, "media"),
]
GEONODE_UPLOAD_PATH = os.path.join(STATIC_URL, "upload/")
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = os.path.join(STATIC_URL, "admin/")
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'myv-y4#7j-d*p-__@j#*3z@!y24fz8%^z2v6atuy4bo9vqr1_a'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
#'django.template.loaders.eggs.load_template_source',
'django.template.loaders.app_directories.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.core.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"geonode.maps.context_processors.resource_urls",
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
# This isn't required for running the geonode site, but it when running sites that inherit the geonode.settings module.
LOCALE_PATHS = (
os.path.join(PROJECT_ROOT, "locale"),
os.path.join(PROJECT_ROOT, "maps", "locale"),
)
ROOT_URLCONF = 'geonode.urls'
# Note that Django automatically includes the "templates" dir in all the
# INSTALLED_APPS, so there is no need to add maps/templates or admin/templates
TEMPLATE_DIRS = (
os.path.join(PROJECT_ROOT,"templates"),
)
# The FULLY QUALIFIED url to the GeoServer instance for this GeoNode.
GEOSERVER_BASE_URL = "http://localhost:8001/geoserver/"
# Default password for the geoserver admin user, autogenerated during bootstrap
GEOSERVER_TOKEN = open(os.path.join(PROJECT_ROOT,"..","..", "..","geoserver_token")).readline()[0:-1]
# The username and password for a user that can add and edit layer details on GeoServer
GEOSERVER_CREDENTIALS = "geoserver_admin", GEOSERVER_TOKEN
# The FULLY QUALIFIED url to the GeoNetwork instance for this GeoNode
GEONETWORK_BASE_URL = "http://localhost:8001/geonetwork/"
# The username and password for a user with write access to GeoNetwork
GEONETWORK_CREDENTIALS = "admin", "admin"
AUTHENTICATION_BACKENDS = ('geonode.core.auth.GranularBackend',)
GOOGLE_API_KEY = "ABQIAAAAkofooZxTfcCv9Wi3zzGTVxTnme5EwnLVtEDGnh-lFVzRJhbdQhQgAhB1eT_2muZtc0dl-ZSWrtzmrw"
LOGIN_REDIRECT_URL = "/"
DEFAULT_LAYERS_OWNER='admin'
# Where should newly created maps be focused?
DEFAULT_MAP_CENTER = (-84.7, 12.8)
# How tightly zoomed should newly created maps be?
# 0 = entire world;
# maximum zoom is between 12 and 15 (for Google Maps, coverage varies by area)
DEFAULT_MAP_ZOOM = 7
DEFAULT_LAYER_SOURCE = {
"ptype":"gxp_wmscsource",
"url":"/geoserver/wms",
"restUrl": "/gs/rest"
}
MAP_BASELAYERS = [{
"source": {"ptype": "gx_olsource"},
"type":"OpenLayers.Layer",
"args":["No background"],
"visibility": False,
"fixed": True,
"group":"background"
},{
"source": { "ptype":"gx_olsource"},
"type":"OpenLayers.Layer.OSM",
"args":["OpenStreetMap"],
"visibility": True,
"fixed": True,
"group":"background"
},{
"source": {"ptype":"gx_olsource"},
"type":"OpenLayers.Layer.WMS",
"group":"background",
"visibility": False,
"fixed": True,
"args":[
"bluemarble",
"http://maps.opengeo.org/geowebcache/service/wms",
{
"layers":["bluemarble"],
"format":"image/png",
"tiled": True,
"tilesOrigin":[-20037508.34,-20037508.34]
},
{"buffer":0}
]
}]
# NAVBAR expects a dict of dicts or a path to an ini file
NAVBAR = \
{'maps': {'id': '%sLink',
'item_class': '',
'link_class': '',
'text': 'Maps',
'url': 'geonode.maps.views.maps'},
'data': {'id': '%sLink',
'item_class': '',
'link_class': '',
'text': 'Data',
'url': "geonode.maps.views.browse_data"},
# 'index': {'id': '%sLink',
# 'item_class': '',
# 'link_class': '',
# 'text': 'Featured Map',
# 'url': 'geonode.views.index'},
'master': {'id': '%sLink',
'item_class': '',
'link_class': '',
'text': 'This page has no tab for this navigation'},
'meta': {'active_class': 'here',
'default_id': '%sLink',
'default_item_class': '',
'default_link_class': '',
'end_class': 'last',
'id': '%sLink',
'item_class': '',
'link_class': '',
'visible': 'data\nmaps'}}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.admin',
'django.contrib.sitemaps',
'django.contrib.staticfiles',
'django_extensions',
'registration',
'profiles',
'avatar',
'geonode.core',
'geonode.maps',
'geonode.proxy',
'geonode'
)
def get_user_url(u):
from django.contrib.sites.models import Site
s = Site.objects.get_current()
return "http://" + s.domain + "/profiles/" + u.username
ABSOLUTE_URL_OVERRIDES = {
'auth.user': get_user_url
}
AUTH_PROFILE_MODULE = 'maps.Contact'
REGISTRATION_OPEN = False
SERVE_MEDIA = DEBUG
#GEONODE_CLIENT_LOCATION = "http://localhost:8001/geonode-client/"
GEONODE_CLIENT_LOCATION = "/media/static/"
#Import uploaded shapefiles into a database such as PostGIS?
DB_DATASTORE=False
#Database datastore connection settings
DB_DATASTORE_NAME = ''
DB_DATASTORE_USER = ''
DB_DATASTORE_PASSWORD = ''
DB_DATASTORE_HOST = ''
DB_DATASTORE_PORT = ''
DB_DATASTORE_TYPE=''
try:
from local_settings import *
except ImportError:
pass
| gpl-3.0 | -9,179,246,854,442,891,000 | 28.778523 | 119 | 0.654046 | false |
simonmonk/prog_pi_ed2 | 07_05_kitchen_sink.py | 1 | 1654 | #07_05_kitchen_sink.py
from tkinter import *
class App:
def __init__(self, master):
frame = Frame(master)
frame.pack()
Label(frame, text='Label').grid(row=0, column=0)
        Entry(frame).grid(row=0, column=1)  # Entry has no 'text' option; preset contents via textvariable or insert()
Button(frame, text='Button').grid(row=0, column=2)
check_var = StringVar()
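        # check_var receives 'Y' when the box is checked, 'N' when unchecked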
check = Checkbutton(frame, text='Checkbutton', variable=check_var, onvalue='Y', offvalue='N')
check.grid(row=1, column=0)
#Listbox
listbox = Listbox(frame, height=3, selectmode=SINGLE)
for item in ['red', 'green', 'blue', 'yellow', 'pink']:
listbox.insert(END, item)
listbox.grid(row=1, column=1)
#Radiobutton set
radio_frame = Frame(frame)
radio_selection = StringVar()
b1 = Radiobutton(radio_frame, text='portrait',
variable=radio_selection, value='P')
b1.pack(side=LEFT)
b2 = Radiobutton(radio_frame, text='landscape',
variable=radio_selection, value='L')
b2.pack(side=LEFT)
radio_frame.grid(row=1, column=2)
#Scale
scale_var = IntVar()
Scale(frame, from_=1, to=10, orient=HORIZONTAL,
variable=scale_var).grid(row=2, column=0)
Label(frame, textvariable=scale_var,
font=("Helvetica", 36)).grid(row=2, column=1)
#Message
message = Message(frame,
text='Multiline Message Area')
message.grid(row=2, column=2)
#Spinbox
Spinbox(frame, values=('a','b','c')).grid(row=3)
root = Tk()
root.wm_title('Kitchen Sink')
app = App(root)
root.mainloop()
| mit | 872,859,228,343,858,000 | 34.956522 | 101 | 0.577993 | false |
matheuskiser/pdx_code_guild | django/pointme/points/views.py | 1 | 4113 | from django.shortcuts import render, redirect, render_to_response
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.http import HttpResponse
from django.db.models import Q
import json
from points.models import Place
from points.forms import PlaceForm
from pygeocoder import Geocoder
def show_places(request):
places = Place.objects.all()
# Render the template depending on the context.
return render(request, 'points/index.html', {'places': places})
def my_places(request):
# # Entry.objects.all().filter(pub_date__year=2006)
# user = request.user
# places = Place.objects.all().filter(author=user)
#
# # Render the template depending on the context.
# return render(request, 'points/my_places.html', {'places': places})
if request.is_ajax():
upper_left_lat = request.GET['upper_left_lat']
upper_left_lng = request.GET['upper_left_lng']
lower_left_lat = request.GET['lower_left_lat']
lower_left_lng = request.GET['lower_left_lng']
user = request.user
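        # keep only this user's places whose coordinates fall inside the visible map bounds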
places = Place.objects.all().filter(latitude__gte=lower_left_lat, longitude__gte=lower_left_lng,
latitude__lte=upper_left_lat, longitude__lte=upper_left_lng,
author=user)
spots = []
for place in places:
temp = {}
temp['id'] = place.id
temp['address'] = place.address
temp['name'] = place.name
temp['like'] = place.like
temp['user_name'] = place.author.username
spots.append(temp)
return HttpResponse(json.dumps(spots))
# Render the template depending on the context.
return render(request, 'points/my_places.html')
def search_results(request):
query = request.GET['search-query']
places = Place.objects.filter(Q(name__icontains=query) | Q(address__icontains=query))
return render(request, 'points/search_results.html', {'places': places, 'query': query})
@login_required()
def add_place(request):
form = PlaceForm(request.POST or None)
if form.is_valid():
place = form.save(commit=False)
place.author = request.user
results = Geocoder.geocode(place.address)
lat, lng = results[0].coordinates
place.latitude = lat
place.longitude = lng
place.save()
return redirect('../../points/')
return render_to_response('points/add_place.html', {'form': form}, context_instance=RequestContext(request))
def map_view(request):
if request.is_ajax():
upper_left_lat = request.GET['upper_left_lat']
upper_left_lng = request.GET['upper_left_lng']
lower_left_lat = request.GET['lower_left_lat']
lower_left_lng = request.GET['lower_left_lng']
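        # same bounding-box filter as my_places, but across all users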
places = Place.objects.all().filter(latitude__gte=lower_left_lat, longitude__gte=lower_left_lng,
latitude__lte=upper_left_lat, longitude__lte=upper_left_lng)
spots = []
for place in places:
temp = {}
temp['id'] = place.id
temp['address'] = place.address
temp['name'] = place.name
temp['like'] = place.like
temp['user_name'] = place.author.username
temp['comment'] = place.comment
spots.append(temp)
return HttpResponse(json.dumps(spots))
# Render the template depending on the context.
return render(request, 'points/map_view.html')
def get_places(request):
if request.is_ajax():
places = Place.objects.all()
spots = []
for place in places:
temp = {}
temp['id'] = place.id
temp['address'] = place.address
temp['name'] = place.name
temp['like'] = place.like
temp['user_name'] = place.author.username
temp['comment'] = place.comment
spots.append(temp)
return HttpResponse(json.dumps(spots))
return HttpResponse("0") | mit | -8,677,430,849,673,660,000 | 31.912 | 112 | 0.605398 | false |
amonmoce/corba_examples | omniORBpy-4.2.1/build/python/COS/CosNotifyComm_idl.py | 1 | 37450 | # Python stubs generated by omniidl from /usr/local/share/idl/omniORB/COS/CosNotifyComm.idl
# DO NOT EDIT THIS FILE!
import omniORB, _omnipy
from omniORB import CORBA, PortableServer
_0_CORBA = CORBA
_omnipy.checkVersion(4,2, __file__, 1)
try:
property
except NameError:
def property(*args):
return None
# #include "CosNotification.idl"
import CosNotification_idl
_0_CosNotification = omniORB.openModule("CosNotification")
_0_CosNotification__POA = omniORB.openModule("CosNotification__POA")
# #include "CosEventComm.idl"
import CosEventComm_idl
_0_CosEventComm = omniORB.openModule("CosEventComm")
_0_CosEventComm__POA = omniORB.openModule("CosEventComm__POA")
#
# Start of module "CosNotifyComm"
#
__name__ = "CosNotifyComm"
_0_CosNotifyComm = omniORB.openModule("CosNotifyComm", r"/usr/local/share/idl/omniORB/COS/CosNotifyComm.idl")
_0_CosNotifyComm__POA = omniORB.openModule("CosNotifyComm__POA", r"/usr/local/share/idl/omniORB/COS/CosNotifyComm.idl")
# exception InvalidEventType
_0_CosNotifyComm.InvalidEventType = omniORB.newEmptyClass()
class InvalidEventType (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosNotifyComm/InvalidEventType:1.0"
def __init__(self, type):
CORBA.UserException.__init__(self, type)
self.type = type
_0_CosNotifyComm.InvalidEventType = InvalidEventType
_0_CosNotifyComm._d_InvalidEventType = (omniORB.tcInternal.tv_except, InvalidEventType, InvalidEventType._NP_RepositoryId, "InvalidEventType", "type", omniORB.typeMapping["IDL:omg.org/CosNotification/EventType:1.0"])
_0_CosNotifyComm._tc_InvalidEventType = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_InvalidEventType)
omniORB.registerType(InvalidEventType._NP_RepositoryId, _0_CosNotifyComm._d_InvalidEventType, _0_CosNotifyComm._tc_InvalidEventType)
del InvalidEventType
# interface NotifyPublish
_0_CosNotifyComm._d_NotifyPublish = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/NotifyPublish:1.0", "NotifyPublish")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/NotifyPublish:1.0"] = _0_CosNotifyComm._d_NotifyPublish
_0_CosNotifyComm.NotifyPublish = omniORB.newEmptyClass()
class NotifyPublish :
_NP_RepositoryId = _0_CosNotifyComm._d_NotifyPublish[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.NotifyPublish = NotifyPublish
_0_CosNotifyComm._tc_NotifyPublish = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_NotifyPublish)
omniORB.registerType(NotifyPublish._NP_RepositoryId, _0_CosNotifyComm._d_NotifyPublish, _0_CosNotifyComm._tc_NotifyPublish)
# NotifyPublish operations and attributes
NotifyPublish._d_offer_change = ((omniORB.typeMapping["IDL:omg.org/CosNotification/EventTypeSeq:1.0"], omniORB.typeMapping["IDL:omg.org/CosNotification/EventTypeSeq:1.0"]), (), {_0_CosNotifyComm.InvalidEventType._NP_RepositoryId: _0_CosNotifyComm._d_InvalidEventType})
# NotifyPublish object reference
class _objref_NotifyPublish (CORBA.Object):
_NP_RepositoryId = NotifyPublish._NP_RepositoryId
def __init__(self, obj):
CORBA.Object.__init__(self, obj)
def offer_change(self, *args):
return self._obj.invoke("offer_change", _0_CosNotifyComm.NotifyPublish._d_offer_change, args)
omniORB.registerObjref(NotifyPublish._NP_RepositoryId, _objref_NotifyPublish)
_0_CosNotifyComm._objref_NotifyPublish = _objref_NotifyPublish
del NotifyPublish, _objref_NotifyPublish
# NotifyPublish skeleton
__name__ = "CosNotifyComm__POA"
class NotifyPublish (PortableServer.Servant):
_NP_RepositoryId = _0_CosNotifyComm.NotifyPublish._NP_RepositoryId
_omni_op_d = {"offer_change": _0_CosNotifyComm.NotifyPublish._d_offer_change}
NotifyPublish._omni_skeleton = NotifyPublish
_0_CosNotifyComm__POA.NotifyPublish = NotifyPublish
omniORB.registerSkeleton(NotifyPublish._NP_RepositoryId, NotifyPublish)
del NotifyPublish
__name__ = "CosNotifyComm"
# interface NotifySubscribe
_0_CosNotifyComm._d_NotifySubscribe = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/NotifySubscribe:1.0", "NotifySubscribe")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/NotifySubscribe:1.0"] = _0_CosNotifyComm._d_NotifySubscribe
_0_CosNotifyComm.NotifySubscribe = omniORB.newEmptyClass()
class NotifySubscribe :
_NP_RepositoryId = _0_CosNotifyComm._d_NotifySubscribe[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.NotifySubscribe = NotifySubscribe
_0_CosNotifyComm._tc_NotifySubscribe = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_NotifySubscribe)
omniORB.registerType(NotifySubscribe._NP_RepositoryId, _0_CosNotifyComm._d_NotifySubscribe, _0_CosNotifyComm._tc_NotifySubscribe)
# NotifySubscribe operations and attributes
NotifySubscribe._d_subscription_change = ((omniORB.typeMapping["IDL:omg.org/CosNotification/EventTypeSeq:1.0"], omniORB.typeMapping["IDL:omg.org/CosNotification/EventTypeSeq:1.0"]), (), {_0_CosNotifyComm.InvalidEventType._NP_RepositoryId: _0_CosNotifyComm._d_InvalidEventType})
# NotifySubscribe object reference
class _objref_NotifySubscribe (CORBA.Object):
_NP_RepositoryId = NotifySubscribe._NP_RepositoryId
def __init__(self, obj):
CORBA.Object.__init__(self, obj)
def subscription_change(self, *args):
return self._obj.invoke("subscription_change", _0_CosNotifyComm.NotifySubscribe._d_subscription_change, args)
omniORB.registerObjref(NotifySubscribe._NP_RepositoryId, _objref_NotifySubscribe)
_0_CosNotifyComm._objref_NotifySubscribe = _objref_NotifySubscribe
del NotifySubscribe, _objref_NotifySubscribe
# NotifySubscribe skeleton
__name__ = "CosNotifyComm__POA"
class NotifySubscribe (PortableServer.Servant):
_NP_RepositoryId = _0_CosNotifyComm.NotifySubscribe._NP_RepositoryId
_omni_op_d = {"subscription_change": _0_CosNotifyComm.NotifySubscribe._d_subscription_change}
NotifySubscribe._omni_skeleton = NotifySubscribe
_0_CosNotifyComm__POA.NotifySubscribe = NotifySubscribe
omniORB.registerSkeleton(NotifySubscribe._NP_RepositoryId, NotifySubscribe)
del NotifySubscribe
__name__ = "CosNotifyComm"
# interface PushConsumer
_0_CosNotifyComm._d_PushConsumer = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/PushConsumer:1.0", "PushConsumer")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/PushConsumer:1.0"] = _0_CosNotifyComm._d_PushConsumer
_0_CosNotifyComm.PushConsumer = omniORB.newEmptyClass()
class PushConsumer (_0_CosNotifyComm.NotifyPublish, _0_CosEventComm.PushConsumer):
_NP_RepositoryId = _0_CosNotifyComm._d_PushConsumer[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.PushConsumer = PushConsumer
_0_CosNotifyComm._tc_PushConsumer = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_PushConsumer)
omniORB.registerType(PushConsumer._NP_RepositoryId, _0_CosNotifyComm._d_PushConsumer, _0_CosNotifyComm._tc_PushConsumer)
# PushConsumer object reference
class _objref_PushConsumer (_0_CosNotifyComm._objref_NotifyPublish, _0_CosEventComm._objref_PushConsumer):
_NP_RepositoryId = PushConsumer._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifyPublish.__init__(self, obj)
_0_CosEventComm._objref_PushConsumer.__init__(self, obj)
omniORB.registerObjref(PushConsumer._NP_RepositoryId, _objref_PushConsumer)
_0_CosNotifyComm._objref_PushConsumer = _objref_PushConsumer
del PushConsumer, _objref_PushConsumer
# PushConsumer skeleton
__name__ = "CosNotifyComm__POA"
class PushConsumer (_0_CosNotifyComm__POA.NotifyPublish, _0_CosEventComm__POA.PushConsumer):
_NP_RepositoryId = _0_CosNotifyComm.PushConsumer._NP_RepositoryId
_omni_op_d = {}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifyPublish._omni_op_d)
_omni_op_d.update(_0_CosEventComm__POA.PushConsumer._omni_op_d)
PushConsumer._omni_skeleton = PushConsumer
_0_CosNotifyComm__POA.PushConsumer = PushConsumer
omniORB.registerSkeleton(PushConsumer._NP_RepositoryId, PushConsumer)
del PushConsumer
__name__ = "CosNotifyComm"
# interface PullConsumer
_0_CosNotifyComm._d_PullConsumer = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/PullConsumer:1.0", "PullConsumer")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/PullConsumer:1.0"] = _0_CosNotifyComm._d_PullConsumer
_0_CosNotifyComm.PullConsumer = omniORB.newEmptyClass()
class PullConsumer (_0_CosNotifyComm.NotifyPublish, _0_CosEventComm.PullConsumer):
_NP_RepositoryId = _0_CosNotifyComm._d_PullConsumer[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.PullConsumer = PullConsumer
_0_CosNotifyComm._tc_PullConsumer = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_PullConsumer)
omniORB.registerType(PullConsumer._NP_RepositoryId, _0_CosNotifyComm._d_PullConsumer, _0_CosNotifyComm._tc_PullConsumer)
# PullConsumer object reference
class _objref_PullConsumer (_0_CosNotifyComm._objref_NotifyPublish, _0_CosEventComm._objref_PullConsumer):
_NP_RepositoryId = PullConsumer._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifyPublish.__init__(self, obj)
_0_CosEventComm._objref_PullConsumer.__init__(self, obj)
omniORB.registerObjref(PullConsumer._NP_RepositoryId, _objref_PullConsumer)
_0_CosNotifyComm._objref_PullConsumer = _objref_PullConsumer
del PullConsumer, _objref_PullConsumer
# PullConsumer skeleton
__name__ = "CosNotifyComm__POA"
class PullConsumer (_0_CosNotifyComm__POA.NotifyPublish, _0_CosEventComm__POA.PullConsumer):
_NP_RepositoryId = _0_CosNotifyComm.PullConsumer._NP_RepositoryId
_omni_op_d = {}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifyPublish._omni_op_d)
_omni_op_d.update(_0_CosEventComm__POA.PullConsumer._omni_op_d)
PullConsumer._omni_skeleton = PullConsumer
_0_CosNotifyComm__POA.PullConsumer = PullConsumer
omniORB.registerSkeleton(PullConsumer._NP_RepositoryId, PullConsumer)
del PullConsumer
__name__ = "CosNotifyComm"
# interface PullSupplier
_0_CosNotifyComm._d_PullSupplier = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/PullSupplier:1.0", "PullSupplier")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/PullSupplier:1.0"] = _0_CosNotifyComm._d_PullSupplier
_0_CosNotifyComm.PullSupplier = omniORB.newEmptyClass()
class PullSupplier (_0_CosNotifyComm.NotifySubscribe, _0_CosEventComm.PullSupplier):
_NP_RepositoryId = _0_CosNotifyComm._d_PullSupplier[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.PullSupplier = PullSupplier
_0_CosNotifyComm._tc_PullSupplier = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_PullSupplier)
omniORB.registerType(PullSupplier._NP_RepositoryId, _0_CosNotifyComm._d_PullSupplier, _0_CosNotifyComm._tc_PullSupplier)
# PullSupplier object reference
class _objref_PullSupplier (_0_CosNotifyComm._objref_NotifySubscribe, _0_CosEventComm._objref_PullSupplier):
_NP_RepositoryId = PullSupplier._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifySubscribe.__init__(self, obj)
_0_CosEventComm._objref_PullSupplier.__init__(self, obj)
omniORB.registerObjref(PullSupplier._NP_RepositoryId, _objref_PullSupplier)
_0_CosNotifyComm._objref_PullSupplier = _objref_PullSupplier
del PullSupplier, _objref_PullSupplier
# PullSupplier skeleton
__name__ = "CosNotifyComm__POA"
class PullSupplier (_0_CosNotifyComm__POA.NotifySubscribe, _0_CosEventComm__POA.PullSupplier):
_NP_RepositoryId = _0_CosNotifyComm.PullSupplier._NP_RepositoryId
_omni_op_d = {}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifySubscribe._omni_op_d)
_omni_op_d.update(_0_CosEventComm__POA.PullSupplier._omni_op_d)
PullSupplier._omni_skeleton = PullSupplier
_0_CosNotifyComm__POA.PullSupplier = PullSupplier
omniORB.registerSkeleton(PullSupplier._NP_RepositoryId, PullSupplier)
del PullSupplier
__name__ = "CosNotifyComm"
# interface PushSupplier
_0_CosNotifyComm._d_PushSupplier = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/PushSupplier:1.0", "PushSupplier")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/PushSupplier:1.0"] = _0_CosNotifyComm._d_PushSupplier
_0_CosNotifyComm.PushSupplier = omniORB.newEmptyClass()
class PushSupplier (_0_CosNotifyComm.NotifySubscribe, _0_CosEventComm.PushSupplier):
_NP_RepositoryId = _0_CosNotifyComm._d_PushSupplier[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.PushSupplier = PushSupplier
_0_CosNotifyComm._tc_PushSupplier = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_PushSupplier)
omniORB.registerType(PushSupplier._NP_RepositoryId, _0_CosNotifyComm._d_PushSupplier, _0_CosNotifyComm._tc_PushSupplier)
# PushSupplier object reference
class _objref_PushSupplier (_0_CosNotifyComm._objref_NotifySubscribe, _0_CosEventComm._objref_PushSupplier):
_NP_RepositoryId = PushSupplier._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifySubscribe.__init__(self, obj)
_0_CosEventComm._objref_PushSupplier.__init__(self, obj)
omniORB.registerObjref(PushSupplier._NP_RepositoryId, _objref_PushSupplier)
_0_CosNotifyComm._objref_PushSupplier = _objref_PushSupplier
del PushSupplier, _objref_PushSupplier
# PushSupplier skeleton
__name__ = "CosNotifyComm__POA"
class PushSupplier (_0_CosNotifyComm__POA.NotifySubscribe, _0_CosEventComm__POA.PushSupplier):
_NP_RepositoryId = _0_CosNotifyComm.PushSupplier._NP_RepositoryId
_omni_op_d = {}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifySubscribe._omni_op_d)
_omni_op_d.update(_0_CosEventComm__POA.PushSupplier._omni_op_d)
PushSupplier._omni_skeleton = PushSupplier
_0_CosNotifyComm__POA.PushSupplier = PushSupplier
omniORB.registerSkeleton(PushSupplier._NP_RepositoryId, PushSupplier)
del PushSupplier
__name__ = "CosNotifyComm"
# interface StructuredPushConsumer
_0_CosNotifyComm._d_StructuredPushConsumer = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/StructuredPushConsumer:1.0", "StructuredPushConsumer")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/StructuredPushConsumer:1.0"] = _0_CosNotifyComm._d_StructuredPushConsumer
_0_CosNotifyComm.StructuredPushConsumer = omniORB.newEmptyClass()
class StructuredPushConsumer (_0_CosNotifyComm.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm._d_StructuredPushConsumer[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.StructuredPushConsumer = StructuredPushConsumer
_0_CosNotifyComm._tc_StructuredPushConsumer = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_StructuredPushConsumer)
omniORB.registerType(StructuredPushConsumer._NP_RepositoryId, _0_CosNotifyComm._d_StructuredPushConsumer, _0_CosNotifyComm._tc_StructuredPushConsumer)
# StructuredPushConsumer operations and attributes
StructuredPushConsumer._d_push_structured_event = ((omniORB.typeMapping["IDL:omg.org/CosNotification/StructuredEvent:1.0"], ), (), {_0_CosEventComm.Disconnected._NP_RepositoryId: _0_CosEventComm._d_Disconnected})
StructuredPushConsumer._d_disconnect_structured_push_consumer = ((), (), None)
# StructuredPushConsumer object reference
class _objref_StructuredPushConsumer (_0_CosNotifyComm._objref_NotifyPublish):
_NP_RepositoryId = StructuredPushConsumer._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifyPublish.__init__(self, obj)
def push_structured_event(self, *args):
return self._obj.invoke("push_structured_event", _0_CosNotifyComm.StructuredPushConsumer._d_push_structured_event, args)
def disconnect_structured_push_consumer(self, *args):
return self._obj.invoke("disconnect_structured_push_consumer", _0_CosNotifyComm.StructuredPushConsumer._d_disconnect_structured_push_consumer, args)
omniORB.registerObjref(StructuredPushConsumer._NP_RepositoryId, _objref_StructuredPushConsumer)
_0_CosNotifyComm._objref_StructuredPushConsumer = _objref_StructuredPushConsumer
del StructuredPushConsumer, _objref_StructuredPushConsumer
# StructuredPushConsumer skeleton
__name__ = "CosNotifyComm__POA"
class StructuredPushConsumer (_0_CosNotifyComm__POA.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm.StructuredPushConsumer._NP_RepositoryId
_omni_op_d = {"push_structured_event": _0_CosNotifyComm.StructuredPushConsumer._d_push_structured_event, "disconnect_structured_push_consumer": _0_CosNotifyComm.StructuredPushConsumer._d_disconnect_structured_push_consumer}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifyPublish._omni_op_d)
StructuredPushConsumer._omni_skeleton = StructuredPushConsumer
_0_CosNotifyComm__POA.StructuredPushConsumer = StructuredPushConsumer
omniORB.registerSkeleton(StructuredPushConsumer._NP_RepositoryId, StructuredPushConsumer)
del StructuredPushConsumer
__name__ = "CosNotifyComm"
# interface StructuredPullConsumer
_0_CosNotifyComm._d_StructuredPullConsumer = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/StructuredPullConsumer:1.0", "StructuredPullConsumer")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/StructuredPullConsumer:1.0"] = _0_CosNotifyComm._d_StructuredPullConsumer
_0_CosNotifyComm.StructuredPullConsumer = omniORB.newEmptyClass()
class StructuredPullConsumer (_0_CosNotifyComm.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm._d_StructuredPullConsumer[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.StructuredPullConsumer = StructuredPullConsumer
_0_CosNotifyComm._tc_StructuredPullConsumer = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_StructuredPullConsumer)
omniORB.registerType(StructuredPullConsumer._NP_RepositoryId, _0_CosNotifyComm._d_StructuredPullConsumer, _0_CosNotifyComm._tc_StructuredPullConsumer)
# StructuredPullConsumer operations and attributes
StructuredPullConsumer._d_disconnect_structured_pull_consumer = ((), (), None)
# StructuredPullConsumer object reference
class _objref_StructuredPullConsumer (_0_CosNotifyComm._objref_NotifyPublish):
_NP_RepositoryId = StructuredPullConsumer._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifyPublish.__init__(self, obj)
def disconnect_structured_pull_consumer(self, *args):
return self._obj.invoke("disconnect_structured_pull_consumer", _0_CosNotifyComm.StructuredPullConsumer._d_disconnect_structured_pull_consumer, args)
omniORB.registerObjref(StructuredPullConsumer._NP_RepositoryId, _objref_StructuredPullConsumer)
_0_CosNotifyComm._objref_StructuredPullConsumer = _objref_StructuredPullConsumer
del StructuredPullConsumer, _objref_StructuredPullConsumer
# StructuredPullConsumer skeleton
__name__ = "CosNotifyComm__POA"
class StructuredPullConsumer (_0_CosNotifyComm__POA.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm.StructuredPullConsumer._NP_RepositoryId
_omni_op_d = {"disconnect_structured_pull_consumer": _0_CosNotifyComm.StructuredPullConsumer._d_disconnect_structured_pull_consumer}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifyPublish._omni_op_d)
StructuredPullConsumer._omni_skeleton = StructuredPullConsumer
_0_CosNotifyComm__POA.StructuredPullConsumer = StructuredPullConsumer
omniORB.registerSkeleton(StructuredPullConsumer._NP_RepositoryId, StructuredPullConsumer)
del StructuredPullConsumer
__name__ = "CosNotifyComm"
# interface StructuredPullSupplier
_0_CosNotifyComm._d_StructuredPullSupplier = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/StructuredPullSupplier:1.0", "StructuredPullSupplier")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/StructuredPullSupplier:1.0"] = _0_CosNotifyComm._d_StructuredPullSupplier
_0_CosNotifyComm.StructuredPullSupplier = omniORB.newEmptyClass()
class StructuredPullSupplier (_0_CosNotifyComm.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm._d_StructuredPullSupplier[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.StructuredPullSupplier = StructuredPullSupplier
_0_CosNotifyComm._tc_StructuredPullSupplier = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_StructuredPullSupplier)
omniORB.registerType(StructuredPullSupplier._NP_RepositoryId, _0_CosNotifyComm._d_StructuredPullSupplier, _0_CosNotifyComm._tc_StructuredPullSupplier)
# StructuredPullSupplier operations and attributes
StructuredPullSupplier._d_pull_structured_event = ((), (omniORB.typeMapping["IDL:omg.org/CosNotification/StructuredEvent:1.0"], ), {_0_CosEventComm.Disconnected._NP_RepositoryId: _0_CosEventComm._d_Disconnected})
StructuredPullSupplier._d_try_pull_structured_event = ((), (omniORB.typeMapping["IDL:omg.org/CosNotification/StructuredEvent:1.0"], omniORB.tcInternal.tv_boolean), {_0_CosEventComm.Disconnected._NP_RepositoryId: _0_CosEventComm._d_Disconnected})
StructuredPullSupplier._d_disconnect_structured_pull_supplier = ((), (), None)
# StructuredPullSupplier object reference
class _objref_StructuredPullSupplier (_0_CosNotifyComm._objref_NotifySubscribe):
_NP_RepositoryId = StructuredPullSupplier._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifySubscribe.__init__(self, obj)
def pull_structured_event(self, *args):
return self._obj.invoke("pull_structured_event", _0_CosNotifyComm.StructuredPullSupplier._d_pull_structured_event, args)
def try_pull_structured_event(self, *args):
return self._obj.invoke("try_pull_structured_event", _0_CosNotifyComm.StructuredPullSupplier._d_try_pull_structured_event, args)
def disconnect_structured_pull_supplier(self, *args):
return self._obj.invoke("disconnect_structured_pull_supplier", _0_CosNotifyComm.StructuredPullSupplier._d_disconnect_structured_pull_supplier, args)
omniORB.registerObjref(StructuredPullSupplier._NP_RepositoryId, _objref_StructuredPullSupplier)
_0_CosNotifyComm._objref_StructuredPullSupplier = _objref_StructuredPullSupplier
del StructuredPullSupplier, _objref_StructuredPullSupplier
# StructuredPullSupplier skeleton
__name__ = "CosNotifyComm__POA"
class StructuredPullSupplier (_0_CosNotifyComm__POA.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm.StructuredPullSupplier._NP_RepositoryId
_omni_op_d = {"pull_structured_event": _0_CosNotifyComm.StructuredPullSupplier._d_pull_structured_event, "try_pull_structured_event": _0_CosNotifyComm.StructuredPullSupplier._d_try_pull_structured_event, "disconnect_structured_pull_supplier": _0_CosNotifyComm.StructuredPullSupplier._d_disconnect_structured_pull_supplier}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifySubscribe._omni_op_d)
StructuredPullSupplier._omni_skeleton = StructuredPullSupplier
_0_CosNotifyComm__POA.StructuredPullSupplier = StructuredPullSupplier
omniORB.registerSkeleton(StructuredPullSupplier._NP_RepositoryId, StructuredPullSupplier)
del StructuredPullSupplier
__name__ = "CosNotifyComm"
# interface StructuredPushSupplier
_0_CosNotifyComm._d_StructuredPushSupplier = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/StructuredPushSupplier:1.0", "StructuredPushSupplier")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/StructuredPushSupplier:1.0"] = _0_CosNotifyComm._d_StructuredPushSupplier
_0_CosNotifyComm.StructuredPushSupplier = omniORB.newEmptyClass()
class StructuredPushSupplier (_0_CosNotifyComm.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm._d_StructuredPushSupplier[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.StructuredPushSupplier = StructuredPushSupplier
_0_CosNotifyComm._tc_StructuredPushSupplier = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_StructuredPushSupplier)
omniORB.registerType(StructuredPushSupplier._NP_RepositoryId, _0_CosNotifyComm._d_StructuredPushSupplier, _0_CosNotifyComm._tc_StructuredPushSupplier)
# StructuredPushSupplier operations and attributes
StructuredPushSupplier._d_disconnect_structured_push_supplier = ((), (), None)
# StructuredPushSupplier object reference
class _objref_StructuredPushSupplier (_0_CosNotifyComm._objref_NotifySubscribe):
_NP_RepositoryId = StructuredPushSupplier._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifySubscribe.__init__(self, obj)
def disconnect_structured_push_supplier(self, *args):
return self._obj.invoke("disconnect_structured_push_supplier", _0_CosNotifyComm.StructuredPushSupplier._d_disconnect_structured_push_supplier, args)
omniORB.registerObjref(StructuredPushSupplier._NP_RepositoryId, _objref_StructuredPushSupplier)
_0_CosNotifyComm._objref_StructuredPushSupplier = _objref_StructuredPushSupplier
del StructuredPushSupplier, _objref_StructuredPushSupplier
# StructuredPushSupplier skeleton
__name__ = "CosNotifyComm__POA"
class StructuredPushSupplier (_0_CosNotifyComm__POA.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm.StructuredPushSupplier._NP_RepositoryId
_omni_op_d = {"disconnect_structured_push_supplier": _0_CosNotifyComm.StructuredPushSupplier._d_disconnect_structured_push_supplier}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifySubscribe._omni_op_d)
StructuredPushSupplier._omni_skeleton = StructuredPushSupplier
_0_CosNotifyComm__POA.StructuredPushSupplier = StructuredPushSupplier
omniORB.registerSkeleton(StructuredPushSupplier._NP_RepositoryId, StructuredPushSupplier)
del StructuredPushSupplier
__name__ = "CosNotifyComm"
# interface SequencePushConsumer
_0_CosNotifyComm._d_SequencePushConsumer = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/SequencePushConsumer:1.0", "SequencePushConsumer")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/SequencePushConsumer:1.0"] = _0_CosNotifyComm._d_SequencePushConsumer
_0_CosNotifyComm.SequencePushConsumer = omniORB.newEmptyClass()
class SequencePushConsumer (_0_CosNotifyComm.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm._d_SequencePushConsumer[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.SequencePushConsumer = SequencePushConsumer
_0_CosNotifyComm._tc_SequencePushConsumer = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_SequencePushConsumer)
omniORB.registerType(SequencePushConsumer._NP_RepositoryId, _0_CosNotifyComm._d_SequencePushConsumer, _0_CosNotifyComm._tc_SequencePushConsumer)
# SequencePushConsumer operations and attributes
SequencePushConsumer._d_push_structured_events = ((omniORB.typeMapping["IDL:omg.org/CosNotification/EventBatch:1.0"], ), (), {_0_CosEventComm.Disconnected._NP_RepositoryId: _0_CosEventComm._d_Disconnected})
SequencePushConsumer._d_disconnect_sequence_push_consumer = ((), (), None)
# SequencePushConsumer object reference
class _objref_SequencePushConsumer (_0_CosNotifyComm._objref_NotifyPublish):
_NP_RepositoryId = SequencePushConsumer._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifyPublish.__init__(self, obj)
def push_structured_events(self, *args):
return self._obj.invoke("push_structured_events", _0_CosNotifyComm.SequencePushConsumer._d_push_structured_events, args)
def disconnect_sequence_push_consumer(self, *args):
return self._obj.invoke("disconnect_sequence_push_consumer", _0_CosNotifyComm.SequencePushConsumer._d_disconnect_sequence_push_consumer, args)
omniORB.registerObjref(SequencePushConsumer._NP_RepositoryId, _objref_SequencePushConsumer)
_0_CosNotifyComm._objref_SequencePushConsumer = _objref_SequencePushConsumer
del SequencePushConsumer, _objref_SequencePushConsumer
# SequencePushConsumer skeleton
__name__ = "CosNotifyComm__POA"
class SequencePushConsumer (_0_CosNotifyComm__POA.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm.SequencePushConsumer._NP_RepositoryId
_omni_op_d = {"push_structured_events": _0_CosNotifyComm.SequencePushConsumer._d_push_structured_events, "disconnect_sequence_push_consumer": _0_CosNotifyComm.SequencePushConsumer._d_disconnect_sequence_push_consumer}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifyPublish._omni_op_d)
SequencePushConsumer._omni_skeleton = SequencePushConsumer
_0_CosNotifyComm__POA.SequencePushConsumer = SequencePushConsumer
omniORB.registerSkeleton(SequencePushConsumer._NP_RepositoryId, SequencePushConsumer)
del SequencePushConsumer
__name__ = "CosNotifyComm"
# interface SequencePullConsumer
_0_CosNotifyComm._d_SequencePullConsumer = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/SequencePullConsumer:1.0", "SequencePullConsumer")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/SequencePullConsumer:1.0"] = _0_CosNotifyComm._d_SequencePullConsumer
_0_CosNotifyComm.SequencePullConsumer = omniORB.newEmptyClass()
class SequencePullConsumer (_0_CosNotifyComm.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm._d_SequencePullConsumer[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.SequencePullConsumer = SequencePullConsumer
_0_CosNotifyComm._tc_SequencePullConsumer = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_SequencePullConsumer)
omniORB.registerType(SequencePullConsumer._NP_RepositoryId, _0_CosNotifyComm._d_SequencePullConsumer, _0_CosNotifyComm._tc_SequencePullConsumer)
# SequencePullConsumer operations and attributes
SequencePullConsumer._d_disconnect_sequence_pull_consumer = ((), (), None)
# SequencePullConsumer object reference
class _objref_SequencePullConsumer (_0_CosNotifyComm._objref_NotifyPublish):
_NP_RepositoryId = SequencePullConsumer._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifyPublish.__init__(self, obj)
def disconnect_sequence_pull_consumer(self, *args):
return self._obj.invoke("disconnect_sequence_pull_consumer", _0_CosNotifyComm.SequencePullConsumer._d_disconnect_sequence_pull_consumer, args)
omniORB.registerObjref(SequencePullConsumer._NP_RepositoryId, _objref_SequencePullConsumer)
_0_CosNotifyComm._objref_SequencePullConsumer = _objref_SequencePullConsumer
del SequencePullConsumer, _objref_SequencePullConsumer
# SequencePullConsumer skeleton
__name__ = "CosNotifyComm__POA"
class SequencePullConsumer (_0_CosNotifyComm__POA.NotifyPublish):
_NP_RepositoryId = _0_CosNotifyComm.SequencePullConsumer._NP_RepositoryId
_omni_op_d = {"disconnect_sequence_pull_consumer": _0_CosNotifyComm.SequencePullConsumer._d_disconnect_sequence_pull_consumer}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifyPublish._omni_op_d)
SequencePullConsumer._omni_skeleton = SequencePullConsumer
_0_CosNotifyComm__POA.SequencePullConsumer = SequencePullConsumer
omniORB.registerSkeleton(SequencePullConsumer._NP_RepositoryId, SequencePullConsumer)
del SequencePullConsumer
__name__ = "CosNotifyComm"
# interface SequencePullSupplier
_0_CosNotifyComm._d_SequencePullSupplier = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/SequencePullSupplier:1.0", "SequencePullSupplier")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/SequencePullSupplier:1.0"] = _0_CosNotifyComm._d_SequencePullSupplier
_0_CosNotifyComm.SequencePullSupplier = omniORB.newEmptyClass()
class SequencePullSupplier (_0_CosNotifyComm.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm._d_SequencePullSupplier[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.SequencePullSupplier = SequencePullSupplier
_0_CosNotifyComm._tc_SequencePullSupplier = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_SequencePullSupplier)
omniORB.registerType(SequencePullSupplier._NP_RepositoryId, _0_CosNotifyComm._d_SequencePullSupplier, _0_CosNotifyComm._tc_SequencePullSupplier)
# SequencePullSupplier operations and attributes
SequencePullSupplier._d_pull_structured_events = ((omniORB.tcInternal.tv_long, ), (omniORB.typeMapping["IDL:omg.org/CosNotification/EventBatch:1.0"], ), {_0_CosEventComm.Disconnected._NP_RepositoryId: _0_CosEventComm._d_Disconnected})
SequencePullSupplier._d_try_pull_structured_events = ((omniORB.tcInternal.tv_long, ), (omniORB.typeMapping["IDL:omg.org/CosNotification/EventBatch:1.0"], omniORB.tcInternal.tv_boolean), {_0_CosEventComm.Disconnected._NP_RepositoryId: _0_CosEventComm._d_Disconnected})
SequencePullSupplier._d_disconnect_sequence_pull_supplier = ((), (), None)
# SequencePullSupplier object reference
class _objref_SequencePullSupplier (_0_CosNotifyComm._objref_NotifySubscribe):
_NP_RepositoryId = SequencePullSupplier._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifySubscribe.__init__(self, obj)
def pull_structured_events(self, *args):
return self._obj.invoke("pull_structured_events", _0_CosNotifyComm.SequencePullSupplier._d_pull_structured_events, args)
def try_pull_structured_events(self, *args):
return self._obj.invoke("try_pull_structured_events", _0_CosNotifyComm.SequencePullSupplier._d_try_pull_structured_events, args)
def disconnect_sequence_pull_supplier(self, *args):
return self._obj.invoke("disconnect_sequence_pull_supplier", _0_CosNotifyComm.SequencePullSupplier._d_disconnect_sequence_pull_supplier, args)
omniORB.registerObjref(SequencePullSupplier._NP_RepositoryId, _objref_SequencePullSupplier)
_0_CosNotifyComm._objref_SequencePullSupplier = _objref_SequencePullSupplier
del SequencePullSupplier, _objref_SequencePullSupplier
# SequencePullSupplier skeleton
__name__ = "CosNotifyComm__POA"
class SequencePullSupplier (_0_CosNotifyComm__POA.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm.SequencePullSupplier._NP_RepositoryId
_omni_op_d = {"pull_structured_events": _0_CosNotifyComm.SequencePullSupplier._d_pull_structured_events, "try_pull_structured_events": _0_CosNotifyComm.SequencePullSupplier._d_try_pull_structured_events, "disconnect_sequence_pull_supplier": _0_CosNotifyComm.SequencePullSupplier._d_disconnect_sequence_pull_supplier}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifySubscribe._omni_op_d)
SequencePullSupplier._omni_skeleton = SequencePullSupplier
_0_CosNotifyComm__POA.SequencePullSupplier = SequencePullSupplier
omniORB.registerSkeleton(SequencePullSupplier._NP_RepositoryId, SequencePullSupplier)
del SequencePullSupplier
__name__ = "CosNotifyComm"
# interface SequencePushSupplier
_0_CosNotifyComm._d_SequencePushSupplier = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosNotifyComm/SequencePushSupplier:1.0", "SequencePushSupplier")
omniORB.typeMapping["IDL:omg.org/CosNotifyComm/SequencePushSupplier:1.0"] = _0_CosNotifyComm._d_SequencePushSupplier
_0_CosNotifyComm.SequencePushSupplier = omniORB.newEmptyClass()
class SequencePushSupplier (_0_CosNotifyComm.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm._d_SequencePushSupplier[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosNotifyComm.SequencePushSupplier = SequencePushSupplier
_0_CosNotifyComm._tc_SequencePushSupplier = omniORB.tcInternal.createTypeCode(_0_CosNotifyComm._d_SequencePushSupplier)
omniORB.registerType(SequencePushSupplier._NP_RepositoryId, _0_CosNotifyComm._d_SequencePushSupplier, _0_CosNotifyComm._tc_SequencePushSupplier)
# SequencePushSupplier operations and attributes
SequencePushSupplier._d_disconnect_sequence_push_supplier = ((), (), None)
# SequencePushSupplier object reference
class _objref_SequencePushSupplier (_0_CosNotifyComm._objref_NotifySubscribe):
_NP_RepositoryId = SequencePushSupplier._NP_RepositoryId
def __init__(self, obj):
_0_CosNotifyComm._objref_NotifySubscribe.__init__(self, obj)
def disconnect_sequence_push_supplier(self, *args):
return self._obj.invoke("disconnect_sequence_push_supplier", _0_CosNotifyComm.SequencePushSupplier._d_disconnect_sequence_push_supplier, args)
omniORB.registerObjref(SequencePushSupplier._NP_RepositoryId, _objref_SequencePushSupplier)
_0_CosNotifyComm._objref_SequencePushSupplier = _objref_SequencePushSupplier
del SequencePushSupplier, _objref_SequencePushSupplier
# SequencePushSupplier skeleton
__name__ = "CosNotifyComm__POA"
class SequencePushSupplier (_0_CosNotifyComm__POA.NotifySubscribe):
_NP_RepositoryId = _0_CosNotifyComm.SequencePushSupplier._NP_RepositoryId
_omni_op_d = {"disconnect_sequence_push_supplier": _0_CosNotifyComm.SequencePushSupplier._d_disconnect_sequence_push_supplier}
_omni_op_d.update(_0_CosNotifyComm__POA.NotifySubscribe._omni_op_d)
SequencePushSupplier._omni_skeleton = SequencePushSupplier
_0_CosNotifyComm__POA.SequencePushSupplier = SequencePushSupplier
omniORB.registerSkeleton(SequencePushSupplier._NP_RepositoryId, SequencePushSupplier)
del SequencePushSupplier
__name__ = "CosNotifyComm"
#
# End of module "CosNotifyComm"
#
__name__ = "CosNotifyComm_idl"
_exported_modules = ( "CosNotifyComm", )
# The end.
| mit | -5,131,353,625,835,709,000 | 48.933333 | 326 | 0.786195 | false |
Invoiced/invoiced-python | invoiced/test/objects/test_payment_source.py | 1 | 3880 | import unittest
import invoiced
import responses
class TestPaymentSource(unittest.TestCase):
def setUp(self):
self.client = invoiced.Client('api_key')
@responses.activate
def test_endpoint_and_create_bank_account(self):
responses.add('POST',
'https://api.invoiced.com/customers/1234/payment_sources', # noqa
status=201,
json={"id": 123, "object": "bank_account"})
customer = invoiced.Customer(self.client, 1234)
source = customer.payment_sources().create()
# if true, endpoint on creation is correct
self.assertEqual(123, source.id)
# if true, endpoint after creation is correct
self.assertEqual("/customers/1234/bank_accounts/123",
source.endpoint())
@responses.activate
def test_endpoint_and_create_card(self):
responses.add('POST',
'https://api.invoiced.com/customers/1234/payment_sources', # noqa
status=201,
json={"id": 456, "object": "card"})
customer = invoiced.Customer(self.client, 1234)
source = customer.payment_sources().create()
# if true, endpoint on creation is correct
self.assertEqual(456, source.id)
# if true, endpoint after creation is correct
self.assertEqual("/customers/1234/cards/456", source.endpoint())
@responses.activate
def test_list(self):
responses.add('GET',
'https://api.invoiced.com/customers/1234/payment_sources', # noqa
status=200,
json=[
{"id": 123, "object": "bank_account"},
{"id": 456, "object": "card"}],
adding_headers={
'x-total-count': '15',
'link': '<https://api.invoiced.com/payment_sources?per_page=25&page=1>; rel="self", <https://api.invoiced.com/payment_sources?per_page=25&page=1>; rel="first", <https://api.invoiced.com/contacts?per_page=25&page=1>; rel="last"'}) # noqa
customer = invoiced.Customer(self.client, 1234)
sources, metadata = customer.payment_sources().list()
self.assertIsInstance(sources, list)
self.assertEqual(len(sources), 2)
self.assertEqual(sources[0].id, 123)
self.assertIsInstance(sources[0], invoiced.BankAccount)
self.assertIsInstance(sources[1], invoiced.Card)
self.assertIsInstance(metadata, invoiced.List)
self.assertEqual(metadata.total_count, 15)
@responses.activate
def test_delete_card(self):
responses.add('POST',
'https://api.invoiced.com/customers/1234/payment_sources', # noqa
status=201,
json={"id": 123, "object": "card"})
responses.add('DELETE',
'https://api.invoiced.com/customers/1234/cards/123',
status=204)
customer = invoiced.Customer(self.client, 1234)
source = customer.payment_sources().create()
self.assertIsInstance(source, invoiced.Card)
self.assertTrue(source.delete())
@responses.activate
def test_delete_bank_account(self):
responses.add('POST',
'https://api.invoiced.com/customers/1234/payment_sources', # noqa
status=201,
json={"id": 123, "object": "bank_account"})
responses.add('DELETE',
'https://api.invoiced.com/customers/1234/bank_accounts/123', # noqa
status=204)
customer = invoiced.Customer(self.client, 1234)
source = customer.payment_sources().create()
self.assertIsInstance(source, invoiced.BankAccount)
self.assertTrue(source.delete())
| mit | 4,387,637,778,830,354,400 | 39.416667 | 261 | 0.574742 | false |
cipri-tom/Swiss-on-Amazon | spark/brand_search.py | 1 | 1692 | #! /bin/python
from pyspark import SparkContext, SparkConf
import json
from pprint import pprint
import pickle
import re
#config path
brands_path = "../data/wiki_brands.txt"
brands_path2 = "../data/all_swiss_brands.pickle"
metadata_path = "hdfs:///datasets/amazon-reviews/metadata.json"
# load the list of brands
brands = []
with open(brands_path) as f:
for line in f:
line = line.rstrip('\n').lower()
brands.append(line)
with open(brands_path2, 'rb') as fp:
new_brands = pickle.load(fp)
# clean brand data
for b in new_brands:
b = b.lower()
b = re.sub(" [\(\[].*?[\)\]]", "", b)
brands.append(b)
brands = list(set(brands))
# lookup if a certain brand is swiss
def searchBrand(line):
line = line.rstrip('\n').lower()
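    # NOTE: eval() on raw metadata lines is unsafe in general; the Amazon dump is presumably trusted here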
d = eval(line)
if 'brand' in d:
if d['brand'] in brands:
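            # wrap the record in a list so reduceByKey can concatenate all matches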
return ("Swiss brand", [d])
#return (d['brand'], d['asin'])
else:
return ("No Swiss brand", 1)
else:
return ("No brand", 1)
# load spark job
conf = SparkConf().setAppName("SoA")
sc = SparkContext(conf=conf)
# load metadata file
text_file = sc.textFile(metadata_path)
print("finished loading file and brands")
# map reduce ->
# map: emit ("Swiss brand", [record]) when the product's brand is Swiss,
#      otherwise a counter under "No Swiss brand" / "No brand"
# reduce: concatenate the Swiss record lists / sum the counters per key
counts = text_file \
.map(searchBrand) \
.reduceByKey(lambda a, b: a + b)
products = counts.collect()
print("finished map reduce")
#print(products)
# write the Swiss products to disk (one Python dict repr per line, despite the .json name)
f = open('../data/swiss_products.json','w')
products = dict(products)
for product in products['Swiss brand']:
f.write(str(product) + '\n')
f.close()
print("finished writing file") | gpl-3.0 | -4,317,103,139,602,162,700 | 22.191781 | 70 | 0.670804 | false |
bitmingw/FindYourSister | tools/gen_misc.py | 1 | 3382 | #!/usr/bin/env python
"""
Copyright (c) 2015, Ming Wen
This program saves size of images into a json file.
Usage: ./gen_misc.py (IMAGE_FILE | IMAGE_DIR)
[train|validate|test] [JSON_FILE]
The program reads one image file "IMAGE_FILE", or reads some
image files from "IMAGE_DIR". Then it generates a json format
string containing image folder, name, group and size.
Group name can be "train", "validate" and "test". The default
group name is "train".
If "JSON_FILE" is given, the json string will be written into
the file (old content will be truncated), otherwise the json string
will be displayed in the terminal.
"""
from __future__ import print_function
from __future__ import unicode_literals
import sys
from os.path import abspath, isfile, isdir, join, split, splitext
from os import listdir
from PIL import Image
import json
# Global variables
IMAGE_TYPES = set([".jpg", ".jpeg", ".png", ".bmp", ".tif", ".tiff"])
GROUP_TYPES = set(["train", "validate", "validation", "test"])
def main():
if len(sys.argv) < 2:
raise IOError("More arguments are required.")
if not (isdir(sys.argv[1]) or isfile(sys.argv[1])):
raise IOError("Given path is not a file or directory.")
if len(sys.argv) == 2:
print("Warning: label as default group 'train'.")
group = "train"
jsonPath = None
if len(sys.argv) == 3:
if sys.argv[2] not in GROUP_TYPES:
print("Warning: label as default group 'train'.")
group = "train"
jsonPath = sys.argv[2]
else:
group = sys.argv[2]
jsonPath = None
elif len(sys.argv) >= 4:
if (sys.argv[2]) not in GROUP_TYPES:
raise ValueError("Image group type not supported.")
else:
group = sys.argv[2]
jsonPath = sys.argv[3]
imagePath = sys.argv[1]
rootNode = setupJsonRoot(group)
# Case isfile()
if isfile(imagePath):
addImage(rootNode, imagePath)
# Case isdir()
elif isdir(imagePath):
files = listdir(imagePath)
for eachFile in files:
addImage(rootNode, join(imagePath, eachFile))
# Display or Save
s = json.JSONEncoder(indent = 4).encode(rootNode)
if jsonPath != None:
outfile = open(jsonPath, "w")
outfile.write(s)
else:
print(s)
def setupJsonRoot(groupType):
root = {}
if groupType == "train":
root[u"train"] = []
elif groupType == "validate" or groupType == "validation":
root[u"validate"] = []
elif groupType == "test":
root[u"test"] = []
return root
def addImage(root, path):
"""
Read an image file, and add an entry in the json structure.
"""
_, extension = splitext(path)
if extension in IMAGE_TYPES:
img = Image.open(path)
imgSize = img.size
if u"train" in root.keys():
li = root[u"train"]
elif u"validate" in root.keys():
li = root[u"validate"]
elif u"test" in root.keys():
li = root[u"test"]
node = {}
node[u"folder"] = split(abspath(path))[0]
node[u"filename"] = split(abspath(path))[1]
node[u"size"] = {}
node[u"size"][u"nrows"] = imgSize[1] # height
node[u"size"][u"ncols"] = imgSize[0] # width
li.append(node)
if __name__ == "__main__":
main()
| bsd-2-clause | -2,822,098,611,643,585,000 | 26.721311 | 69 | 0.592253 | false |
tskisner/pytoast | src/python/tests/dist.py | 1 | 5208 | # Copyright (c) 2015-2018 by the parties listed in the AUTHORS file.
# All rights reserved. Use of this source code is governed by
# a BSD-style license that can be found in the LICENSE file.
from ..mpi import MPI
from .mpi import MPITestCase
from ..dist import *
import numpy as np
import numpy.testing as nt
import sys
import os
from ._helpers import (create_outdir, create_distdata)
class DataTest(MPITestCase):
def setUp(self):
fixture_name = os.path.splitext(os.path.basename(__file__))[0]
self.outdir = create_outdir(self.comm, fixture_name)
# Create one observation per group.
self.data = create_distdata(self.comm, obs_per_group=1)
self.ntask = 24
self.sizes1 = [
29218,
430879,
43684,
430338,
36289,
437553,
37461,
436200,
41249,
432593,
42467,
431195,
35387,
438274,
36740,
436741,
40663,
432999,
42015,
431285,
35297,
438004,
37010,
436291,
41114,
432186,
42828,
430293,
36243,
436697,
38318,
434802,
42602,
430338,
44676,
428264,
38273,
434306,
40708,
432051,
45308,
427452,
36695,
435884,
41520,
430879,
44090,
428309,
38273,
434126,
40843,
431375
]
self.totsamp1 = np.sum(self.sizes1)
self.sizes2 = [ (int(3600*169.7)) for i in range(8640) ]
self.totsamp2 = np.sum(self.sizes2)
def test_construction(self):
dist_uni1 = distribute_uniform(self.totsamp1, self.ntask)
# with open("test_uni_{}".format(self.comm.rank), "w") as f:
# for d in dist_uni:
# f.write("uniform: {} {}\n".format(d[0], d[1]))
n1 = np.sum(np.array(dist_uni1)[:,1])
assert(n1 == self.totsamp1)
n = self.totsamp1
breaks = [n//2+1000, n//4-1000000, n//2+1000, (3*n)//4]
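        # request breaks at arbitrary sample positions; no assigned range may straddle one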
dist_uni2 = distribute_uniform(self.totsamp1, self.ntask,
breaks=breaks)
n2 = np.sum(np.array(dist_uni2)[:,1])
assert(n2 == self.totsamp1)
for offset, nsamp in dist_uni2:
for brk in breaks:
if brk > offset and brk < offset+nsamp:
raise Exception(
"Uniform data distribution did not honor the breaks")
dist_disc1 = distribute_discrete(self.sizes1, self.ntask)
# with open("test_disc_{}".format(self.comm.rank), "w") as f:
# for d in dist_disc:
# f.write("discrete: {} {}\n".format(d[0], d[1]))
n = np.sum(np.array(dist_disc1)[:,1])
assert(n == len(self.sizes1))
n = len(self.sizes1)
breaks = [n//2, n//4, n//2, (3*n)//4]
dist_disc2 = distribute_discrete(self.sizes1, self.ntask,
breaks=breaks)
n = np.sum(np.array(dist_disc2)[:,1])
assert(n == len(self.sizes1))
for offset, nchunk in dist_disc2:
for brk in breaks:
if brk > offset and brk < offset+nchunk:
raise Exception(
"Discrete data distribution did not honor the breaks")
handle = None
if self.comm.rank == 0:
handle = open(os.path.join(self.outdir,"out_test_construct_info"),
"w")
self.data.info(handle)
if self.comm.rank == 0:
handle.close()
dist_disc3 = distribute_discrete(self.sizes2, 384)
if self.comm.rank == 0:
with open(os.path.join(self.outdir,"dist_discrete_8640x384.txt"), "w") as f:
indx = 0
for d in dist_disc3:
f.write("{:04d} = ({}, {})\n".format(indx, d[0], d[1]))
indx += 1
return
def test_split(self):
data = Data(self.data.comm)
data.obs.append({"site":"Atacama", "season":1})
data.obs.append({"site":"Atacama", "season":2})
data.obs.append({"site":"Atacama", "season":3})
data.obs.append({"site":"Pole", "season":1})
data.obs.append({"site":"Pole", "season":2})
data.obs.append({"site":"Pole", "season":3})
datasplit_site = data.split("site")
datasplit_season = data.split("season")
nt.assert_equal(len(datasplit_site), 2)
nt.assert_equal(len(datasplit_season), 3)
# Verify that the observations are shared
sum1 = 0
for value, site_data in datasplit_site:
for obs in site_data.obs:
assert("var1" not in obs)
obs["var1"] = 1
sum1 += 1
sum2 = 0
for value, season_data in datasplit_season:
for obs in season_data.obs:
sum2 += obs["var1"]
nt.assert_equal(sum1, sum2)
return
| bsd-2-clause | 3,222,813,290,255,280,000 | 27.773481 | 88 | 0.49424 | false |
MrSnede/BalancingWheelRobot | mainWindow.py | 1 | 19684 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/mainWindow.ui'
#
# Created: Sat Nov 29 18:47:56 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1237, 745)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.Serial_groupBox = QtGui.QGroupBox(self.centralwidget)
self.Serial_groupBox.setObjectName(_fromUtf8("Serial_groupBox"))
self.verticalLayout = QtGui.QVBoxLayout(self.Serial_groupBox)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.HLayOutSerialSettings = QtGui.QHBoxLayout()
self.HLayOutSerialSettings.setObjectName(_fromUtf8("HLayOutSerialSettings"))
self.portsComboBox = QtGui.QComboBox(self.Serial_groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.portsComboBox.sizePolicy().hasHeightForWidth())
self.portsComboBox.setSizePolicy(sizePolicy)
self.portsComboBox.setMinimumSize(QtCore.QSize(100, 27))
self.portsComboBox.setObjectName(_fromUtf8("portsComboBox"))
self.HLayOutSerialSettings.addWidget(self.portsComboBox)
self.refreshPortsPushButton = QtGui.QPushButton(self.Serial_groupBox)
self.refreshPortsPushButton.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.refreshPortsPushButton.sizePolicy().hasHeightForWidth())
self.refreshPortsPushButton.setSizePolicy(sizePolicy)
self.refreshPortsPushButton.setMinimumSize(QtCore.QSize(38, 27))
self.refreshPortsPushButton.setMaximumSize(QtCore.QSize(38, 27))
self.refreshPortsPushButton.setText(_fromUtf8(""))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/icons/refresh.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.refreshPortsPushButton.setIcon(icon)
self.refreshPortsPushButton.setIconSize(QtCore.QSize(16, 16))
self.refreshPortsPushButton.setObjectName(_fromUtf8("refreshPortsPushButton"))
self.HLayOutSerialSettings.addWidget(self.refreshPortsPushButton)
self.baudRateComboBox = QtGui.QComboBox(self.Serial_groupBox)
self.baudRateComboBox.setMinimumSize(QtCore.QSize(91, 27))
self.baudRateComboBox.setObjectName(_fromUtf8("baudRateComboBox"))
self.HLayOutSerialSettings.addWidget(self.baudRateComboBox)
self.connectPushButton = QtGui.QPushButton(self.Serial_groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.connectPushButton.sizePolicy().hasHeightForWidth())
self.connectPushButton.setSizePolicy(sizePolicy)
self.connectPushButton.setMinimumSize(QtCore.QSize(91, 27))
self.connectPushButton.setObjectName(_fromUtf8("connectPushButton"))
self.HLayOutSerialSettings.addWidget(self.connectPushButton)
self.disconnectPushButton = QtGui.QPushButton(self.Serial_groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.disconnectPushButton.sizePolicy().hasHeightForWidth())
self.disconnectPushButton.setSizePolicy(sizePolicy)
self.disconnectPushButton.setMinimumSize(QtCore.QSize(91, 27))
self.disconnectPushButton.setObjectName(_fromUtf8("disconnectPushButton"))
self.HLayOutSerialSettings.addWidget(self.disconnectPushButton)
self.verticalLayout.addLayout(self.HLayOutSerialSettings)
self.logPlainTextEdit = QtGui.QPlainTextEdit(self.Serial_groupBox)
self.logPlainTextEdit.setMinimumSize(QtCore.QSize(270, 200))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Courier New"))
self.logPlainTextEdit.setFont(font)
self.logPlainTextEdit.setReadOnly(True)
self.logPlainTextEdit.setObjectName(_fromUtf8("logPlainTextEdit"))
self.verticalLayout.addWidget(self.logPlainTextEdit)
self.cmdLineEdit = QtGui.QLineEdit(self.Serial_groupBox)
self.cmdLineEdit.setMinimumSize(QtCore.QSize(0, 27))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Courier New"))
self.cmdLineEdit.setFont(font)
self.cmdLineEdit.setDragEnabled(True)
self.cmdLineEdit.setPlaceholderText(_fromUtf8(""))
self.cmdLineEdit.setObjectName(_fromUtf8("cmdLineEdit"))
self.verticalLayout.addWidget(self.cmdLineEdit)
self.horizontalLayout_2.addWidget(self.Serial_groupBox)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.PID_groupBox_2 = QtGui.QGroupBox(self.centralwidget)
self.PID_groupBox_2.setObjectName(_fromUtf8("PID_groupBox_2"))
self.gridLayout = QtGui.QGridLayout(self.PID_groupBox_2)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.D_Label = QtGui.QLabel(self.PID_groupBox_2)
self.D_Label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.D_Label.setObjectName(_fromUtf8("D_Label"))
self.gridLayout.addWidget(self.D_Label, 2, 0, 1, 1)
self.I_Label = QtGui.QLabel(self.PID_groupBox_2)
self.I_Label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.I_Label.setObjectName(_fromUtf8("I_Label"))
self.gridLayout.addWidget(self.I_Label, 1, 0, 1, 1)
self.P_ValueLabel = QtGui.QLabel(self.PID_groupBox_2)
self.P_ValueLabel.setMinimumSize(QtCore.QSize(50, 0))
self.P_ValueLabel.setText(_fromUtf8(""))
self.P_ValueLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.P_ValueLabel.setObjectName(_fromUtf8("P_ValueLabel"))
self.gridLayout.addWidget(self.P_ValueLabel, 0, 1, 1, 1)
self.I_ValueLabel = QtGui.QLabel(self.PID_groupBox_2)
self.I_ValueLabel.setMinimumSize(QtCore.QSize(50, 0))
self.I_ValueLabel.setText(_fromUtf8(""))
self.I_ValueLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.I_ValueLabel.setObjectName(_fromUtf8("I_ValueLabel"))
self.gridLayout.addWidget(self.I_ValueLabel, 1, 1, 1, 1)
self.P_HSlider = QtGui.QSlider(self.PID_groupBox_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.P_HSlider.sizePolicy().hasHeightForWidth())
self.P_HSlider.setSizePolicy(sizePolicy)
self.P_HSlider.setMaximum(3000)
self.P_HSlider.setSingleStep(100)
self.P_HSlider.setPageStep(300)
self.P_HSlider.setProperty("value", 2000)
self.P_HSlider.setOrientation(QtCore.Qt.Horizontal)
self.P_HSlider.setTickPosition(QtGui.QSlider.TicksBelow)
self.P_HSlider.setTickInterval(100)
self.P_HSlider.setObjectName(_fromUtf8("P_HSlider"))
self.gridLayout.addWidget(self.P_HSlider, 0, 2, 1, 1)
self.D_ValueLabel = QtGui.QLabel(self.PID_groupBox_2)
self.D_ValueLabel.setMinimumSize(QtCore.QSize(50, 0))
self.D_ValueLabel.setText(_fromUtf8(""))
self.D_ValueLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.D_ValueLabel.setObjectName(_fromUtf8("D_ValueLabel"))
self.gridLayout.addWidget(self.D_ValueLabel, 2, 1, 1, 1)
self.P_Label = QtGui.QLabel(self.PID_groupBox_2)
self.P_Label.setMinimumSize(QtCore.QSize(20, 0))
self.P_Label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.P_Label.setObjectName(_fromUtf8("P_Label"))
self.gridLayout.addWidget(self.P_Label, 0, 0, 1, 1)
self.D_HSlider = QtGui.QSlider(self.PID_groupBox_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.D_HSlider.sizePolicy().hasHeightForWidth())
self.D_HSlider.setSizePolicy(sizePolicy)
self.D_HSlider.setMaximum(3000)
self.D_HSlider.setSingleStep(100)
self.D_HSlider.setPageStep(300)
self.D_HSlider.setProperty("value", 2000)
self.D_HSlider.setOrientation(QtCore.Qt.Horizontal)
self.D_HSlider.setTickPosition(QtGui.QSlider.TicksBelow)
self.D_HSlider.setTickInterval(100)
self.D_HSlider.setObjectName(_fromUtf8("D_HSlider"))
self.gridLayout.addWidget(self.D_HSlider, 2, 2, 1, 1)
self.I_HSlider = QtGui.QSlider(self.PID_groupBox_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.I_HSlider.sizePolicy().hasHeightForWidth())
self.I_HSlider.setSizePolicy(sizePolicy)
self.I_HSlider.setMaximum(3000)
self.I_HSlider.setSingleStep(100)
self.I_HSlider.setPageStep(300)
self.I_HSlider.setProperty("value", 2000)
self.I_HSlider.setOrientation(QtCore.Qt.Horizontal)
self.I_HSlider.setTickPosition(QtGui.QSlider.TicksBelow)
self.I_HSlider.setTickInterval(100)
self.I_HSlider.setObjectName(_fromUtf8("I_HSlider"))
self.gridLayout.addWidget(self.I_HSlider, 1, 2, 1, 1)
self.verticalLayout_2.addWidget(self.PID_groupBox_2)
self.FreeSlider_groupBox_3 = QtGui.QGroupBox(self.centralwidget)
self.FreeSlider_groupBox_3.setObjectName(_fromUtf8("FreeSlider_groupBox_3"))
self.gridLayout_2 = QtGui.QGridLayout(self.FreeSlider_groupBox_3)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.FreeSlider2_Label = QtGui.QLabel(self.FreeSlider_groupBox_3)
self.FreeSlider2_Label.setObjectName(_fromUtf8("FreeSlider2_Label"))
self.gridLayout_2.addWidget(self.FreeSlider2_Label, 1, 0, 1, 1)
self.FreeSlider1_Label = QtGui.QLabel(self.FreeSlider_groupBox_3)
self.FreeSlider1_Label.setObjectName(_fromUtf8("FreeSlider1_Label"))
self.gridLayout_2.addWidget(self.FreeSlider1_Label, 0, 0, 1, 1)
self.FreeSlider2_HSlider = QtGui.QSlider(self.FreeSlider_groupBox_3)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.FreeSlider2_HSlider.sizePolicy().hasHeightForWidth())
self.FreeSlider2_HSlider.setSizePolicy(sizePolicy)
self.FreeSlider2_HSlider.setMaximum(3000)
self.FreeSlider2_HSlider.setSingleStep(100)
self.FreeSlider2_HSlider.setPageStep(300)
self.FreeSlider2_HSlider.setProperty("value", 2000)
self.FreeSlider2_HSlider.setOrientation(QtCore.Qt.Horizontal)
self.FreeSlider2_HSlider.setTickPosition(QtGui.QSlider.TicksBelow)
self.FreeSlider2_HSlider.setTickInterval(100)
self.FreeSlider2_HSlider.setObjectName(_fromUtf8("FreeSlider2_HSlider"))
self.gridLayout_2.addWidget(self.FreeSlider2_HSlider, 1, 2, 1, 1)
self.FreeSlider2_ValueLabel = QtGui.QLabel(self.FreeSlider_groupBox_3)
self.FreeSlider2_ValueLabel.setMinimumSize(QtCore.QSize(50, 0))
self.FreeSlider2_ValueLabel.setText(_fromUtf8(""))
self.FreeSlider2_ValueLabel.setObjectName(_fromUtf8("FreeSlider2_ValueLabel"))
self.gridLayout_2.addWidget(self.FreeSlider2_ValueLabel, 1, 1, 1, 1)
self.FreeSlider1_HSlider = QtGui.QSlider(self.FreeSlider_groupBox_3)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.FreeSlider1_HSlider.sizePolicy().hasHeightForWidth())
self.FreeSlider1_HSlider.setSizePolicy(sizePolicy)
self.FreeSlider1_HSlider.setMaximum(3000)
self.FreeSlider1_HSlider.setSingleStep(100)
self.FreeSlider1_HSlider.setPageStep(300)
self.FreeSlider1_HSlider.setProperty("value", 2000)
self.FreeSlider1_HSlider.setOrientation(QtCore.Qt.Horizontal)
self.FreeSlider1_HSlider.setTickPosition(QtGui.QSlider.TicksBelow)
self.FreeSlider1_HSlider.setTickInterval(100)
self.FreeSlider1_HSlider.setObjectName(_fromUtf8("FreeSlider1_HSlider"))
self.gridLayout_2.addWidget(self.FreeSlider1_HSlider, 0, 2, 1, 1)
self.FreeSlider1_ValueLabel = QtGui.QLabel(self.FreeSlider_groupBox_3)
self.FreeSlider1_ValueLabel.setMinimumSize(QtCore.QSize(50, 0))
self.FreeSlider1_ValueLabel.setText(_fromUtf8(""))
self.FreeSlider1_ValueLabel.setObjectName(_fromUtf8("FreeSlider1_ValueLabel"))
self.gridLayout_2.addWidget(self.FreeSlider1_ValueLabel, 0, 1, 1, 1)
self.verticalLayout_2.addWidget(self.FreeSlider_groupBox_3)
self.FreeSwitches_groupBox = QtGui.QGroupBox(self.centralwidget)
self.FreeSwitches_groupBox.setObjectName(_fromUtf8("FreeSwitches_groupBox"))
self.horizontalLayout = QtGui.QHBoxLayout(self.FreeSwitches_groupBox)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.Free_Free_checkBox_1 = QtGui.QCheckBox(self.FreeSwitches_groupBox)
self.Free_Free_checkBox_1.setObjectName(_fromUtf8("Free_Free_checkBox_1"))
self.horizontalLayout.addWidget(self.Free_Free_checkBox_1)
self.Free_checkBox_2 = QtGui.QCheckBox(self.FreeSwitches_groupBox)
self.Free_checkBox_2.setObjectName(_fromUtf8("Free_checkBox_2"))
self.horizontalLayout.addWidget(self.Free_checkBox_2)
self.Free_checkBox_3 = QtGui.QCheckBox(self.FreeSwitches_groupBox)
self.Free_checkBox_3.setObjectName(_fromUtf8("Free_checkBox_3"))
self.horizontalLayout.addWidget(self.Free_checkBox_3)
self.Free_checkBox_4 = QtGui.QCheckBox(self.FreeSwitches_groupBox)
self.Free_checkBox_4.setObjectName(_fromUtf8("Free_checkBox_4"))
self.horizontalLayout.addWidget(self.Free_checkBox_4)
self.Free_checkBox_5 = QtGui.QCheckBox(self.FreeSwitches_groupBox)
self.Free_checkBox_5.setObjectName(_fromUtf8("Free_checkBox_5"))
self.horizontalLayout.addWidget(self.Free_checkBox_5)
self.verticalLayout_2.addWidget(self.FreeSwitches_groupBox)
self.horizontalLayout_2.addLayout(self.verticalLayout_2)
self.verticalLayout_3.addLayout(self.horizontalLayout_2)
self.mpl_widget = MplWidget(self.centralwidget)
self.mpl_widget.setMinimumSize(QtCore.QSize(0, 300))
self.mpl_widget.setObjectName(_fromUtf8("mpl_widget"))
self.verticalLayout_3.addWidget(self.mpl_widget)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1237, 27))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menuFile = QtGui.QMenu(self.menubar)
self.menuFile.setObjectName(_fromUtf8("menuFile"))
MainWindow.setMenuBar(self.menubar)
self.actionExit = QtGui.QAction(MainWindow)
self.actionExit.setObjectName(_fromUtf8("actionExit"))
self.menuFile.addAction(self.actionExit)
self.menubar.addAction(self.menuFile.menuAction())
self.retranslateUi(MainWindow)
QtCore.QObject.connect(self.actionExit, QtCore.SIGNAL(_fromUtf8("activated()")), MainWindow.close)
QtCore.QObject.connect(self.cmdLineEdit, QtCore.SIGNAL(_fromUtf8("returnPressed()")), self.logPlainTextEdit.paste)
QtCore.QObject.connect(self.P_HSlider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.P_ValueLabel.setNum)
QtCore.QObject.connect(self.I_HSlider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.I_ValueLabel.setNum)
QtCore.QObject.connect(self.D_HSlider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.D_ValueLabel.setNum)
QtCore.QObject.connect(self.FreeSlider1_HSlider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.FreeSlider1_ValueLabel.setNum)
QtCore.QObject.connect(self.FreeSlider2_HSlider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.FreeSlider2_ValueLabel.setNum)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "BotControlGUI", None))
self.Serial_groupBox.setTitle(_translate("MainWindow", "Serielle Verbindung", None))
self.connectPushButton.setText(_translate("MainWindow", "Connect", None))
self.disconnectPushButton.setText(_translate("MainWindow", "Disconnect", None))
self.cmdLineEdit.setText(_translate("MainWindow", ">LedBlinkTime=100<", None))
self.PID_groupBox_2.setTitle(_translate("MainWindow", "PID Regler", None))
self.D_Label.setText(_translate("MainWindow", "D :", None))
self.I_Label.setText(_translate("MainWindow", "I :", None))
self.P_Label.setText(_translate("MainWindow", "P :", None))
self.FreeSlider_groupBox_3.setTitle(_translate("MainWindow", "Freie Regler", None))
self.FreeSlider2_Label.setText(_translate("MainWindow", "Max Geschwindigkeit:", None))
self.FreeSlider1_Label.setText(_translate("MainWindow", "Max Beschleunigung :", None))
self.FreeSwitches_groupBox.setTitle(_translate("MainWindow", "Freie Schalter", None))
self.Free_Free_checkBox_1.setText(_translate("MainWindow", "Schalter 1", None))
self.Free_checkBox_2.setText(_translate("MainWindow", "Schalter 2", None))
self.Free_checkBox_3.setText(_translate("MainWindow", "Schalter 3", None))
self.Free_checkBox_4.setText(_translate("MainWindow", "Schalter 4", None))
self.Free_checkBox_5.setText(_translate("MainWindow", "Schalter 5", None))
self.menuFile.setTitle(_translate("MainWindow", "File", None))
self.actionExit.setText(_translate("MainWindow", "Exit", None))
from mplwidget import MplWidget
import mainWindow_rc
| gpl-2.0 | 4,252,222,543,709,044,000 | 61.888179 | 139 | 0.723024 | false |
chaddcw/Python-CI-Testing | test_list.py | 1 | 1273 | #!/usr/bin/python3
################################
# File Name: test_list.py
# Author: Chadd Williams
# Date: 11/7/2014
# Class: CS 360
# Assignment: Lecture Examples
# Purpose: build some tests that will be run by nosetests
################################
from nose import with_setup
from nose.tools import assert_equals
def test_simpleAddition():
assert 1+2 == 3
def setUp():
global theList
theList = []
def tearDown():
pass
@with_setup(setUp, tearDown)
def test_listAppend():
""" test that list append works
	Since this function's name begins with 'test' and the
	file name also begins with 'test', nose will
	automatically discover and run this test whenever
	nosetests is invoked
"""
theList.append(5)
assert theList[0] == 5
def dictSetup():
global mapping
mapping = { str(x):x for x in range(99) }
def dictTeardown():
pass
@with_setup(dictSetup, dictTeardown)
def test_dictSortedIterator():
value = 0
	# the keys are strings, so they are sorted lexicographically ('1', '10', ...)
	# use the key parameter to convert each key to an integer and sort
	# numerically
for x in sorted(mapping.keys() , key=lambda data: int(data)) :
assert_equals( value , mapping[x])
value += 1
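# Editor's illustrative note (not part of the original file): the effect of
# the key parameter used in test_dictSortedIterator, shown on a small input.
#
#     sorted(['1', '10', '2'])            # -> ['1', '10', '2'] (lexicographic)
#     sorted(['1', '10', '2'], key=int)   # -> ['1', '2', '10'] (numeric)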
| gpl-2.0 | 2,139,923,379,134,889,200 | 20.576271 | 72 | 0.635507 | false |
niboshi/chainer | chainer/testing/parameterized.py | 1 | 6917 | import functools
import itertools
import types
import typing as tp # NOQA
import unittest
import numpy
import six
from chainer.testing import _bundle
from chainer import utils
def _param_to_str(obj):
if isinstance(obj, type):
return obj.__name__
return repr(obj)
def _shorten(s, maxlen):
# Shortens the string down to maxlen, by replacing the middle part with
# a 3-dots string '...'.
ellipsis = '...'
if len(s) <= maxlen:
return s
n1 = (maxlen - len(ellipsis)) // 2
n2 = maxlen - len(ellipsis) - n1
s = s[:n1] + ellipsis + s[-n2:]
assert len(s) == maxlen
return s
def _make_class_name(base_class_name, i_param, param):
# Creates a class name for a single combination of parameters.
SINGLE_PARAM_MAXLEN = 100 # Length limit of a single parameter value
PARAMS_MAXLEN = 5000 # Length limit of the whole parameters part
param_strs = [
'{}={}'.format(k, _shorten(_param_to_str(v), SINGLE_PARAM_MAXLEN))
for k, v in sorted(param.items())]
param_strs = _shorten(', '.join(param_strs), PARAMS_MAXLEN)
cls_name = '{}_param_{}_{{{}}}'.format(
base_class_name, i_param, param_strs)
return cls_name
def _parameterize_test_case_generator(base, params):
# Defines the logic to generate parameterized test case classes.
for i, param in enumerate(params):
yield _parameterize_test_case(base, i, param)
def _parameterize_test_case(base, i, param):
cls_name = _make_class_name(base.__name__, i, param)
def __str__(self):
name = base.__str__(self)
return '%s parameter: %s' % (name, param)
mb = {'__str__': __str__}
for k, v in sorted(param.items()):
if isinstance(v, types.FunctionType):
def create_new_v():
f = v
def new_v(self, *args, **kwargs):
return f(*args, **kwargs)
return new_v
mb[k] = create_new_v()
else:
mb[k] = v
def method_generator(base_method):
# Generates a wrapped test method
@functools.wraps(base_method)
def new_method(self, *args, **kwargs):
try:
return base_method(self, *args, **kwargs)
except unittest.SkipTest:
raise
except Exception as e:
s = six.StringIO()
s.write('Parameterized test failed.\n\n')
s.write('Base test method: {}.{}\n'.format(
base.__name__, base_method.__name__))
s.write('Test parameters:\n')
for k, v in sorted(param.items()):
s.write(' {}: {}\n'.format(k, v))
utils._raise_from(e.__class__, s.getvalue(), e)
return new_method
return (cls_name, mb, method_generator)
def parameterize(*params):
# TODO(niboshi): Add documentation
return _bundle.make_decorator(
lambda base: _parameterize_test_case_generator(base, params))
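# Editor's sketch (not part of the original module): typical use of
# `parameterize`. Each dict produces one generated TestCase subclass whose
# keys are bound as class attributes; the test class below is hypothetical.
#
#     @parameterize({'dtype': 'float32'}, {'dtype': 'float64'})
#     class _ExampleTest(unittest.TestCase):
#         def test_dtype_attribute(self):
#             self.assertIn(self.dtype, ('float32', 'float64'))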
def _values_to_dicts(names, values):
assert isinstance(names, six.string_types)
assert isinstance(values, (tuple, list))
def safe_zip(ns, vs):
if len(ns) == 1:
return [(ns[0], vs)]
assert isinstance(vs, (tuple, list)) and len(ns) == len(vs)
return zip(ns, vs)
names = names.split(',')
params = [dict(safe_zip(names, value_list)) for value_list in values]
return params
def from_pytest_parameterize(names, values):
# Pytest-style parameterization.
# TODO(niboshi): Add documentation
return _values_to_dicts(names, values)
def parameterize_pytest(names, values):
# Pytest-style parameterization.
# TODO(niboshi): Add documentation
return parameterize(*from_pytest_parameterize(names, values))
def product(parameter):
# TODO(niboshi): Add documentation
if isinstance(parameter, dict):
return product_dict(*[
_values_to_dicts(names, values)
for names, values in sorted(parameter.items())])
elif isinstance(parameter, list):
# list of lists of dicts
if not all(isinstance(_, list) for _ in parameter):
raise TypeError('parameter must be list of lists of dicts')
if not all(isinstance(_, dict) for l in parameter for _ in l):
raise TypeError('parameter must be list of lists of dicts')
return product_dict(*parameter)
else:
raise TypeError(
'parameter must be either dict or list. Actual: {}'.format(
type(parameter)))
def product_dict(*parameters):
# TODO(niboshi): Add documentation
return [
{k: v for dic in dicts for k, v in six.iteritems(dic)}
for dicts in itertools.product(*parameters)]
# TODO(kataoka): product_dict is patched by tests/conftest.py while tests are
# collected if CHAINER_TEST_PAIRWISE_PARAMETERIZATION is configured
# accordingly. Also used in
# tests/chainer_tests/testing_tests/test_parameterized.py
_product_dict_orig = product_dict
def _pairwise_product_dict(*parameters):
if len(parameters) <= 2:
return _product_dict_orig(*parameters)
return list(_pairwise_product_dict_iter(*parameters))
def _pairwise_product_dict_iter(
*parameters: tp.Iterable[tp.Dict[str, tp.Any]]
) -> tp.Iterator[tp.Dict[str, tp.Any]]:
"""Generate combinations that cover all pairs.
The argument is the same as `chainer.testing.product_dict`.
"""
parameter_lists = [list(dicts) for dicts in parameters] # type: tp.List[tp.List[tp.Dict[str, tp.Any]]] # NOQA
for nd_index in sorted(_nd_indices_to_cover_each_2d(
[len(dicts) for dicts in parameter_lists])):
yield {
k: v
for i, dicts in zip(nd_index, parameter_lists)
for k, v in dicts[i].items()}
def _nd_indices_to_cover_each_2d(
shape: tp.Sequence[int]
) -> tp.Iterator[tp.Tuple[int, ...]]:
rs = numpy.random.RandomState(seed=0)
n = len(shape)
indices = [list(range(length)) for length in shape] # type: tp.List[tp.List[int]] # NOQA
# `(k_i, k_j) in uncovered[(i, j)]` iff it has not been yielded
# `nd_index` such that `(nd_index[i], nd_inde[j]) == (k_i, k_j)`.
uncovered = {} # type: tp.Dict[tp.Tuple[int, int], tp.Set[tp.Tuple[int, int]]] # NOQA
for i, j in itertools.combinations(range(n), 2):
uncovered[(i, j)] = set(itertools.product(indices[i], indices[j]))
nd_indices = list(itertools.product(*indices)) # type: tp.List[tp.Tuple[int, ...]] # NOQA
rs.shuffle(nd_indices)
for nd_index in nd_indices:
count = 0
for i, j in itertools.combinations(range(n), 2):
try:
uncovered[(i, j)].remove((nd_index[i], nd_index[j]))
except KeyError:
pass
else:
count += 1
if count > 0:
yield nd_index
| mit | 8,060,633,874,050,926,000 | 31.172093 | 115 | 0.597947 | false |
culturagovbr/sistema-nacional-cultura | planotrabalho/tests/test_views.py | 1 | 22743 | import pytest
import datetime
from django.shortcuts import reverse
from django.core.files.uploadedfile import SimpleUploadedFile
from adesao.models import SistemaCultura
from planotrabalho.models import Componente
from planotrabalho.models import FundoDeCultura
from planotrabalho.models import Conselheiro
from planotrabalho.models import ConselhoDeCultura
from model_mommy import mommy
def test_cadastrar_componente_tipo_legislacao(client, login):
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'sede', 'gestor'],
cadastrador=login, ente_federado__cod_ibge=123456)
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:cadastrar_legislacao")
arquivo = SimpleUploadedFile(
"componente.txt", b"file_content", content_type="text/plain"
)
response = client.post(url, data={"arquivo": arquivo,
'data_publicacao': '28/06/2018'})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
assert response.status_code == 302
assert arquivo.name.split(".")[0] in sistema_atualizado.legislacao.arquivo.name.split("/")[-1]
assert sistema_atualizado.legislacao.data_publicacao == datetime.date(2018, 6, 28)
assert sistema_atualizado.legislacao.tipo == 0
def test_cadastrar_componente_tipo_orgao_gestor(client, login):
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'sede', 'gestor'],
cadastrador=login, ente_federado__cod_ibge=123456)
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:cadastrar_orgao")
arquivo = SimpleUploadedFile(
"componente.txt", b"file_content", content_type="text/plain"
)
response = client.post(url, data={"arquivo": arquivo,
'data_publicacao': '28/06/2018',
'perfil': 0})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
assert response.status_code == 302
assert arquivo.name.split(".")[0] in sistema_atualizado.orgao_gestor.arquivo.name.split("/")[-1]
assert sistema_atualizado.orgao_gestor.data_publicacao == datetime.date(2018, 6, 28)
assert sistema_atualizado.orgao_gestor.perfil == 0
assert sistema_atualizado.orgao_gestor.tipo == 1
def test_alterar_orgao_gestor(client, login):
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'orgao_gestor', 'sede', 'gestor'],
cadastrador=login, ente_federado__cod_ibge=123456)
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:alterar_orgao", kwargs={"pk": sistema_cultura.orgao_gestor.id})
arquivo = SimpleUploadedFile(
"novo.txt", b"file_content", content_type="text/plain"
)
response = client.post(url, data={"arquivo": arquivo,
"data_publicacao": "25/06/2018",
"perfil": 0})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
assert arquivo.name.split(".")[0] in sistema_atualizado.orgao_gestor.arquivo.name.split("/")[-1]
assert sistema_atualizado.orgao_gestor.data_publicacao == datetime.date(2018, 6, 25)
assert sistema_atualizado.orgao_gestor.perfil == 0
assert sistema_atualizado.orgao_gestor.tipo == 1
def test_cadastrar_componente_tipo_fundo_cultura(client, login, cnpj):
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'sede', 'gestor'],
cadastrador=login, ente_federado__cod_ibge=123456)
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:cadastrar_fundo_cultura")
arquivo = SimpleUploadedFile(
"componente.txt", b"file_content", content_type="text/plain"
)
cnpj = SimpleUploadedFile(
"cnpj.txt", b"file_content", content_type="text/plain"
)
response = client.post(url, data={"arquivo": arquivo,
"data_publicacao": '28/06/2018',
"possui_cnpj": 'True',
"cnpj": '28.134.084/0001-75',
'mesma_lei': 'False',
"comprovante": cnpj})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
assert response.status_code == 302
assert arquivo.name.split(".")[0] in sistema_atualizado.fundo_cultura.arquivo.name.split("/")[-1]
assert sistema_atualizado.fundo_cultura.data_publicacao == datetime.date(2018, 6, 28)
assert sistema_atualizado.fundo_cultura.tipo == 2
def test_cadastrar_componente_tipo_fundo_cultura_reaproveita_lei_sem_cnpj(client, login):
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'sede', 'gestor', 'legislacao'],
cadastrador=login, ente_federado__cod_ibge=123456)
legislacao = SimpleUploadedFile(
"legislacao.txt", b"file_content", content_type="text/plain"
)
sistema_cultura.legislacao.arquivo = legislacao
sistema_cultura.legislacao.save()
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:cadastrar_fundo_cultura")
response = client.post(url, data={"possui_cnpj": 'False',
'mesma_lei': 'True'})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
assert response.status_code == 302
assert sistema_atualizado.legislacao.arquivo.name.split("/")[-1] in sistema_atualizado.fundo_cultura.arquivo.name.split("/")[-1]
assert sistema_atualizado.legislacao.data_publicacao == sistema_atualizado.fundo_cultura.data_publicacao
assert sistema_atualizado.fundo_cultura.tipo == 2
def test_cadastrar_componente_tipo_fundo_cultura_reaproveita_lei_com_cnpj(client, login, cnpj):
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'sede', 'gestor', 'legislacao'],
cadastrador=login, ente_federado__cod_ibge=123456)
legislacao = SimpleUploadedFile(
"legislacao_teste.txt", b"file_content", content_type="text/plain"
)
sistema_cultura.legislacao.arquivo = legislacao
sistema_cultura.legislacao.save()
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:cadastrar_fundo_cultura")
cnpj = SimpleUploadedFile(
"cnpj.txt", b"file_content", content_type="text/plain"
)
response = client.post(url, data={"possui_cnpj": 'True',
"cnpj": '28.134.084/0001-75',
"comprovante": cnpj,
'mesma_lei': 'True'})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
assert response.status_code == 302
assert sistema_atualizado.legislacao.arquivo.name.split("/")[-1] in sistema_atualizado.fundo_cultura.arquivo.name.split("/")[-1]
assert sistema_atualizado.legislacao.data_publicacao == sistema_atualizado.fundo_cultura.data_publicacao
assert cnpj.name.split(".")[0] in sistema_atualizado.fundo_cultura.comprovante_cnpj.arquivo.name.split("/")[-1]
assert sistema_atualizado.fundo_cultura.cnpj == '28.134.084/0001-75'
assert sistema_atualizado.fundo_cultura.tipo == 2
def test_cadastrar_componente_tipo_conselho(client, login):
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'sede', 'gestor'],
cadastrador=login, ente_federado__cod_ibge=123456)
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:cadastrar_conselho")
arquivo_ata = SimpleUploadedFile(
"ata.txt", b"file_content", content_type="text/plain"
)
arquivo_lei = SimpleUploadedFile(
"lei.txt", b"file_content", content_type="text/plain"
)
response = client.post(url, data={'mesma_lei': False,
'arquivo': arquivo_ata,
'data_publicacao': '28/06/2018',
'arquivo_lei': arquivo_lei,
'data_publicacao_lei': '29/06/2018',
'possui_ata': True,
'paritario': True,
'exclusivo_cultura': True})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
assert response.status_code == 302
assert arquivo_ata.name.split(".")[0] in sistema_atualizado.conselho.arquivo.name.split("/")[-1]
assert arquivo_lei.name.split(".")[0] in sistema_atualizado.conselho.lei.arquivo.name.split("/")[-1]
assert sistema_atualizado.conselho.data_publicacao == datetime.date(2018, 6, 28)
assert sistema_atualizado.conselho.lei.data_publicacao == datetime.date(2018, 6, 29)
assert sistema_atualizado.conselho.tipo == 3
def test_cadastrar_componente_tipo_conselho_importar_lei(client, login):
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'sede', 'gestor', 'legislacao'],
cadastrador=login, ente_federado__cod_ibge=123456)
legislacao = SimpleUploadedFile(
"legislacao.txt", b"file_content", content_type="text/plain"
)
sistema_cultura.legislacao.arquivo = legislacao
sistema_cultura.legislacao.save()
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:cadastrar_conselho")
response = client.post(url, data={'mesma_lei': True,
'possui_ata': False,
'paritario': True,
'exclusivo_cultura': True})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
assert response.status_code == 302
assert sistema_atualizado.legislacao.arquivo.name.split("/")[-1] in sistema_atualizado.conselho.lei.arquivo.name.split("/")[-1]
assert sistema_atualizado.legislacao.data_publicacao == sistema_atualizado.conselho.lei.data_publicacao
assert sistema_atualizado.conselho.paritario
assert sistema_atualizado.conselho.exclusivo_cultura
assert sistema_atualizado.conselho.tipo == 3
def test_cadastrar_componente_tipo_plano(client, login):
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'sede', 'gestor'],
cadastrador=login, ente_federado__cod_ibge=123456)
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:cadastrar_plano")
arquivo = SimpleUploadedFile(
"componente.txt", b"file_content", content_type="text/plain"
)
anexo_lei = SimpleUploadedFile(
"plano_lei.txt", b"file_content", content_type="text/plain"
)
arquivo_metas = SimpleUploadedFile(
"plano_metas.txt", b"file_content", content_type="text/plain"
)
response = client.post(url, data={"arquivo": arquivo,
'data_publicacao': '28/06/2018',
'exclusivo_cultura': True,
'ultimo_ano_vigencia': 2000,
'periodicidade': 1,
'mesma_lei': False,
'possui_anexo': True,
'anexo_na_lei': False,
'anexo_lei': anexo_lei,
'possui_metas': True,
'metas_na_lei': False,
'arquivo_metas': arquivo_metas,
'monitorado': True,
'local_monitoramento': "Local",
'participou_curso': True,
'ano_inicio_curso': 2000,
'ano_termino_curso': 2001,
'esfera_federacao_curso': ['1'],
'tipo_oficina': ['1'],
'perfil_participante': ['1']})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
assert response.status_code == 302
assert arquivo.name.split(".")[0] in sistema_atualizado.plano.arquivo.name.split("/")[-1]
assert anexo_lei.name.split(".")[0] in sistema_atualizado.plano.anexo.arquivo.name.split("/")[-1]
assert arquivo_metas.name.split(".")[0] in sistema_atualizado.plano.metas.arquivo.name.split("/")[-1]
assert not sistema_atualizado.plano.anexo_na_lei
assert sistema_atualizado.plano.local_monitoramento == "Local"
assert sistema_atualizado.plano.ano_inicio_curso == 2000
assert sistema_atualizado.plano.ano_termino_curso == 2001
assert sistema_atualizado.plano.esfera_federacao_curso == ['1']
assert sistema_atualizado.plano.tipo_oficina == ['1']
assert sistema_atualizado.plano.perfil_participante == ['1']
assert sistema_atualizado.plano.data_publicacao == datetime.date(2018, 6, 28)
assert sistema_atualizado.plano.tipo == 4
def test_alterar_componente(client, login):
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'legislacao', 'sede', 'gestor'],
cadastrador=login, ente_federado__cod_ibge=123456)
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:alterar_legislacao", kwargs={"pk": sistema_cultura.legislacao.id})
numero_componentes = Componente.objects.count()
arquivo = SimpleUploadedFile(
"novo.txt", b"file_content", content_type="text/plain"
)
response = client.post(url, data={"arquivo": arquivo,
"data_publicacao": "25/06/2018"})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
numero_componentes_apos_update = Componente.objects.count()
assert numero_componentes == numero_componentes_apos_update
assert response.status_code == 302
assert arquivo.name.split(".")[0] in sistema_atualizado.legislacao.arquivo.name.split("/")[-1]
assert sistema_atualizado.legislacao.data_publicacao == datetime.date(2018, 6, 25)
assert sistema_atualizado.legislacao.tipo == 0
def test_alterar_fundo_cultura(client, login, cnpj):
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'fundo_cultura', 'sede', 'gestor'],
cadastrador=login, ente_federado__cod_ibge=123456)
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:alterar_fundo_cultura", kwargs={"pk": sistema_cultura.fundo_cultura.id})
numero_componentes = Componente.objects.count()
numero_fundo_cultura = FundoDeCultura.objects.count()
arquivo = SimpleUploadedFile(
"novo.txt", b"file_content", content_type="text/plain"
)
response = client.post(url, data={"mesma_lei": "False",
"possui_cnpj": "Sim",
"arquivo": arquivo,
"data_publicacao": "25/06/2018",
"cnpj": "28.134.084/0001-75"})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
numero_componentes_apos_update = Componente.objects.count()
numero_fundo_cultura_apos_update = FundoDeCultura.objects.count()
assert numero_fundo_cultura == numero_fundo_cultura_apos_update
assert numero_componentes == numero_componentes_apos_update
assert response.status_code == 302
assert arquivo.name.split(".")[0] in sistema_atualizado.fundo_cultura.arquivo.name.split("/")[-1]
assert sistema_atualizado.fundo_cultura.data_publicacao == datetime.date(2018, 6, 25)
assert sistema_atualizado.fundo_cultura.cnpj == "28.134.084/0001-75"
assert sistema_atualizado.fundo_cultura.tipo == 2
def test_alterar_fundo_cultura_remove_cnpj(client, login, cnpj):
arquivo = SimpleUploadedFile(
"novo.txt", b"file_content", content_type="text/plain"
)
comprovante = SimpleUploadedFile(
"comprovante.txt", b"file_content", content_type="text/plain"
)
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'fundo_cultura', 'sede', 'gestor'],
cadastrador=login, ente_federado__cod_ibge=123456)
sistema_cultura.fundo_cultura.cnpj = "56.385.239/0001-81"
sistema_cultura.fundo_cultura.comprovante_cnpj = mommy.make("ArquivoComponente2")
sistema_cultura.fundo_cultura.save()
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:alterar_fundo_cultura", kwargs={"pk": sistema_cultura.fundo_cultura.id})
numero_componentes = Componente.objects.count()
numero_fundo_cultura = FundoDeCultura.objects.count()
response = client.post(url, data={"mesma_lei": "False",
"possui_cnpj": "False",
"arquivo": arquivo,
"data_publicacao": "25/06/2018",
"cnpj": "28.134.084/0001-75",
"comprovante": comprovante})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
numero_componentes_apos_update = Componente.objects.count()
numero_fundo_cultura_apos_update = FundoDeCultura.objects.count()
assert numero_fundo_cultura == numero_fundo_cultura_apos_update
assert numero_componentes == numero_componentes_apos_update
assert response.status_code == 302
assert arquivo.name.split(".")[0] in sistema_atualizado.fundo_cultura.arquivo.name.split("/")[-1]
assert sistema_atualizado.fundo_cultura.data_publicacao == datetime.date(2018, 6, 25)
assert sistema_atualizado.fundo_cultura.cnpj == None
assert sistema_atualizado.fundo_cultura.comprovante_cnpj == None
assert sistema_atualizado.fundo_cultura.tipo == 2
def test_alterar_conselho_cultura(client, login):
componente = mommy.make("ConselhoDeCultura", tipo=3, _fill_optional=True)
sistema_cultura = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'sede', 'gestor'],
cadastrador=login, conselho=componente, ente_federado__cod_ibge=123456)
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:alterar_conselho", kwargs={"pk": sistema_cultura.conselho.id})
numero_componentes = Componente.objects.count()
numero_conselho_cultura = ConselhoDeCultura.objects.count()
arquivo_lei = SimpleUploadedFile(
"novo_lei.txt", b"file_content", content_type="text/plain"
)
arquivo_ata = SimpleUploadedFile(
"novo_ata.txt", b"file_content", content_type="text/plain"
)
response = client.post(url, data={"mesma_lei": False,
"arquivo": arquivo_ata,
"data_publicacao": "25/06/2018",
"arquivo_lei": arquivo_lei,
"data_publicacao_lei": "26/06/2018",
'possui_ata': True,
'exclusivo_cultura': True,
'paritario': True})
sistema_atualizado = SistemaCultura.sistema.get(
ente_federado__nome=sistema_cultura.ente_federado.nome)
numero_componentes_apos_update = Componente.objects.count()
numero_conselho_cultura_apos_update = ConselhoDeCultura.objects.count()
assert numero_conselho_cultura == numero_conselho_cultura_apos_update
assert numero_componentes == numero_componentes_apos_update
assert response.status_code == 302
assert arquivo_ata.name.split(".")[0] in sistema_atualizado.conselho.arquivo.name.split("/")[-1]
assert arquivo_lei.name.split(".")[0] in sistema_atualizado.conselho.lei.arquivo.name.split("/")[-1]
assert sistema_atualizado.conselho.data_publicacao == datetime.date(2018, 6, 25)
assert sistema_atualizado.conselho.lei.data_publicacao == datetime.date(2018, 6, 26)
assert sistema_atualizado.conselho.tipo == 3
def test_criar_conselheiro(client, login):
sistema = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'conselho', 'sede', 'gestor'],
cadastrador=login, ente_federado__cod_ibge=123456)
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:criar_conselheiro", kwargs={'conselho': sistema.conselho.id})
response = client.post(url, data={"nome": "teste",
"segmento": "20", "email": "[email protected]"})
conselheiro = Conselheiro.objects.last()
assert conselheiro.nome == "teste"
assert conselheiro.segmento == "Teatro"
assert conselheiro.email == "[email protected]"
assert conselheiro.conselho == sistema.conselho
def test_alterar_conselheiro(client, login):
sistema = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'conselho', 'sede', 'gestor'],
cadastrador=login, ente_federado__cod_ibge=123456)
conselheiro = mommy.make("Conselheiro", conselho=sistema.conselho)
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:alterar_conselheiro", kwargs={'pk': conselheiro.id})
response = client.post(url, data={"nome": "teste",
"segmento": "20", "email": "[email protected]"})
conselheiro.refresh_from_db()
assert conselheiro.nome == "teste"
assert conselheiro.segmento == "Teatro"
assert conselheiro.email == "[email protected]"
assert conselheiro.conselho == sistema.conselho
def test_remover_conselheiro(client, login):
sistema = mommy.make("SistemaCultura", _fill_optional=['ente_federado', 'conselho', 'sede', 'gestor'],
cadastrador=login, ente_federado__cod_ibge=123456)
conselheiro = mommy.make("Conselheiro", conselho=sistema.conselho)
url = reverse("adesao:home")
client.get(url)
url = reverse("planotrabalho:remover_conselheiro", kwargs={'pk': conselheiro.id})
response = client.post(url)
conselheiro.refresh_from_db()
assert conselheiro.situacao == '0'
| agpl-3.0 | 2,829,405,920,098,751,500 | 42.32 | 132 | 0.63514 | false |
Krissbro/LondonGaymers | serverquotes/serverquotes.py | 1 | 7819 | import discord
from discord.ext import commands
from cogs.utils.dataIO import dataIO
from .utils import checks
from .utils.chat_formatting import escape_mass_mentions, pagify
import os
from random import choice as randchoice
try:
from tabulate import tabulate
except Exception as e:
raise RuntimeError("You must run `pip3 install tabulate`.") from e
PATH = 'data/serverquotes/'
JSON = PATH + 'quotes.json'
class ServerQuotes:
def __init__(self, bot):
self.bot = bot
self.quotes = dataIO.load_json(JSON)
def _get_random_quote(self, ctx):
sid = ctx.message.server.id
if sid not in self.quotes or len(self.quotes[sid]) == 0:
raise AssertionError("There are no quotes in this server!")
quotes = list(enumerate(self.quotes[sid]))
return randchoice(quotes)
def _get_random_author_quote(self, ctx, author):
sid = ctx.message.server.id
if sid not in self.quotes or len(self.quotes[sid]) == 0:
raise AssertionError("There are no quotes in this server!")
if isinstance(author, discord.User):
uid = author.id
quotes = [(i,q) for i,q in enumerate(self.quotes[sid]) if q['author_id'] == uid]
else:
quotes = [(i,q) for i,q in enumerate(self.quotes[sid]) if q['author_name'] == author]
if len(quotes) == 0:
raise commands.BadArgument("There are no quotes by %s." % author)
return randchoice(quotes)
def _add_quote(self, ctx, author, message):
sid = ctx.message.server.id
aid = ctx.message.author.id
if sid not in self.quotes:
self.quotes[sid] = []
author_name = 'Unknown'
author_id = None
if isinstance(author, discord.User):
author_name = author.display_name
author_id = author.id
elif isinstance(author, str):
author_name = author
quote = {'added_by': aid,
'author_name': author_name,
'author_id': author_id,
'text': escape_mass_mentions(message)}
self.quotes[sid].append(quote)
dataIO.save_json(JSON, self.quotes)
def _quote_author(self, ctx, quote):
if quote['author_id']:
name = self._get_name_by_id(ctx, quote['author_id'])
if quote['author_name'] and not name:
name = quote['author_name']
name += " (non-present user ID#%s)" % (quote['author_id'])
return name
elif quote['author_name']:
return quote['author_name']
else:
return "Unknown"
def _format_quote(self, ctx, quote):
qid, quote = quote
author = self._quote_author(ctx, quote)
return '"%s"\n—%s (quote #%i)' % (quote['text'], author, qid + 1)
def _get_name_by_id(self, ctx, uid):
member = discord.utils.get(ctx.message.server.members, id=uid)
if member:
return member.display_name
else:
return None
def _get_quote(self, ctx, author_or_num=None):
sid = ctx.message.server.id
if type(author_or_num) is discord.Member:
return self._get_random_author_quote(ctx, author_or_num)
if author_or_num:
try:
quote_id = int(author_or_num)
if quote_id > 0 and quote_id <= len(self.quotes[sid]):
return (quote_id - 1, self.quotes[sid][quote_id - 1])
else:
raise commands.BadArgument("Quote #%i does not exist." % quote_id)
except ValueError:
pass
try:
author = commands.MemberConverter(ctx, author_or_num).convert()
except commands.errors.BadArgument:
author = author_or_num.strip(' \t\n\r\x0b\x0c-–—') # whitespace + dashes
return self._get_random_author_quote(ctx, author)
return self._get_random_quote(ctx)
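    # Editor's note (not part of the original cog): resolution order applied
    # by _get_quote, shown on hypothetical inputs.
    #
    #     _get_quote(ctx)          -> random quote from the whole server list
    #     _get_quote(ctx, '3')     -> quote #3 (1-based index)
    #     _get_quote(ctx, member)  -> random quote attributed to that member
    #     _get_quote(ctx, 'Ada')   -> random quote stored under that author name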
@commands.command(pass_context=True, no_pm=True)
@checks.serverowner_or_permissions(administrator=True)
async def rmquote(self, ctx, num: int):
"""Deletes a quote by its number
Use [p]lsquotes to find quote numbers
        Example: [p]rmquote 3"""
sid = ctx.message.server.id
if num > 0 and num <= len(self.quotes[sid]):
del self.quotes[sid][num-1]
await self.bot.say("Quote #%i deleted." % num)
dataIO.save_json(JSON, self.quotes)
else:
await self.bot.say("Quote #%i does not exist." % num)
@commands.command(pass_context=True, no_pm=True)
async def lsquotes(self, ctx):
"""Displays a list of all quotes"""
sid = ctx.message.server.id
if sid not in self.quotes:
raise commands.UserInputError("There are no quotes in this server!")
quotes = self.quotes[sid]
header = ['#', 'Author', 'Added by', 'Quote']
table = []
for i, q in enumerate(quotes):
text = q['text']
if len(text) > 60:
text = text[:60 - 3] + '...'
name = self._get_name_by_id(ctx, q['added_by'])
if not name:
name = "(non-present user ID#%s)" % q['added_by']
table.append((i + 1, self._quote_author(ctx, q), name, text))
tabulated = tabulate(table, header)
for page in pagify(tabulated, ['\n']):
await self.bot.whisper('```\n%s\n```' % page)
@commands.command(pass_context=True, no_pm=True)
@checks.serverowner_or_permissions(administrator=True)
async def addquote(self, ctx, message: str, *, author: str = None):
"""Adds a quote to the server quote list. The quote must be enclosed
in \"double quotes\". If a member mention or name is the last argument,
the quote will be stored as theirs. If not, the last argument will
be stored as the quote's author. If left empty, "Unknown" is used.
"""
if author:
try:
author = commands.MemberConverter(ctx, author).convert()
except commands.errors.BadArgument:
author = author.strip(' \t\n\r\x0b\x0c-–—') # whitespace + dashes
pass
self._add_quote(ctx, author, message)
await self.bot.say("Quote added.")
@commands.command(pass_context=True, no_pm=True)
@commands.cooldown(6, 60, commands.BucketType.channel)
async def quote(self, ctx, *, author_or_num: str = None):
"""Say a stored quote!
Without any arguments, this command randomly selects from all stored
quotes. If you supply an author name, it randomly selects from among
that author's quotes. Finally, if given a number, that specific quote
will be said, assuming it exists. Use [p]lsquotes to show all quotes where [p] is the prefix.
"""
sid = ctx.message.server.id
if sid not in self.quotes or len(self.quotes[sid]) == 0:
await self.bot.say("There are no quotes in this server!")
return
try:
quote = self._get_quote(ctx, author_or_num)
except commands.BadArgument:
if author_or_num.lower().strip() in ['me', 'myself', 'self']:
quote = self._get_quote(ctx, ctx.message.author)
else:
raise
await self.bot.say(self._format_quote(ctx, quote))
def check_folder():
if not os.path.exists(PATH):
print("Creating serverquotes folder...")
os.makedirs(PATH)
def check_file():
if not dataIO.is_valid_json(JSON):
print("Creating default quotes.json...")
dataIO.save_json(JSON, {})
def setup(bot):
check_folder()
check_file()
n = ServerQuotes(bot)
bot.add_cog(n)
| gpl-3.0 | 7,994,293,379,458,089,000 | 35.490654 | 101 | 0.577667 | false |
stormi/tsunami | src/secondaires/crafting/rang.py | 1 | 5518 | # -*-coding:Utf-8 -*
# Copyright (c) 2015 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant la classe Rang, détaillée plus bas."""
from abstraits.obase import BaseObj
from secondaires.crafting.exception import ExceptionCrafting
from secondaires.crafting.recette import Recette
class Rang(BaseObj):
"""Classe représentant un rang de guilde."""
def __init__(self, guilde, cle):
"""Constructeur de la fiche."""
BaseObj.__init__(self)
self.guilde = guilde
self.cle = cle
self.nom = "rang inconnu"
self.points_guilde = 10
self.recettes = []
self._construire()
def __getnewargs__(self):
return (None, "")
@property
def total_points_guilde(self):
"""Retourne les points de guilde consommés pour arriver à ce rang.
Si le rang a des prédécesseurs, retourne la somme des
points de guilde nécessités pour atteindre ce rang. Par
exemple, si un membre est au rang 2, il faut additionner
les points de guilde du rang 1 et du rang 2.
"""
        # Find the predecessor ranks
guilde = self.guilde
try:
indice = guilde.rangs.index(self)
except ValueError:
raise RangIntrouvable("le rang {} ne peut être trouvé " \
"dans la guilde {}".format(self.cle, guilde.cle))
precedents = guilde.rangs[:indice]
return sum(p.points_guilde for p in precedents) + self.points_guilde
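    # Editor's worked example (not part of the original module): with three
    # ranks costing 10, 20 and 30 points_guilde, total_points_guilde for the
    # third rank is 10 + 20 + 30 = 60.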
@property
def membres(self):
"""Retourne la liste des membres (personnages) à ce rang."""
progressions = self.guilde.membres.values()
membres = []
for progression in progressions:
if progression.rang is self:
membres.append(progression.membre)
return membres
@property
def nom_complet(self):
"""Retourne le nom complet du rang."""
membres = self.membres
ps = "s" if self.points_guilde > 1 else ""
ms = "s" if len(membres) > 1 else ""
msg = "{}, nom : {}, {} point{ps} de guilde ({} accumulés), " \
"{} membre{ms}".format(self.cle, self.nom, self.points_guilde,
self.total_points_guilde, len(membres), ps=ps, ms=ms)
return msg
@property
def rangs_parents(self):
"""Retourne les rangs parents, incluant self."""
guilde = self.guilde
try:
indice = guilde.rangs.index(self)
except ValueError:
raise RangIntrouvable("le rang {} ne peut être trouvé " \
"dans la guilde {}".format(self.cle, guilde.cle))
return guilde.rangs[:indice + 1]
def get_recette(self, cle, exception=True):
"""Récupère la recette correspondant à la clé.
La clé est celle du résultat.
"""
cle = cle.lower()
for recette in self.recettes:
if recette.resultat == cle:
return recette
if exception:
raise ValueError("Recette {} inconnue".format(repr(cle)))
def ajouter_recette(self, resultat):
"""Ajoute une recette.
Le résultat doit être la clé du prototype de résultat.
"""
if self.get_recette(resultat, False):
raise ValueError("La recette {} existe déjà".format(
repr(resultat)))
recette = Recette(self)
recette.resultat = resultat
self.recettes.append(recette)
return recette
def supprimer_recette(self, cle):
"""Retire la recette spécifiée."""
cle = cle.lower()
for recette in list(self.recettes):
if recette.resultat == cle:
self.recettes.remove(recette)
recette.detruire()
return
raise ValueError("Recette {} introuvable".format(repr(cle)))
class RangIntrouvable(ExceptionCrafting):
"""Exception levée si le rang de la guilde est introuvable."""
pass
| bsd-3-clause | -3,393,670,967,765,601,300 | 33.71519 | 79 | 0.643209 | false |
hastexo/edx-platform | openedx/core/djangoapps/content/course_overviews/models.py | 1 | 35401 | """
Declaration of CourseOverview model
"""
import json
import logging
from urlparse import urlparse, urlunparse
from django.conf import settings
from django.db import models, transaction
from django.db.models.fields import BooleanField, DateTimeField, DecimalField, TextField, FloatField, IntegerField
from django.db.utils import IntegrityError
from django.template import defaultfilters
from ccx_keys.locator import CCXLocator
from model_utils.models import TimeStampedModel
from config_models.models import ConfigurationModel
from lms.djangoapps import django_comment_client
from openedx.core.djangoapps.catalog.models import CatalogIntegration
from openedx.core.djangoapps.lang_pref.api import get_closest_released_language
from openedx.core.djangoapps.models.course_details import CourseDetails
from static_replace.models import AssetBaseUrlConfig
from xmodule import course_metadata_utils, block_metadata_utils
from xmodule.course_module import CourseDescriptor, DEFAULT_START_DATE
from xmodule.error_module import ErrorDescriptor
from xmodule.modulestore.django import modulestore
from openedx.core.djangoapps.xmodule_django.models import CourseKeyField, UsageKeyField
log = logging.getLogger(__name__)
class CourseOverview(TimeStampedModel):
"""
Model for storing and caching basic information about a course.
This model contains basic course metadata such as an ID, display name,
image URL, and any other information that would be necessary to display
a course as part of:
user dashboard (enrolled courses)
course catalog (courses to enroll in)
course about (meta data about the course)
"""
class Meta(object):
app_label = 'course_overviews'
# IMPORTANT: Bump this whenever you modify this model and/or add a migration.
VERSION = 6
# Cache entry versioning.
version = IntegerField()
# Course identification
id = CourseKeyField(db_index=True, primary_key=True, max_length=255)
_location = UsageKeyField(max_length=255)
org = TextField(max_length=255, default='outdated_entry')
display_name = TextField(null=True)
display_number_with_default = TextField()
display_org_with_default = TextField()
# Start/end dates
start = DateTimeField(null=True)
end = DateTimeField(null=True)
advertised_start = TextField(null=True)
announcement = DateTimeField(null=True)
# URLs
course_image_url = TextField()
social_sharing_url = TextField(null=True)
end_of_course_survey_url = TextField(null=True)
# Certification data
certificates_display_behavior = TextField(null=True)
certificates_show_before_end = BooleanField(default=False)
cert_html_view_enabled = BooleanField(default=False)
has_any_active_web_certificate = BooleanField(default=False)
cert_name_short = TextField()
cert_name_long = TextField()
certificate_available_date = DateTimeField(default=None, null=True)
# Grading
lowest_passing_grade = DecimalField(max_digits=5, decimal_places=2, null=True)
# Access parameters
days_early_for_beta = FloatField(null=True)
mobile_available = BooleanField(default=False)
visible_to_staff_only = BooleanField(default=False)
_pre_requisite_courses_json = TextField() # JSON representation of list of CourseKey strings
# Enrollment details
enrollment_start = DateTimeField(null=True)
enrollment_end = DateTimeField(null=True)
enrollment_domain = TextField(null=True)
invitation_only = BooleanField(default=False)
max_student_enrollments_allowed = IntegerField(null=True)
# Catalog information
catalog_visibility = TextField(null=True)
short_description = TextField(null=True)
course_video_url = TextField(null=True)
effort = TextField(null=True)
self_paced = BooleanField(default=False)
marketing_url = TextField(null=True)
eligible_for_financial_aid = BooleanField(default=True)
language = TextField(null=True)
@classmethod
def _create_or_update(cls, course):
"""
Creates or updates a CourseOverview object from a CourseDescriptor.
Does not touch the database, simply constructs and returns an overview
from the given course.
Arguments:
course (CourseDescriptor): any course descriptor object
Returns:
CourseOverview: created or updated overview extracted from the given course
"""
from lms.djangoapps.certificates.api import get_active_web_certificate
from openedx.core.lib.courses import course_image_url
# Workaround for a problem discovered in https://openedx.atlassian.net/browse/TNL-2806.
# If the course has a malformed grading policy such that
# course._grading_policy['GRADE_CUTOFFS'] = {}, then
# course.lowest_passing_grade will raise a ValueError.
# Work around this for now by defaulting to None.
try:
lowest_passing_grade = course.lowest_passing_grade
except ValueError:
lowest_passing_grade = None
display_name = course.display_name
start = course.start
end = course.end
max_student_enrollments_allowed = course.max_student_enrollments_allowed
if isinstance(course.id, CCXLocator):
from lms.djangoapps.ccx.utils import get_ccx_from_ccx_locator
ccx = get_ccx_from_ccx_locator(course.id)
display_name = ccx.display_name
start = ccx.start
end = ccx.due
max_student_enrollments_allowed = ccx.max_student_enrollments_allowed
course_overview = cls.objects.filter(id=course.id)
if course_overview.exists():
log.info('Updating course overview for %s.', unicode(course.id))
course_overview = course_overview.first()
else:
log.info('Creating course overview for %s.', unicode(course.id))
course_overview = cls()
course_overview.version = cls.VERSION
course_overview.id = course.id
course_overview._location = course.location
course_overview.org = course.location.org
course_overview.display_name = display_name
course_overview.display_number_with_default = course.display_number_with_default
course_overview.display_org_with_default = course.display_org_with_default
course_overview.start = start
course_overview.end = end
course_overview.advertised_start = course.advertised_start
course_overview.announcement = course.announcement
course_overview.course_image_url = course_image_url(course)
course_overview.social_sharing_url = course.social_sharing_url
course_overview.certificates_display_behavior = course.certificates_display_behavior
course_overview.certificates_show_before_end = course.certificates_show_before_end
course_overview.cert_html_view_enabled = course.cert_html_view_enabled
course_overview.has_any_active_web_certificate = (get_active_web_certificate(course) is not None)
course_overview.cert_name_short = course.cert_name_short
course_overview.cert_name_long = course.cert_name_long
course_overview.certificate_available_date = course.certificate_available_date
course_overview.lowest_passing_grade = lowest_passing_grade
course_overview.end_of_course_survey_url = course.end_of_course_survey_url
course_overview.days_early_for_beta = course.days_early_for_beta
course_overview.mobile_available = course.mobile_available
course_overview.visible_to_staff_only = course.visible_to_staff_only
course_overview._pre_requisite_courses_json = json.dumps(course.pre_requisite_courses)
course_overview.enrollment_start = course.enrollment_start
course_overview.enrollment_end = course.enrollment_end
course_overview.enrollment_domain = course.enrollment_domain
course_overview.invitation_only = course.invitation_only
course_overview.max_student_enrollments_allowed = max_student_enrollments_allowed
course_overview.catalog_visibility = course.catalog_visibility
course_overview.short_description = CourseDetails.fetch_about_attribute(course.id, 'short_description')
course_overview.effort = CourseDetails.fetch_about_attribute(course.id, 'effort')
course_overview.course_video_url = CourseDetails.fetch_video_url(course.id)
course_overview.self_paced = course.self_paced
if not CatalogIntegration.is_enabled():
course_overview.language = course.language
return course_overview
@classmethod
def load_from_module_store(cls, course_id):
"""
Load a CourseDescriptor, create or update a CourseOverview from it, cache the
overview, and return it.
Arguments:
course_id (CourseKey): the ID of the course overview to be loaded.
Returns:
CourseOverview: overview of the requested course.
Raises:
- CourseOverview.DoesNotExist if the course specified by course_id
was not found.
- IOError if some other error occurs while trying to load the
course from the module store.
"""
store = modulestore()
with store.bulk_operations(course_id):
course = store.get_course(course_id)
if isinstance(course, CourseDescriptor):
course_overview = cls._create_or_update(course)
try:
with transaction.atomic():
course_overview.save()
# Remove and recreate all the course tabs
CourseOverviewTab.objects.filter(course_overview=course_overview).delete()
CourseOverviewTab.objects.bulk_create([
CourseOverviewTab(tab_id=tab.tab_id, course_overview=course_overview)
for tab in course.tabs
])
# Remove and recreate course images
CourseOverviewImageSet.objects.filter(course_overview=course_overview).delete()
CourseOverviewImageSet.create(course_overview, course)
except IntegrityError:
# There is a rare race condition that will occur if
                    # CourseOverview.get_from_id is called while another
                    # identical overview is already in the process
# of being created.
# One of the overviews will be saved normally, while the
# other one will cause an IntegrityError because it tries
# to save a duplicate.
# (see: https://openedx.atlassian.net/browse/TNL-2854).
pass
except Exception: # pylint: disable=broad-except
log.exception(
"CourseOverview for course %s failed!",
course_id,
)
raise
return course_overview
elif course is not None:
                raise IOError(
                    "Error while loading course {} from the module store: {}".format(
                        unicode(course_id),
                        course.error_msg if isinstance(course, ErrorDescriptor) else unicode(course),
                    )
                )
else:
raise cls.DoesNotExist()
@classmethod
def get_from_id(cls, course_id):
"""
Load a CourseOverview object for a given course ID.
First, we try to load the CourseOverview from the database. If it
doesn't exist, we load the entire course from the modulestore, create a
CourseOverview object from it, and then cache it in the database for
future use.
Arguments:
course_id (CourseKey): the ID of the course overview to be loaded.
Returns:
CourseOverview: overview of the requested course.
Raises:
- CourseOverview.DoesNotExist if the course specified by course_id
was not found.
- IOError if some other error occurs while trying to load the
course from the module store.
"""
try:
course_overview = cls.objects.select_related('image_set').get(id=course_id)
if course_overview.version < cls.VERSION:
# Throw away old versions of CourseOverview, as they might contain stale data.
course_overview.delete()
course_overview = None
except cls.DoesNotExist:
course_overview = None
# Regenerate the thumbnail images if they're missing (either because
# they were never generated, or because they were flushed out after
        # a change to CourseOverviewImageConfig).
if course_overview and not hasattr(course_overview, 'image_set'):
CourseOverviewImageSet.create(course_overview)
return course_overview or cls.load_from_module_store(course_id)
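    # Illustrative usage (the course key value is made up):
    #
    #   from opaque_keys.edx.keys import CourseKey
    #   overview = CourseOverview.get_from_id(
    #       CourseKey.from_string('course-v1:edX+DemoX+Demo_2014'))
    #   print overview.display_name_with_default
    #
    # The first call per course hits the modulestore and caches the result in
    # the database; later calls read the cached row until VERSION is bumped.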
@classmethod
def get_from_ids_if_exists(cls, course_ids):
"""
Return a dict mapping course_ids to CourseOverviews, if they exist.
This method will *not* generate new CourseOverviews or delete outdated
ones. It exists only as a small optimization used when CourseOverviews
are known to exist, for common situations like the student dashboard.
Callers should assume that this list is incomplete and fall back to
get_from_id if they need to guarantee CourseOverview generation.
"""
return {
overview.id: overview
for overview
in cls.objects.select_related('image_set').filter(
id__in=course_ids,
version__gte=cls.VERSION
)
}
def clean_id(self, padding_char='='):
"""
Returns a unique deterministic base32-encoded ID for the course.
Arguments:
padding_char (str): Character used for padding at end of base-32
-encoded string, defaulting to '='
"""
return course_metadata_utils.clean_course_key(self.location.course_key, padding_char)
@property
def location(self):
"""
Returns the UsageKey of this course.
UsageKeyField has a strange behavior where it fails to parse the "run"
of a course out of the serialized form of a Mongo Draft UsageKey. This
method is a wrapper around _location attribute that fixes the problem
by calling map_into_course, which restores the run attribute.
"""
if self._location.run is None:
self._location = self._location.map_into_course(self.id)
return self._location
@property
def number(self):
"""
Returns this course's number.
This is a "number" in the sense of the "course numbers" that you see at
lots of universities. For example, given a course
"Intro to Computer Science" with the course key "edX/CS-101/2014", the
course number would be "CS-101"
"""
return course_metadata_utils.number_for_course_location(self.location)
@property
def url_name(self):
"""
Returns this course's URL name.
"""
return block_metadata_utils.url_name_for_block(self)
@property
def display_name_with_default(self):
"""
Return reasonable display name for the course.
"""
return block_metadata_utils.display_name_with_default(self)
@property
def display_name_with_default_escaped(self):
"""
DEPRECATED: use display_name_with_default
Return html escaped reasonable display name for the course.
Note: This newly introduced method should not be used. It was only
introduced to enable a quick search/replace and the ability to slowly
migrate and test switching to display_name_with_default, which is no
longer escaped.
"""
return block_metadata_utils.display_name_with_default_escaped(self)
@property
def dashboard_start_display(self):
"""
        Return start date to display on the learner's dashboard, preferably `Course Advertised Start`
"""
return self.advertised_start or self.start
def has_started(self):
"""
        Returns whether the course has started.
"""
return course_metadata_utils.has_course_started(self.start)
def has_ended(self):
"""
Returns whether the course has ended.
"""
return course_metadata_utils.has_course_ended(self.end)
def has_marketing_url(self):
"""
Returns whether the course has marketing url.
"""
return settings.FEATURES.get('ENABLE_MKTG_SITE') and bool(self.marketing_url)
def has_social_sharing_url(self):
"""
Returns whether the course has social sharing url.
"""
is_social_sharing_enabled = getattr(settings, 'SOCIAL_SHARING_SETTINGS', {}).get('CUSTOM_COURSE_URLS')
return is_social_sharing_enabled and bool(self.social_sharing_url)
def starts_within(self, days):
"""
Returns True if the course starts with-in given number of days otherwise returns False.
"""
return course_metadata_utils.course_starts_within(self.start, days)
@property
def start_date_is_still_default(self):
"""
Checks if the start date set for the course is still default, i.e.
.start has not been modified, and .advertised_start has not been set.
"""
return course_metadata_utils.course_start_date_is_default(
self.start,
self.advertised_start,
)
@property
def sorting_score(self):
"""
Returns a tuple that can be used to sort the courses according
the how "new" they are. The "newness" score is computed using a
heuristic that takes into account the announcement and
(advertised) start dates of the course if available.
The lower the number the "newer" the course.
"""
return course_metadata_utils.sorting_score(self.start, self.advertised_start, self.announcement)
@property
def start_type(self):
"""
Returns the type of the course's 'start' field.
"""
if self.advertised_start:
return u'string'
elif self.start != DEFAULT_START_DATE:
return u'timestamp'
else:
return u'empty'
@property
def start_display(self):
"""
Returns the display value for the course's start date.
"""
if self.advertised_start:
return self.advertised_start
elif self.start != DEFAULT_START_DATE:
return defaultfilters.date(self.start, "DATE_FORMAT")
else:
return None
def may_certify(self):
"""
Returns whether it is acceptable to show the student a certificate
download link.
"""
return course_metadata_utils.may_certify_for_course(
self.certificates_display_behavior,
self.certificates_show_before_end,
self.has_ended(),
self.certificate_available_date,
self.self_paced
)
@property
def pre_requisite_courses(self):
"""
Returns a list of ID strings for this course's prerequisite courses.
"""
return json.loads(self._pre_requisite_courses_json)
@pre_requisite_courses.setter
def pre_requisite_courses(self, value):
"""
Django requires there be a setter for this, but it is not
necessary for the way we currently use it. Due to the way
CourseOverviews are constructed raising errors here will
cause a lot of issues. These should not be mutable after
construction, so for now we just eat this.
"""
pass
@classmethod
def update_select_courses(cls, course_keys, force_update=False):
"""
A side-effecting method that updates CourseOverview objects for
the given course_keys.
Arguments:
course_keys (list[CourseKey]): Identifies for which courses to
return CourseOverview objects.
force_update (boolean): Optional parameter that indicates
whether the requested CourseOverview objects should be
forcefully updated (i.e., re-synched with the modulestore).
"""
log.info('Generating course overview for %d courses.', len(course_keys))
log.debug('Generating course overview(s) for the following courses: %s', course_keys)
action = CourseOverview.load_from_module_store if force_update else CourseOverview.get_from_id
for course_key in course_keys:
try:
action(course_key)
except Exception as ex: # pylint: disable=broad-except
log.exception(
'An error occurred while generating course overview for %s: %s',
unicode(course_key),
ex.message,
)
log.info('Finished generating course overviews.')
@classmethod
def get_all_courses(cls, orgs=None, filter_=None):
"""
Returns all CourseOverview objects in the database.
Arguments:
orgs (list[string]): Optional parameter that allows case-insensitive
filtering by organization.
filter_ (dict): Optional parameter that allows custom filtering.
"""
        # Note: If a newly created course is not returned in this queryset,
# make sure the "publish" signal was emitted when the course was
# created. For tests using CourseFactory, use emit_signals=True.
course_overviews = CourseOverview.objects.all()
if orgs:
# In rare cases, courses belonging to the same org may be accidentally assigned
# an org code with a different casing (e.g., Harvardx as opposed to HarvardX).
# Case-insensitive matching allows us to deal with this kind of dirty data.
course_overviews = course_overviews.filter(org__iregex=r'(' + '|'.join(orgs) + ')')
if filter_:
course_overviews = course_overviews.filter(**filter_)
return course_overviews
@classmethod
def get_all_course_keys(cls):
"""
Returns all course keys from course overviews.
"""
return CourseOverview.objects.values_list('id', flat=True)
def is_discussion_tab_enabled(self):
"""
Returns True if course has discussion tab and is enabled
"""
tabs = self.tabs.all()
        # a module-level import would create a circular import; hence is_discussion_enabled is referenced explicitly
for tab in tabs:
if tab.tab_id == "discussion" and django_comment_client.utils.is_discussion_enabled(self.id):
return True
return False
@property
def image_urls(self):
"""
Return a dict with all known URLs for this course image.
Current resolutions are:
raw = original upload from the user
small = thumbnail with dimensions CourseOverviewImageConfig.current().small
large = thumbnail with dimensions CourseOverviewImageConfig.current().large
If no thumbnails exist, the raw (originally uploaded) image will be
returned for all resolutions.
"""
# This is either the raw image that the course team uploaded, or the
# settings.DEFAULT_COURSE_ABOUT_IMAGE_URL if they didn't specify one.
raw_image_url = self.course_image_url
# Default all sizes to return the raw image if there is no
# CourseOverviewImageSet associated with this CourseOverview. This can
# happen because we're disabled via CourseOverviewImageConfig.
urls = {
'raw': raw_image_url,
'small': raw_image_url,
'large': raw_image_url,
}
# If we do have a CourseOverviewImageSet, we still default to the raw
# images if our thumbnails are blank (might indicate that there was a
# processing error of some sort while trying to generate thumbnails).
if hasattr(self, 'image_set') and CourseOverviewImageConfig.current().enabled:
urls['small'] = self.image_set.small_url or raw_image_url
urls['large'] = self.image_set.large_url or raw_image_url
return self.apply_cdn_to_urls(urls)
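    # Illustrative return value (URLs are made up):
    #
    #   {
    #       'raw':   '/asset-v1:Org+Course+Run+type@asset+block@course.jpg',
    #       'small': '/thumbnails/course_375x200.jpg',
    #       'large': '/thumbnails/course_750x400.jpg',
    #   }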
@property
def pacing(self):
""" Returns the pacing for the course.
Potential values:
self: Self-paced courses
instructor: Instructor-led courses
"""
return 'self' if self.self_paced else 'instructor'
@property
def closest_released_language(self):
"""
        Returns the language code that most closely matches this course's language and is fully
supported by the LMS, or None if there are no fully supported languages that
match the target.
"""
return get_closest_released_language(self.language) if self.language else None
def apply_cdn_to_urls(self, image_urls):
"""
Given a dict of resolutions -> urls, return a copy with CDN applied.
If CDN does not exist or is disabled, just returns the original. The
URLs that we store in CourseOverviewImageSet are all already top level
paths, so we don't need to go through the /static remapping magic that
happens with other course assets. We just need to add the CDN server if
appropriate.
"""
cdn_config = AssetBaseUrlConfig.current()
if not cdn_config.enabled:
return image_urls
base_url = cdn_config.base_url
return {
resolution: self._apply_cdn_to_url(url, base_url)
for resolution, url in image_urls.items()
}
def _apply_cdn_to_url(self, url, base_url):
"""
Applies a new CDN/base URL to the given URL.
If a URL is absolute, we skip switching the host since it could
be a hostname that isn't behind our CDN, and we could unintentionally
break the URL overall.
"""
# The URL can't be empty.
if not url:
return url
_, netloc, path, params, query, fragment = urlparse(url)
# If this is an absolute URL, just return it as is. It could be a domain
# that isn't ours, and thus CDNing it would actually break it.
if netloc:
return url
return urlunparse((None, base_url, path, params, query, fragment))
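    # Illustrative behavior (URLs are made up):
    #
    #   self._apply_cdn_to_url('/static/course.jpg', 'cdn.example.com')
    #       -> '//cdn.example.com/static/course.jpg'
    #   self._apply_cdn_to_url('http://other.org/img.jpg', 'cdn.example.com')
    #       -> 'http://other.org/img.jpg'   (absolute URLs are left as-is)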
def __unicode__(self):
"""Represent ourselves with the course key."""
return unicode(self.id)
class CourseOverviewTab(models.Model):
"""
Model for storing and caching tabs information of a course.
"""
tab_id = models.CharField(max_length=50)
course_overview = models.ForeignKey(CourseOverview, db_index=True, related_name="tabs")
class CourseOverviewImageSet(TimeStampedModel):
"""
Model for Course overview images. Each column is an image type/size.
You should basically never use this class directly. Read from
CourseOverview.image_urls instead.
Special Notes on Deployment/Rollback/Changes:
1. By default, this functionality is disabled. To turn it on, you have to
create a CourseOverviewImageConfig entry via Django Admin and select
enabled=True.
2. If it is enabled in configuration, it will lazily create thumbnails as
individual CourseOverviews are requested. This is independent of the
CourseOverview's cls.VERSION scheme. This is to better support the use
case where someone might want to change the thumbnail resolutions for
their theme -- we didn't want to tie the code-based data schema of
CourseOverview to configuration changes.
3. A CourseOverviewImageSet is automatically deleted when the CourseOverview
it belongs to is deleted. So it will be regenerated whenever there's a
new publish or the CourseOverview schema version changes. It's not
particularly smart about this, and will just re-write the same thumbnails
over and over to the same location without checking to see if there were
changes.
4. Just because a CourseOverviewImageSet is successfully created does not
mean that any thumbnails exist. There might have been a processing error,
or there might simply be no source image to create a thumbnail out of.
In this case, accessing CourseOverview.image_urls will return the value
for course.course_image_url for all resolutions. CourseOverviewImageSet
will *not* try to regenerate if there is a model entry with blank values
for the URLs -- the assumption is that either there's no data there or
something has gone wrong and needs fixing in code.
5. If you want to change thumbnail resolutions, you need to create a new
CourseOverviewImageConfig with the desired dimensions and then wipe the
values in CourseOverviewImageSet.
Logical next steps that I punted on for this first cut:
1. Converting other parts of the app to use this.
Our first cut only affects About Pages and the Student Dashboard. But
most places that use course_image_url() should be converted -- e.g.
course discovery, mobile, etc.
2. Center cropping the image before scaling.
This is desirable, but it involves a few edge cases (what the rounding
policy is, what to do with undersized images, etc.) The behavior that
we implemented is at least no worse than what was already there in terms
of distorting images.
3. Automatically invalidating entries based on CourseOverviewImageConfig.
There are two basic paths I can think of for this. The first is to
completely wipe this table when the config changes. The second is to
actually tie the config as a foreign key from this model -- so you could
do the comparison to see if the image_set's config_id matched
CourseOverviewImageConfig.current() and invalidate it if they didn't
match. I punted on this mostly because it's just not something that
happens much at all in practice, there is an understood (if manual)
process to do it, and it can happen in a follow-on PR if anyone is
interested in extending this functionality.
"""
course_overview = models.OneToOneField(CourseOverview, db_index=True, related_name="image_set")
small_url = models.TextField(blank=True, default="")
large_url = models.TextField(blank=True, default="")
@classmethod
def create(cls, course_overview, course=None):
"""
Create thumbnail images for this CourseOverview.
This will save the CourseOverviewImageSet before it returns.
"""
from openedx.core.lib.courses import create_course_image_thumbnail
# If image thumbnails are not enabled, do nothing.
config = CourseOverviewImageConfig.current()
if not config.enabled:
return
# If a course object was provided, use that. Otherwise, pull it from
# CourseOverview's course_id. This happens because sometimes we are
# generated as part of the CourseOverview creation (course is available
# and passed in), and sometimes the CourseOverview already exists.
if not course:
course = modulestore().get_course(course_overview.id)
image_set = cls(course_overview=course_overview)
if course.course_image:
# Try to create a thumbnails of the course image. If this fails for any
# reason (weird format, non-standard URL, etc.), the URLs will default
# to being blank. No matter what happens, we don't want to bubble up
# a 500 -- an image_set is always optional.
try:
image_set.small_url = create_course_image_thumbnail(course, config.small)
image_set.large_url = create_course_image_thumbnail(course, config.large)
except Exception: # pylint: disable=broad-except
log.exception(
"Could not create thumbnail for course %s with image %s (small=%s), (large=%s)",
course.id,
course.course_image,
config.small,
config.large
)
# Regardless of whether we created thumbnails or not, we need to save
# this record before returning. If no thumbnails were created (there was
# an error or the course has no source course_image), our url fields
# just keep their blank defaults.
try:
with transaction.atomic():
image_set.save()
course_overview.image_set = image_set
except (IntegrityError, ValueError):
# In the event of a race condition that tries to save two image sets
# to the same CourseOverview, we'll just silently pass on the one
# that fails. They should be the same data anyway.
#
# The ValueError above is to catch the following error that can
# happen in Django 1.8.4+ if the CourseOverview object fails to save
# (again, due to race condition).
#
# Example: ValueError: save() prohibited to prevent data loss due
# to unsaved related object 'course_overview'.")
pass
def __unicode__(self):
return u"CourseOverviewImageSet({}, small_url={}, large_url={})".format(
self.course_overview_id, self.small_url, self.large_url
)
class CourseOverviewImageConfig(ConfigurationModel):
"""
This sets the size of the thumbnail images that Course Overviews will generate
to display on the about, info, and student dashboard pages. If you make any
changes to this, you will have to regenerate CourseOverviews in order for it
to take effect. You might want to do this if you're doing precise theming of
your install of edx-platform... but really, you probably don't want to do this
at all at the moment, given how new this is. :-P
"""
# Small thumbnail, for things like the student dashboard
small_width = models.IntegerField(default=375)
small_height = models.IntegerField(default=200)
# Large thumbnail, for things like the about page
large_width = models.IntegerField(default=750)
large_height = models.IntegerField(default=400)
@property
def small(self):
"""Tuple for small image dimensions in pixels -- (width, height)"""
return (self.small_width, self.small_height)
@property
def large(self):
"""Tuple for large image dimensions in pixels -- (width, height)"""
return (self.large_width, self.large_height)
def __unicode__(self):
return u"CourseOverviewImageConfig(enabled={}, small={}, large={})".format(
self.enabled, self.small, self.large
)
| agpl-3.0 | 2,396,641,003,756,264,000 | 40.211874 | 114 | 0.651225 | false |
Oire/TWBlue | src/twitter/twitter.py | 1 | 3261 | # -*- coding: utf-8 -*-
import BaseHTTPServer
import webbrowser
from urlparse import urlparse, parse_qs
from twython import Twython, TwythonError
import config
import application
import output
import sound
import time
logged = False
verifier = None
class handler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
global logged
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
logged = True
params = parse_qs(urlparse(self.path).query)
global verifier
verifier = params.get('oauth_verifier', [None])[0]
self.wfile.write("You have successfully logged in to Twitter with TW Blue. "
"You can close this window now.")
self.wfile.close()
class twitter(object):
def login(self, user_key=None, user_secret=None):
if user_key != None and user_secret != None:
self.twitter = Twython(application.app_key, application.app_secret, user_key, user_secret)
elif config.main != None:
self.twitter = Twython(application.app_key, application.app_secret, config.main["twitter"]["user_key"], config.main["twitter"]["user_secret"])
else:
self.twitter = Twython(application.app_key, application.app_secret, self.final_step['oauth_token'], self.final_step['oauth_token_secret'])
self.credentials = self.twitter.verify_credentials()
def authorise(self):
httpd = BaseHTTPServer.HTTPServer(('127.0.0.1', 8080), handler)
twitter = Twython(application.app_key, application.app_secret, auth_endpoint='authorize')
auth = twitter.get_authentication_tokens("http://127.0.0.1:8080")
webbrowser.open_new_tab(auth['auth_url'])
global logged, verifier
while logged == False:
httpd.handle_request()
self.twitter = Twython(application.app_key, application.app_secret, auth['oauth_token'], auth['oauth_token_secret'])
final = self.twitter.get_authorized_tokens(verifier)
self.save_configuration(final["oauth_token"], final["oauth_token_secret"])
httpd.server_close()
def save_configuration(self, user_key=None, user_secret=None):
if user_key != None and user_secret != None:
config.main["twitter"]["user_key"] = user_key
config.main["twitter"]["user_secret"] = user_secret
else:
config.main['twitter']['user_key'] = self.final_step['oauth_token']
config.main['twitter']['user_secret'] = self.final_step['oauth_token_secret']
config.main.write()
def api_call(self, call_name, action="", _sound=None, report_success=False, report_failure=True, preexec_message="", *args, **kwargs):
finished = False
tries = 0
if preexec_message:
output.speak(preexec_message, True)
while finished==False and tries < 25:
try:
val = getattr(self.twitter, call_name)(*args, **kwargs)
finished = True
except TwythonError as e:
# if hasattr(e, 'reason') and e.reason.startswith("Failed to send request"):
output.speak(e.message)
if report_failure and hasattr(e, 'message'):
output.speak(_("%s failed. Reason: %s") % (action, e.message))
finished = True
except:
tries = tries + 1
time.sleep(5)
# raise e
if report_success:
output.speak(_("%s succeeded.") % action)
if _sound != None: sound.player.play(_sound)
# return val | gpl-2.0 | 5,429,948,604,041,343,000 | 36.930233 | 145 | 0.687213 | false |
PokeAPI/pokeapi | pokemon_v2/migrations/0009_pokemontypepast.py | 1 | 1924 | # Generated by Django 2.1.11 on 2021-02-06 22:03
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("pokemon_v2", "0008_auto_20201123_2045"),
]
operations = [
migrations.CreateModel(
name="PokemonTypePast",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("slot", models.IntegerField()),
(
"generation",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="pokemontypepast",
to="pokemon_v2.Generation",
),
),
(
"pokemon",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="pokemontypepast",
to="pokemon_v2.Pokemon",
),
),
(
"type",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="pokemontypepast",
to="pokemon_v2.Type",
),
),
],
options={
"abstract": False,
},
),
]
| bsd-3-clause | 8,417,753,935,987,722,000 | 30.032258 | 68 | 0.366424 | false |
oferb/OpenTrains | webserver/opentrain/algorithm/utils.py | 1 | 1361 | import os
import gtfs.models
import analysis.models
import numpy as np
from math import cos, pi  # used by get_XY_pos
from scipy import spatial
import shelve
try:
import matplotlib.pyplot as plt
except ImportError:
pass
import simplekml
import config
import itertools
import datetime
def enum(**enums):
return type('Enum', (), enums)
def get_XY_pos(relativeNullPoint, p):
""" Calculates X and Y distances in meters.
"""
deltaLatitude = p.latitude - relativeNullPoint.latitude
deltaLongitude = p.longitude - relativeNullPoint.longitude
latitudeCircumference = 40075160 * cos(relativeNullPoint.latitude * pi / 180)
resultX = deltaLongitude * latitudeCircumference / 360
resultY = deltaLatitude * 40008000 / 360
return resultX, resultY
def query_coords(point_tree, query_coords, query_accuracies):
if isinstance( query_accuracies, ( int, long, float ) ):
res = point_tree.query_ball_point(query_coords, query_accuracies)
else:
res = [point_tree.query_ball_point(query_coords[i], query_accuracies[i]) for i in xrange(len(query_accuracies))]
return res
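# Illustrative usage (coordinates are made up): build a KD-tree over XY points,
# then fetch the indices of points within a per-query radius.
#
#   point_tree = spatial.cKDTree([(0, 0), (3, 4), (10, 10)])
#   query_coords(point_tree, [(0, 0), (9, 9)], [5.0, 2.0])
#       -> [[0, 1], [2]]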
def enum(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('Enum', (), enums)
def is_strictly_increasing(L):
return all(x<y for x, y in zip(L, L[1:]))
def is_increasing(L):
return all(x<=y for x, y in zip(L, L[1:])) | bsd-3-clause | 4,164,793,536,097,898,500 | 29.266667 | 120 | 0.703894 | false |
robotpy/pyfrc | pyfrc/physics/motor_cfgs.py | 1 | 3366 | from collections import namedtuple
MotorModelConfig = namedtuple(
"MotorModelConfig",
[
"name",
"nominalVoltage",
"freeSpeed",
"freeCurrent",
"stallTorque",
"stallCurrent",
],
)
MotorModelConfig.__doc__ = """
Configuration parameters useful for simulating a motor. Typically these
parameters can be obtained from the manufacturer via a data sheet or other
specification.
RobotPy contains MotorModelConfig objects for many motors that are commonly
used in FRC. If you find that we're missing a motor you care about, please
file a bug report and let us know!
.. note:: The motor configurations that come with pyfrc are defined using the
pint units library. See :ref:`units`
"""
MotorModelConfig.name.__doc__ = "Descriptive name of motor"
MotorModelConfig.nominalVoltage.__doc__ = "Nominal voltage for the motor"
MotorModelConfig.freeSpeed.__doc__ = "No-load motor speed (``1 / [time]``)"
MotorModelConfig.freeCurrent.__doc__ = "No-load motor current"
MotorModelConfig.stallTorque.__doc__ = (
"Stall torque (``[length]**2 * [mass] / [time]**2``)"
)
MotorModelConfig.stallCurrent.__doc__ = "Stall current"
from .units import units
NOMINAL_VOLTAGE = 12 * units.volts
#: Motor configuration for CIM
MOTOR_CFG_CIM = MotorModelConfig(
"CIM",
NOMINAL_VOLTAGE,
5310 * units.cpm,
2.7 * units.amps,
2.42 * units.N_m,
133 * units.amps,
)
#: Motor configuration for Mini CIM
MOTOR_CFG_MINI_CIM = MotorModelConfig(
"MiniCIM",
NOMINAL_VOLTAGE,
5840 * units.cpm,
3.0 * units.amps,
1.41 * units.N_m,
89.0 * units.amps,
)
#: Motor configuration for Bag Motor
MOTOR_CFG_BAG = MotorModelConfig(
"Bag",
NOMINAL_VOLTAGE,
13180 * units.cpm,
1.8 * units.amps,
0.43 * units.N_m,
53.0 * units.amps,
)
#: Motor configuration for 775 Pro
MOTOR_CFG_775PRO = MotorModelConfig(
"775Pro",
NOMINAL_VOLTAGE,
18730 * units.cpm,
0.7 * units.amps,
0.71 * units.N_m,
134 * units.amps,
)
#: Motor configuration for Andymark RS 775-125
MOTOR_CFG_775_125 = MotorModelConfig(
"RS775-125",
NOMINAL_VOLTAGE,
5800 * units.cpm,
1.6 * units.amps,
0.28 * units.N_m,
18.0 * units.amps,
)
#: Motor configuration for Banebots RS 775
MOTOR_CFG_BB_RS775 = MotorModelConfig(
"RS775",
NOMINAL_VOLTAGE,
13050 * units.cpm,
2.7 * units.amps,
0.72 * units.N_m,
97.0 * units.amps,
)
#: Motor configuration for Andymark 9015
MOTOR_CFG_AM_9015 = MotorModelConfig(
"AM-9015",
NOMINAL_VOLTAGE,
14270 * units.cpm,
3.7 * units.amps,
0.36 * units.N_m,
71.0 * units.amps,
)
#: Motor configuration for Banebots RS 550
MOTOR_CFG_BB_RS550 = MotorModelConfig(
"RS550",
NOMINAL_VOLTAGE,
19000 * units.cpm,
0.4 * units.amps,
0.38 * units.N_m,
84.0 * units.amps,
)
#: Motor configuration for NEO 550 Brushless Motor
MOTOR_CFG_NEO_550 = MotorModelConfig(
"NEO 550",
NOMINAL_VOLTAGE,
11000 * units.cpm,
1.4 * units.amps,
0.97 * units.N_m,
100 * units.amps,
)
#: Motor configuration for Falcon 500 Brushless Motor
MOTOR_CFG_FALCON_500 = MotorModelConfig(
"Falcon 500",
NOMINAL_VOLTAGE,
6380 * units.cpm,
1.5 * units.amps,
4.69 * units.N_m,
257 * units.amps,
)
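# Illustrative derived constants (a sketch; not part of the original module):
# torque constant kT = stall torque / stall current, and velocity constant
# kV = free speed / nominal voltage. Values are pint quantities.
#
#   cfg = MOTOR_CFG_CIM
#   kT = cfg.stallTorque / cfg.stallCurrent     # ~0.018 N*m per amp
#   kV = cfg.freeSpeed / cfg.nominalVoltage     # ~442.5 rpm per volt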
del units
| mit | 3,968,190,741,376,040,400 | 23.042857 | 81 | 0.643494 | false |
shrinidhi666/rbhus | etc/startupScripts/blend/beforeFile.py | 1 | 1070 | import bpy
import addon_utils
import sys
bpy.context.user_preferences.filepaths.use_relative_paths= False
if(str(bpy.app.version).find("(2, 78") < 0 ):
bpy.context.user_preferences.filepaths.use_load_ui = False
bpy.context.user_preferences.filepaths.save_version=0
bpy.context.user_preferences.system.use_scripts_auto_execute = True
try:
addon_utils.disable("ui_layer_manager")
except:
print(sys.exc_info())
try:
addon_utils.enable("ui_layer_manager")
except:
print(sys.exc_info())
if(str(bpy.app.version).find("(2, 78") < 0 ):
bpy.context.user_preferences.filepaths.use_load_ui = False
try:
addon_utils.disable("bone_selection_groups")
except:
print(sys.exc_info())
try:
addon_utils.enable("bone_selection_groups")
except:
print(sys.exc_info())
else:
try:
addon_utils.disable("bone_selection_sets")
except:
print(sys.exc_info())
try:
addon_utils.enable("bone_selection_sets")
except:
print(sys.exc_info())
# addon_utils.disable("camera_add_title_safe")
# addon_utils.enable("camera_add_title_safe")
| gpl-3.0 | 4,371,447,006,588,869,000 | 21.765957 | 67 | 0.705607 | false |
MERegistro/meregistro | meregistro/apps/titulos/models/NormativaNacional.py | 1 | 1738 | # -*- coding: utf-8 -*-
from django.db import models
from apps.titulos.models.EstadoNormativaNacional import EstadoNormativaNacional
import datetime
class NormativaNacional(models.Model):
numero = models.CharField(max_length=50, unique=True)
descripcion = models.CharField(max_length=255)
observaciones = models.CharField(max_length=255, null=True, blank=True)
    estado = models.ForeignKey(EstadoNormativaNacional) # Matches the latest state in NormativaNacionalEstado
fecha_alta = models.DateField(auto_now_add=True)
class Meta:
app_label = 'titulos'
db_table = 'titulos_normativa_nacional'
def __unicode__(self):
return str(self.numero)
"Sobreescribo el init para agregarle propiedades"
def __init__(self, *args, **kwargs):
super(NormativaNacional, self).__init__(*args, **kwargs)
self.estados = self.get_estados()
def registrar_estado(self):
import datetime
from apps.titulos.models.NormativaNacionalEstado import NormativaNacionalEstado
registro = NormativaNacionalEstado(estado=self.estado)
registro.fecha = datetime.date.today()
registro.normativa_nacional_id = self.id
registro.save()
def get_estados(self):
from apps.titulos.models.NormativaNacionalEstado import NormativaNacionalEstado
try:
estados = NormativaNacionalEstado.objects.filter(normativa_nacional=self).order_by('fecha', 'id')
except:
estados = {}
return estados
"Algún título nacional está asociado a la normativa?"
def asociada_titulo_nacional(self):
from apps.titulos.models.TituloNacional import TituloNacional
return TituloNacional.objects.filter(normativa_nacional__id=self.id).exists()
"Eliminable?"
def is_deletable(self):
ret = self.asociada_titulo_nacional() == False
return ret
| bsd-3-clause | 6,765,823,261,245,573,000 | 33 | 112 | 0.767589 | false |
mahajrod/MACE | scripts/old/FS_distribution.py | 1 | 1645 | #!/usr/bin/env python
__author__ = 'Sergei F. Kliver'
import os, sys
import argparse
import numpy as np
from MACE.General.File import split_filename, make_list_of_path_to_files
from MACE.Parsers.VCF import CollectionVCF
def vcf_filter(filename):
return True if filename[-4:] == ".vcf" else False
def is_homozygous(record):
return record.is_homozygous()
parser = argparse.ArgumentParser()
parser.add_argument("-o", "--output", action="store", dest="output", default="FS_distributions",
help="Directory to write output files")
parser.add_argument("-i", "--input_vcf", action="store", dest="input_vcf", type=lambda s: s.split(","),
help="Comma-separated list of vcf files or directories containing them", required=True)
parser.add_argument("-e", "--extension_list", action="store", dest="extension_list", type=lambda s: s.split(","),
default=[".png"],
help="Comma-separated list of extensions of figures. Default: .png")
args = parser.parse_args()
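# Illustrative invocation (paths are made up):
#
#   FS_distribution.py -i variants_dir/,extra.vcf -o FS_distributions -e .png,.svg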
files_list = sorted(make_list_of_path_to_files(args.input_vcf, vcf_filter))
try:
os.mkdir(args.output)
except OSError:
pass
bins = np.arange(0, 66, 5)
for filename in files_list:
if args.output != "stdout":
print("Drawing distribution of FS in %s ..." % filename)
directory, prefix, extension = split_filename(filename)
variants = CollectionVCF(from_file=True, in_file=filename)
variants.draw_info_distribution("FS", is_homozygous, outfile_prefix="%s/%s" % (args.output, prefix),
extension_list=args.extension_list, bins=bins)
| apache-2.0 | 6,022,405,341,987,015,000 | 32.571429 | 113 | 0.657143 | false |
PeachyPrinter/peachyinstaller | windows/src/application.py | 1 | 3502 | import json
class Application(object):
def __init__(self,
id,
name,
available_version=None,
download_location=None,
relitive_install_path=None,
executable_path=None,
installed_path=None,
icon=None,
current_version=None,
shortcut_path=None):
self.id = id
self.name = name
self.available_version = available_version
self.download_location = download_location
self.relitive_install_path = relitive_install_path
self.executable_path = executable_path
self.installed_path = installed_path
self.icon = icon
self.current_version = current_version
self.shortcut_path = shortcut_path
@classmethod
def from_configs(cls, web_config, installed_config=None):
if installed_config and installed_config['id'] != web_config['id']:
raise Exception("Unexpected error processing config")
id = web_config['id']
name = web_config['name']['en-us']
available_version = web_config['available_version']
download_location = web_config['location']
relitive_install_path = web_config['install_path']
icon = web_config['icon']
executable_path = web_config['executable']
if installed_config:
installed_path = installed_config['installed_path']
current_version = installed_config['current_version']
shortcut_path = installed_config['shortcut_path']
else:
installed_path = None
current_version = None
shortcut_path = None
return cls(id, name, available_version, download_location, relitive_install_path, executable_path, installed_path, icon, current_version, shortcut_path)
def get_json(self):
this = {
"id": self.id,
"name": {
"en-us": self.name,
},
"available_version": self.available_version,
"download_location": self.download_location,
"relitive_install_path": self.relitive_install_path,
"executable_path": self.executable_path,
"installed_path": self.installed_path,
"icon": self.icon,
"current_version": self.current_version,
"shortcut_path": self.shortcut_path,
}
for (key, value) in this.items():
if value is None:
del this[key]
return json.dumps(this)
@property
def actions(self):
if self.current_version is not None:
if self.current_version == self.available_version:
return ['remove']
else:
return ['remove', 'upgrade']
else:
return ['install']
def __eq__(self, other):
return (
self.id == other.id and
self.name == other.name and
self.available_version == other.available_version and
self.download_location == other.download_location and
self.relitive_install_path == other.relitive_install_path and
self.executable_path == other.executable_path and
self.installed_path == other.installed_path and
self.icon == other.icon and
self.current_version == other.current_version and
self.shortcut_path == other.shortcut_path
)
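# Illustrative usage (config values are made up): build an Application from a
# freshly downloaded web config with no locally installed counterpart.
#
#   web_cfg = {
#       'id': 1,
#       'name': {'en-us': 'Peachy Scanner'},
#       'available_version': '1.0.1',
#       'location': 'http://example.com/app.zip',
#       'install_path': 'PeachyScanner',
#       'icon': 'icon.ico',
#       'executable': 'scanner.exe',
#   }
#   app = Application.from_configs(web_cfg)
#   app.actions   # -> ['install'] since no version is installed yet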
| apache-2.0 | -4,559,871,324,107,743,000 | 37.911111 | 160 | 0.566248 | false |
foursquare/pants | tests/python/pants_test/backend/codegen/wire/java/test_java_wire_library.py | 1 | 2124 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
from pants.backend.codegen.wire.java.java_wire_library import JavaWireLibrary
from pants.base.exceptions import TargetDefinitionException
from pants_test.test_base import TestBase
class JavaWireLibraryTest(TestBase):
def setUp(self):
super(JavaWireLibraryTest, self).setUp()
def test_fields(self):
target = self.make_target('//:foo', JavaWireLibrary,
registry_class='com.squareup.Registry',
roots=['foo', 'bar'],
enum_options=['one', 'two', 'three'],
service_writer='com.squareup.wire.RetrofitServiceWriter')
self.assertEqual('com.squareup.Registry', target.payload.get_field_value('registry_class'))
self.assertEqual(['foo', 'bar'], target.payload.get_field_value('roots'))
self.assertEqual(['one', 'two', 'three'], target.payload.get_field_value('enum_options'))
self.assertFalse(target.payload.get_field_value('no_options'))
self.assertEqual('com.squareup.wire.RetrofitServiceWriter',
target.payload.get_field_value('service_writer'))
self.assertEqual([], target.payload.get_field_value('service_writer_options'))
def test_wire_service_options(self):
target = self.make_target('//:wire_service_options', JavaWireLibrary,
service_writer='com.squareup.wire.RetrofitServiceWriter',
service_writer_options=['foo', 'bar', 'baz'])
    self.assertEqual(['foo', 'bar', 'baz'], target.payload.service_writer_options)
def test_invalid_service_writer_opts(self):
with self.assertRaisesRegexp(TargetDefinitionException,
r'service_writer_options requires setting service_writer'):
self.make_target('invalid:service_writer_opts', JavaWireLibrary,
service_writer_options=['one', 'two'])
| apache-2.0 | 8,379,027,613,503,642,000 | 50.804878 | 95 | 0.659605 | false |
xbmcmegapack/plugin.video.megapack.dev | resources/lib/menus/home_tvshows_fashion.py | 1 | 1092 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This file is part of XBMC Mega Pack Addon.
Copyright (C) 2014 Wolverine ([email protected])
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/gpl-3.0.html
"""
class Tvshows_Fashion():
'''Class that manages this specific menu context.'''
def open(self, plugin, menu):
menu.add_xplugins(plugin.get_xplugins(dictionaries=["TVShows"],
topics=["Fashion"],
languages=["My"], countries=["My"])) | gpl-3.0 | -1,902,513,686,243,087,000 | 36.62069 | 76 | 0.695413 | false |
andrucuna/python | interactivepython-coursera/interactivepython/week0/Variables.py | 1 | 1333 | __author__ = 'andrucuna'
# variables - placeholders for important values
# used to avoid recomputing values and to
# give values names that help reader understand code
# valid variable names - consists of letters, numbers, underscore (_)
# starts with letter or underscore
# case sensitive (capitalization matters)
# legal names - ninja, Ninja, n_i_n_j_a
# illegal names - 1337, 1337ninja
# Python convention - multiple words joined by _
# legal names - elite_ninja, leet_ninja, ninja_1337
# illegal name 1337_ninja
# assign to variable name using single equal sign =
# (remember that double equals == is used to test equality)
# examples
my_name = "Andres Ruiz"
print my_name
my_age = 27
print my_age
# birthday - add one
my_age += 1
print my_age
# the story of the magic pill
magic_pill = 30
print my_age - magic_pill
my_grand_dad = 74
print my_grand_dad - 2 * magic_pill
# Temperature examples
# convert from Fahrenheit to Celsuis
# c = 5 / 9 * (f - 32)
# use explanatory names
temp_Fahrenheit = 212
temp_Celsius = 5.0 / 9.0 * (temp_Fahrenheit - 32)
print temp_Celsius
# test it! 32 Fahrenheit is 0 Celsius, 212 Fahrenheit is 100 Celsius
# convert from Celsius to Fahrenheit
# f = 9 / 5 * c + 32
temp_Celsius = 100
temp_Fahrenheit = 9.0 / 5.0 * temp_Celsius + 32
print temp_Fahrenheit
# test it!
| gpl-2.0 | -2,776,057,309,682,962,400 | 16.773333 | 69 | 0.705926 | false |
cprogrammer1994/ModernGL | tests/test_buffer_read_errors.py | 1 | 1292 | import unittest
from common import get_context
class TestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.ctx = get_context()
def test_1(self):
buf = self.ctx.buffer(b'abc')
with self.assertRaises(Exception):
buf.read(4)
with self.assertRaises(Exception):
buf.read(offset=-1)
with self.assertRaises(Exception):
buf.read(offset=1, size=3)
def test_2(self):
buf = self.ctx.buffer(b'123456789')
with self.assertRaises(Exception):
buf.read_chunks(1, 4, 1, 6)
with self.assertRaises(Exception):
buf.read_chunks(1, 4, -1, 6)
with self.assertRaises(Exception):
buf.read_chunks(2, -1, 2, 1)
def test_3(self):
buf = self.ctx.buffer(b'123456789')
with self.assertRaises(Exception):
buf.read_chunks(2, 2, 3, 3)
with self.assertRaises(Exception):
buf.read_chunks(2, -1, -3, 3)
with self.assertRaises(Exception):
buf.read_chunks(2, -4, -3, 3)
def test_4(self):
buf = self.ctx.buffer(b'123456789')
with self.assertRaises(Exception):
buf.read_chunks(3, 0, 2, 2)
if __name__ == '__main__':
unittest.main()
| mit | 8,729,349,598,844,909,000 | 22.071429 | 43 | 0.565015 | false |
vivek8943/soft-boiled | src/algorithms/slp.py | 1 | 13262 | import numpy as np
import itertools
from collections import namedtuple, defaultdict
import math
from math import floor, ceil, radians, sin, cos, asin, sqrt, pi
import pandas as pd
from src.utils.geo import bb_center, GeoCoord, haversine
LocEstimate = namedtuple('LocEstimate', ['geo_coord', 'dispersion', 'dispersion_std_dev'])
def median(distance_func, vertices, weights=None):
"""
given a python list of vertices, and a distance function, this will find the vertex that is most central
relative to all other vertices. All of the vertices must have geocoords
Args:
distance_func (function) : A function to calculate the distance between two GeoCoord objects
vertices (list) : List of GeoCoord objects
Returns:
LocEstimate: The median point
"""
#get the distance between any two points
distances = map(lambda (v0, v1) :distance_func(v0.geo_coord, v1.geo_coord), itertools.combinations (vertices, 2))
#create a dictionary with keys representing the index of a location
m = { a: list() for a in range(len(vertices)) }
#add the distances from each point to the dict
for (k0,k1),distance in zip(itertools.combinations(range(len(vertices)), 2), distances):
#a distance can be None if one of the vertices does not have a geocoord
if(weights is None):
m[k0].append(distance)
m[k1].append(distance)
else:
# Weight distances by weight of destination vertex
# Ex: distance=3, weight =4 extends m[k0] with [3, 3, 3, 3]
m[k0].extend([distance]*weights[k1])
m[k1].extend([distance]*weights[k0])
summed_values = map(sum, m.itervalues())
idx = summed_values.index(min(summed_values))
if weights is not None and weights[idx] > 1:
# Handle self-weight (i.e. if my vertex has weight of 6 there are 5 additional self connections if
# Starting from my location)
m[idx].extend([0.0]*(weights[idx]-1))
return LocEstimate(geo_coord=vertices[idx].geo_coord, dispersion=np.median(m[idx]), dispersion_std_dev=np.std(m[idx]))
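# Illustrative usage (coordinates are made up): pick the most central of three
# location estimates using the haversine distance defined in src.utils.geo.
#
#   pts = [LocEstimate(GeoCoord(38.9, -77.0), None, None),
#          LocEstimate(GeoCoord(39.0, -77.1), None, None),
#          LocEstimate(GeoCoord(40.7, -74.0), None, None)]
#   median(haversine, pts)   # -> LocEstimate for the point minimizing total distance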
def get_known_locs(sqlCtx, table_name, include_places=True, min_locs=3, num_partitions=30, dispersion_threshold=50):
'''
Given a loaded twitter table, this will return all the twitter users with locations. A user's location is determined
by the median location of all known tweets. A user must have at least min_locs locations in order for a location to be
estimated
Args:
sqlCtx (Spark SQL Context) : A Spark SQL context
table_name (string): Table name that was registered when loading the data
min_locs (int) : Minimum number tweets that have a location in order to infer a location for the user
        num_partitions (int) : Number of partitions for the resulting RDD (performance tuning).
        dispersion_threshold (int) : A distance threshold on the dispersion of the estimated location for a user.
            Estimated points whose dispersion exceeds the threshold are considered unpredictable, given how
            dispersed the tweet distances are from one another.
Returns:
locations (rdd of LocEstimate) : Found locations of users. This rdd is often used as the ground truth of locations
'''
geo_coords = sqlCtx.sql('select user.id_str, geo.coordinates from %s where geo.coordinates is not null' % table_name)\
.map(lambda row: (row.id_str, row.coordinates))
if(include_places):
place_coords = sqlCtx.sql("select user.id_str, place.bounding_box.coordinates from %s "%table_name +
"where geo.coordinates is null and size(place.bounding_box.coordinates) > 0 and place.place_type " +
"in ('city', 'neighborhood', 'poi')").map(lambda row: (row.id_str, bb_center(row.coordinates)))
geo_coords = geo_coords.union(place_coords)
return geo_coords.groupByKey()\
.filter(lambda (id_str,coord_list): len(coord_list) >= min_locs)\
.map(lambda (id_str,coords): (id_str, median(haversine, [LocEstimate(GeoCoord(lat,lon), None, None)\
for lat,lon in coords])))\
.filter(lambda (id_str, loc): loc.dispersion < dispersion_threshold)\
.coalesce(num_partitions).cache()
def get_edge_list(sqlCtx, table_name, num_partitions=300):
'''
Given a loaded twitter table, this will return the @mention network in the form (src_id, (dest_id, num_@mentions))
Args:
sqlCtx (Spark SQL Context) : A Spark SQL context
table_name (string) : Table name that was registered when loading the data
        num_partitions (int) : Number of partitions for the resulting RDD (performance tuning)
Returns:
edges (rdd (src_id, (dest_id, weight))) : edges loaded from the table
'''
tmp_edges = sqlCtx.sql('select user.id_str, entities.user_mentions from %s where size(entities.user_mentions) > 0' % table_name)\
.flatMap(lambda row : [((row.id_str, mentioned_user.id_str),1)\
for mentioned_user in row.user_mentions\
if mentioned_user.id_str is not None and row.id_str != mentioned_user.id_str])\
.reduceByKey(lambda x,y:x+y)
return tmp_edges.map(lambda ((src_id,dest_id),num_mentions): ((dest_id,src_id),num_mentions))\
.join(tmp_edges)\
.map(lambda ((src_id,dest_id), (count0, count1)): (src_id, (dest_id, min(count0,count1))))\
.coalesce(num_partitions).cache()
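# Illustrative edge record (ids are made up): mention edges are reciprocal and
# weighted by the smaller of the two directed @mention counts.
#
#   ('123', ('456', 2))   # user 123 <-> user 456, min(#mentions) == 2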
def train_slp(locs_known, edge_list, num_iters, neighbor_threshold=3, dispersion_threshold=100):
'''
Core SLP algorithm
Args:
locs_known (rdd of LocEstimate objects) : Locations that are known for the SLP network
edge_list (rdd of edges (src_id, (dest_id, weight))) : edges representing the at mention
network
num_iters (int) : number of iterations to run the algorithm
        neighbor_threshold (int) : The minimum number of neighbors required in order for SLP to
            try to predict a location of a node in the network
        dispersion_threshold (int) : The maximum median distance among a node's local @mention network
            allowed in order to predict that node's location.
Returns:
locations (rdd of LocEstimate objects) : The locations found and known
'''
num_partitions = edge_list.getNumPartitions()
# Filter edge list so we never attempt to estimate a "known" location
filtered_edge_list = edge_list.keyBy(lambda (src_id, (dst_id, weight)): dst_id)\
.leftOuterJoin(locs_known)\
.flatMap(lambda (dst_id, (edge, loc_known)): [edge] if loc_known is None else [] )
l = locs_known
for i in range(num_iters):
l = filtered_edge_list.join(l)\
.map(lambda (src_id, ((dst_id, weight), known_vertex)) : (dst_id, (known_vertex, weight)))\
.groupByKey()\
.filter(lambda (src_id, neighbors) : neighbors.maxindex >= neighbor_threshold)\
.map(lambda (src_id, neighbors) :\
(src_id, median(haversine, [v for v,w in neighbors],[w for v,w in neighbors])))\
.filter(lambda (src_id, estimated_loc): estimated_loc.dispersion < dispersion_threshold)\
.union(locs_known).coalesce(num_partitions)
return l
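# Illustrative end-to-end pipeline (table name and parameters are made up):
#
#   locs  = get_known_locs(sqlCtx, 'tweets', min_locs=3)
#   edges = get_edge_list(sqlCtx, 'tweets')
#   estimated = train_slp(locs, edges, num_iters=5)
#   estimated.take(1)   # -> [(id_str, LocEstimate(geo_coord, dispersion, std_dev))]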
def evaluate(locs_known, edges, holdout_func, slp_closure):
'''
This function is used to assess various stats regarding how well SLP is running.
Given all locs that are known and all edges that are known, this funciton will first
apply the holdout to the locs_known, allowing for a ground truth comparison to be used.
Then, it applies the non-holdout set to the training function, which should yield the
locations of the holdout for comparison.
For example::
holdout = lambda (src_id) : src_id[-1] == '6'
trainer = lambda l, e : slp.train_slp(l, e, 3)
results = evaluate(locs_known, edges, holdout, trainer)
Args:
locs_known (rdd of LocEstimate objects) : The complete list of locations
edges (rdd of (src_id, (dest_id, weight)): all available edge information
holdout_func (function) : function responsible for filtering a holdout data set. For example::
lambda (src_id) : src_id[-1] == '6'
can be used to get approximately 10% of the data since the src_id's are evenly distributed numeric values
slp_closure (function closure): a closure over the slp train function. For example::
lambda locs, edges :\n
slp.train_slp(locs, edges, 4, neighbor_threshold=4, dispersion_threshold=150)
can be used for training with specific threshold parameters
Returns:
results (dict) : stats of the results from the SLP algorithm
`median:` median difference of predicted versus actual
`mean:` mean difference of predicted versus actual
`coverage:` ratio of number of predicted locations to number of original unknown locations
`reserved_locs:` number of known locations used to train
`total_locs:` number of known locations input into this function
`found_locs:` number of predicted locations
`holdout_ratio:` ratio of the holdout set to the entire set
'''
reserved_locs = locs_known.filter(lambda (src_id, loc): not holdout_func(src_id))
num_locs = reserved_locs.count()
total_locs = locs_known.count()
print('Total Locations %s' % total_locs)
results = slp_closure(reserved_locs, edges)
errors = results\
.filter(lambda (src_id, loc): holdout_func(src_id))\
.join(locs_known)\
.map(lambda (src_id, (vtx_found, vtx_actual)) :\
(src_id, (haversine(vtx_found.geo_coord, vtx_actual.geo_coord), vtx_found)))
errors_local = errors.map(lambda (src_id, (dist, est_loc)) : dist).collect()
    # Because we cannot easily calculate a median within RDDs, we bring the deltas local for stats calculations.
    # With larger datasets we may need to do this in the cluster, but for now this will do.
return (errors, {
'median': np.median(errors_local),
'mean': np.mean(errors_local),
'coverage':len(errors_local)/float(total_locs - num_locs),
'reserved_locs': num_locs,
'total_locs':total_locs,
'found_locs': len(errors_local),
'holdout_ratio' : 1 - num_locs/float(total_locs)
})
def predict_country_slp(tweets, bounding_boxes, sc):
'''
Take a set of estimates of user locations and estimate the country that user is in
Args:
tweets (RDD (id_str, LocEstimate))
bounding_boxes (list [(country_code, (min_lat, max_lat, min_lon, max_lon)),...])
Returns:
        Country Codes (list) : Predicted countries represented as their numeric codes
'''
# Convert Bounding boxes to allow for more efficient lookups
bb_lookup_lat = defaultdict(set)
bb_lookup_lon = defaultdict(set)
for i, (cc, (min_lat, max_lat, min_lon, max_lon)) in enumerate(bounding_boxes):
for lon in range(int(math.floor(min_lon)), int(math.ceil(max_lon))):
bb_lookup_lon[lon].add(i)
for lat in range(int(math.floor(min_lat)), int(math.ceil(max_lat))):
bb_lookup_lat[lat].add(i)
# Do country lookups and return an RDD that is (id_str, [country_codes])
return tweets.mapValues(lambda loc_estimate: _predict_country_using_lookup_slp(loc_estimate,\
sc.broadcast(bb_lookup_lat),\
sc.broadcast(bb_lookup_lon),\
sc.broadcast(bounding_boxes)))
def _predict_country_using_lookup_slp(loc_estimate, lat_dict, lon_dict, bounding_boxes):
'''
Internal helper function that uses broadcast lookup tables to take a single location estimate and show
what country bounding boxes include that point
Args:
loc_estimate (LocEstimate) : Estimate location
lat_dict (broadcast dictionary {integer_lat:set([bounding_box_indexes containing this lat])}) :
Indexed lookup dictionary for finding countries that exist at the specified latitude
        lon_dict (broadcast dictionary {integer_lon:set([bounding_box_indexes containing this lon])}) :
            Index lookup dictionary for finding countries that exist at the specified longitude
bounding_boxes (broadcast list [(country_code, (min_lat, max_lat, min_lon, max_lon)),...]) :
            List of countries and their bounding boxes
'''
lat = loc_estimate.geo_coord.lat
lon = loc_estimate.geo_coord.lon
countries = set()
potential_lats = lat_dict.value[math.floor(lat)]
potential_lons = lon_dict.value[math.floor(lon)]
intersection = potential_lats.intersection(potential_lons)
if len(intersection) == 0:
return []
#raise ValueError('uh oh')
else:
for index in intersection:
cc, (min_lat, max_lat, min_lon, max_lon) = bounding_boxes.value[index]
if min_lon < lon and lon < max_lon and min_lat < lat and lat < max_lat:
countries.add(cc)
return list(countries)
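
# --- illustrative sketch (not part of the original module) ------------------
# How the integer-degree bucketing above behaves, with one hypothetical box:
#
#   bounding_boxes = [('US', (24.0, 50.0, -125.0, -66.0))]
#   # after indexing, every whole degree covered by the box maps to index 0:
#   #   bb_lookup_lat[38] == {0}, bb_lookup_lon[-77] == {0}
#   # a point at (38.9, -77.0) intersects {0} & {0}, so only box 0 gets the
#   # precise min/max comparison above.
# -----------------------------------------------------------------------------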
| apache-2.0 | 4,146,304,402,109,052,000 | 43.804054 | 133 | 0.653748 | false |
thammegowda/incubator-joshua | scripts/support/query_http.py | 3 | 2476 | #!/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Allows a file to be queried against a Joshua HTTP server. The file should be tokenized
and normalized, with one sentence per line. This script takes that file, packages it up
into blocks of size 100 (changeable with -b), and sends it to the server. The JSON output
is dumped to STDOUT. If you wish to only dump the "curl" commands instead of calling them,
add "--dry-run".
Usage:
query_http.py --dry-run -s localhost -p 5674 /path/to/corpus
"""
import sys
import urllib
import argparse
import subprocess
parser = argparse.ArgumentParser(description='Send a (tokenized) test set to a Joshua HTTP server')
parser.add_argument('-s', '--server', dest='server', default='localhost', help='server host')
parser.add_argument('-p', '--port', dest='port', type=int, default=5674, help='server port')
parser.add_argument('-b', '--blocksize', dest='size', type=int, default=100, help='number of sentences at a time')
parser.add_argument('--dry-run', default=None, action='store_true', help='print curl commands only (don\'t run')
parser.add_argument('test_file', help='the (tokenized) test file')
args = parser.parse_args()
sentences = []
def process(sentence = None):
global sentences
if sentence is None or len(sentences) == args.size:
urlstr = '{}:{}/translate?{}'.format(args.server, args.port, urllib.urlencode(sentences))
cmd = 'curl -s "{}"'.format(urlstr)
if args.dry_run:
print cmd
else:
subprocess.call(cmd, shell=True)
sentences = []
if sentence is not None:
sentences.append(('q', sentence.rstrip()))
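
# Illustrative example (not part of the original script): with the defaults,
# each flushed block becomes a single GET request of repeated q= parameters:
#   curl -s "localhost:5674/translate?q=hello+world&q=second+sentence"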
for line in open(args.test_file):
process(line.rstrip())
process()
| apache-2.0 | -7,411,478,688,813,369,000 | 37.6875 | 114 | 0.712439 | false |
wanderer2/pymc3 | pymc3/examples/arma_example.py | 1 | 2135 | from pymc3 import Normal, sample, Model, plots, Potential, variational, HalfCauchy
from theano import scan, shared
import numpy as np
"""
ARMA example
It is interesting to note just how much more compact this is that the original STAN example
The original implementation is in the STAN documentation by Gelman et al and is reproduced below
Example from STAN - slightly altered
data {
    int<lower=1> T;       // num observations
    real y[T];            // observed outputs
}
parameters {
    real mu;              // mean coeff
    real phi;             // autoregression coeff
    real theta;           // moving avg coeff
    real<lower=0> sigma;  // noise scale
}
model {
    vector[T] nu;         // prediction for time t
    vector[T] err;        // error for time t
    nu[1] <- mu + phi * mu;  // assume err[0] == 0
    err[1] <- y[1] - nu[1];
    for (t in 2:T) {
        nu[t] <- mu + phi * y[t-1] + theta * err[t-1];
        err[t] <- y[t] - nu[t];
    }
    mu ~ normal(0,10);      // priors
    phi ~ normal(0,2);
    theta ~ normal(0,2);
    sigma ~ cauchy(0,5);
    err ~ normal(0,sigma);  // likelihood
}
Ported to PyMC3 by Peadar Coyle and Chris Fonnesbeck (c) 2016.
"""
def build_model():
y = shared(np.array([15, 10, 16, 11, 9, 11, 10, 18], dtype=np.float32))
with Model() as arma_model:
sigma = HalfCauchy('sigma', 5)
theta = Normal('theta', 0, sd=2)
phi = Normal('phi', 0, sd=2)
mu = Normal('mu', 0, sd=10)
err0 = y[0] - (mu + phi * mu)
def calc_next(last_y, this_y, err, mu, phi, theta):
nu_t = mu + phi * last_y + theta * err
return this_y - nu_t
err, _ = scan(fn=calc_next,
sequences=dict(input=y, taps=[-1, 0]),
outputs_info=[err0],
non_sequences=[mu, phi, theta])
Potential('like', Normal.dist(0, sd=sigma).logp(err))
mu, sds, elbo = variational.advi(n=2000)
return arma_model
def run(n_samples=1000):
model = build_model()
with model:
trace = sample(draws=n_samples)
burn = n_samples // 10
plots.traceplot(trace[burn:])
plots.forestplot(trace[burn:])
if __name__ == '__main__':
run()
| apache-2.0 | 4,759,934,765,427,243,000 | 23.261364 | 96 | 0.578923 | false |
fccoelho/Curso_Blockchain | assignments/Solutions/MerkleTree_Breno.py | 1 | 2790 | """
Solution to HW 2 by Breno Gomes
"""
import hashlib
def join_and_hash(str1, str2):
return hashlib.sha256(str1.encode("utf-8") + str2.encode("utf-8")).hexdigest()
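
# Illustrative note (not in the original submission): the pair hash is
# order-sensitive, e.g. join_and_hash('a', 'b') != join_and_hash('b', 'a'),
# which is why MerkleTree sorts its leaves so equal leaf sets yield equal roots.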
class MerkleTree:
def __init__(self, content):
# SORTING SO SAME SUBSETS GENERATE SAME MERKEL TREE
self.content = sorted(content)
self.branches = [self.content]
def __str__(self):
return str(list(self.branches))
def root(self):
return self.branches[len(self.branches) - 1][0]
def make_merkel_tree(self):
while len(self.branches[len(self.branches) - 1]) != 1:
new_branch = []
current_branch = self.branches[len(self.branches) - 1]
for i in range(0, len(current_branch), 2):
if i == len(current_branch) - 1:
new_branch.append(join_and_hash(current_branch[i], current_branch[i]))
break
new_branch.append(join_and_hash(current_branch[i], current_branch[i + 1]))
self.branches.append(new_branch)
def join_trees(self, iterator):
        new_tree = MerkleTree(iterator)
new_tree.make_merkel_tree()
self.branches.append([join_and_hash(self.root(), new_tree.root())])
for i in range(0, len(new_tree.branches)):
self.branches[len(self.branches) - (i + 2)] += new_tree.branches[len(new_tree.branches) - (i + 1)]
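
    # --- added sketch (not in the original submission) ---------------------
    # Minimal membership check, assuming a full rebuild is acceptable: the
    # tree is recomputed from its own leaves and the resulting root is
    # compared to the supplied one. A real Merkle proof would hash along the
    # leaf's audit path instead; this solution does not store those paths.
    def verify_leaf(self, leaf, root):
        if leaf not in self.content:
            return False
        rebuilt = MerkleTree(self.content)
        rebuilt.make_merkel_tree()
        return rebuilt.root() == root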
# Added by Flavio for grading
import unittest
import random
test_leaves_even = ['casa', 'bola', 'peixe', 'caixa']
test_leaves_odd = ['um', 'dois', 'três']
class TestMerkleTree(unittest.TestCase):
def test_tree_creation(self):
mt = MerkleTree(test_leaves_even)
self.assertIsInstance(mt, MerkleTree)
    def test_return_leaves(self):
        mt = MerkleTree(test_leaves_even)
        # the constructor sorts the leaves, so compare against the sorted list
        self.assertListEqual(sorted(test_leaves_even), mt.content)
    def test_get_leaves(self):
        mt = MerkleTree(test_leaves_even)
        self.assertListEqual(sorted(test_leaves_even), mt.content)
    def test_odd_leaves(self):
        mt = MerkleTree(test_leaves_odd)
        mt.make_merkel_tree()
        self.assertIsInstance(mt, MerkleTree)
    def test_order(self):
        mt = MerkleTree(test_leaves_even)
        mt.make_merkel_tree()
        mt2 = MerkleTree(['bola', 'caixa', 'casa', 'peixe'])
        mt2.make_merkel_tree()
        self.assertEqual(mt.root(), mt2.root())
    def test_verify_leaves(self):
        mt = MerkleTree(test_leaves_even)
        mt.make_merkel_tree()
        r = mt.verify_leaf('casa', mt.root())
        self.assertTrue(r)
    def test_join_trees(self):
        mt = MerkleTree(test_leaves_even)
        mt.make_merkel_tree()
        mt2 = MerkleTree(test_leaves_odd)
        mt.join_trees(mt2.content)
# Example of creating and joining a tree
if __name__ == "__main__":
mt = MerkleTree(["b", "c", "a", "d", "e"])
mt.make_merkel_tree()
print(mt)
mt.join_trees(["f", "g", "h"])
print(mt)
unittest.main()
| lgpl-3.0 | 3,745,561,583,888,974,000 | 28.989247 | 110 | 0.602366 | false |
mheap/ansible | lib/ansible/plugins/connection/winrm.py | 1 | 30176 | # (c) 2014, Chris Church <[email protected]>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
author: Ansible Core Team
connection: winrm
short_description: Run tasks over Microsoft's WinRM
description:
- Run commands or put/fetch on a target via WinRM
- This plugin allows extra arguments to be passed that are supported by the protocol but not explicitly defined here.
They should take the form of variables declared with the following pattern `ansible_winrm_<option>`.
version_added: "2.0"
requirements:
- pywinrm (python library)
options:
# figure out more elegant 'delegation'
remote_addr:
description:
- Address of the windows machine
default: inventory_hostname
vars:
- name: ansible_host
- name: ansible_winrm_host
remote_user:
keywords:
- name: user
- name: remote_user
description:
- The user to log in as to the Windows machine
vars:
- name: ansible_user
- name: ansible_winrm_user
port:
description:
- port for winrm to connect on remote target
        - The default is the https port (5986); if using http it should be 5985
vars:
- name: ansible_port
- name: ansible_winrm_port
default: 5986
keywords:
- name: port
type: integer
scheme:
description:
- URI scheme to use
- If not set, then will default to C(https) or C(http) if I(port) is
C(5985).
choices: [http, https]
vars:
- name: ansible_winrm_scheme
path:
description: URI path to connect to
default: '/wsman'
vars:
- name: ansible_winrm_path
transport:
description:
        - List of winrm transports to attempt to use (ssl, plaintext, kerberos, etc)
        - If None (the default) the plugin will try to automatically guess the correct list
        - The choices available depend on your version of pywinrm
type: list
vars:
- name: ansible_winrm_transport
kerberos_command:
        description: kerberos command to use to request an authentication ticket
default: kinit
vars:
- name: ansible_winrm_kinit_cmd
kerberos_mode:
description:
        - Kerberos usage mode.
        - The managed option means Ansible will obtain the Kerberos ticket automatically.
        - The manual option means a ticket must already have been obtained by the user.
- If having issues with Ansible freezing when trying to obtain the
Kerberos ticket, you can either set this to C(manual) and obtain
it outside Ansible or install C(pexpect) through pip and try
again.
choices: [managed, manual]
vars:
- name: ansible_winrm_kinit_mode
connection_timeout:
description:
- Sets the operation and read timeout settings for the WinRM
connection.
- Corresponds to the C(operation_timeout_sec) and
C(read_timeout_sec) args in pywinrm so avoid setting these vars
with this one.
- The default value is whatever is set in the installed version of
pywinrm.
vars:
- name: ansible_winrm_connection_timeout
"""
import base64
import inspect
import os
import re
import traceback
import json
import tempfile
import subprocess
HAVE_KERBEROS = False
try:
import kerberos
HAVE_KERBEROS = True
except ImportError:
pass
from ansible.errors import AnsibleError, AnsibleConnectionFailure
from ansible.errors import AnsibleFileNotFound
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six.moves.urllib.parse import urlunsplit
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.six import binary_type
from ansible.plugins.connection import ConnectionBase
from ansible.plugins.shell.powershell import leaf_exec
from ansible.utils.hashing import secure_hash
from ansible.utils.path import makedirs_safe
try:
import winrm
from winrm import Response
from winrm.protocol import Protocol
HAS_WINRM = True
except ImportError as e:
HAS_WINRM = False
WINRM_IMPORT_ERR = e
try:
import xmltodict
HAS_XMLTODICT = True
except ImportError as e:
HAS_XMLTODICT = False
XMLTODICT_IMPORT_ERR = e
try:
import pexpect
HAS_PEXPECT = True
except ImportError as e:
HAS_PEXPECT = False
# used to try and parse the hostname and detect if IPv6 is being used
try:
import ipaddress
HAS_IPADDRESS = True
except ImportError:
HAS_IPADDRESS = False
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Connection(ConnectionBase):
'''WinRM connections over HTTP/HTTPS.'''
transport = 'winrm'
module_implementation_preferences = ('.ps1', '.exe', '')
become_methods = ['runas']
allow_executable = False
has_pipelining = True
allow_extras = True
def __init__(self, *args, **kwargs):
self.always_pipeline_modules = True
self.has_native_async = True
self.protocol = None
self.shell_id = None
self.delegate = None
self._shell_type = 'powershell'
super(Connection, self).__init__(*args, **kwargs)
def set_options(self, task_keys=None, var_options=None, direct=None):
if not HAS_WINRM:
return
super(Connection, self).set_options(task_keys=None, var_options=var_options, direct=direct)
self._winrm_host = self.get_option('remote_addr')
self._winrm_user = self.get_option('remote_user')
self._winrm_pass = self._play_context.password
self._become_method = self._play_context.become_method
self._become_user = self._play_context.become_user
self._become_pass = self._play_context.become_pass
self._winrm_port = self.get_option('port')
self._winrm_scheme = self.get_option('scheme')
# old behaviour, scheme should default to http if not set and the port
# is 5985 otherwise https
if self._winrm_scheme is None:
self._winrm_scheme = 'http' if self._winrm_port == 5985 else 'https'
self._winrm_path = self.get_option('path')
self._kinit_cmd = self.get_option('kerberos_command')
self._winrm_transport = self.get_option('transport')
self._winrm_connection_timeout = self.get_option('connection_timeout')
if hasattr(winrm, 'FEATURE_SUPPORTED_AUTHTYPES'):
self._winrm_supported_authtypes = set(winrm.FEATURE_SUPPORTED_AUTHTYPES)
else:
# for legacy versions of pywinrm, use the values we know are supported
self._winrm_supported_authtypes = set(['plaintext', 'ssl', 'kerberos'])
# calculate transport if needed
if self._winrm_transport is None or self._winrm_transport[0] is None:
# TODO: figure out what we want to do with auto-transport selection in the face of NTLM/Kerb/CredSSP/Cert/Basic
transport_selector = ['ssl'] if self._winrm_scheme == 'https' else ['plaintext']
if HAVE_KERBEROS and ((self._winrm_user and '@' in self._winrm_user)):
self._winrm_transport = ['kerberos'] + transport_selector
else:
self._winrm_transport = transport_selector
unsupported_transports = set(self._winrm_transport).difference(self._winrm_supported_authtypes)
if unsupported_transports:
raise AnsibleError('The installed version of WinRM does not support transport(s) %s' %
to_native(list(unsupported_transports), nonstring='simplerepr'))
# if kerberos is among our transports and there's a password specified, we're managing the tickets
kinit_mode = self.get_option('kerberos_mode')
if kinit_mode is None:
# HACK: ideally, remove multi-transport stuff
self._kerb_managed = "kerberos" in self._winrm_transport and (self._winrm_pass is not None and self._winrm_pass != "")
elif kinit_mode == "managed":
self._kerb_managed = True
elif kinit_mode == "manual":
self._kerb_managed = False
        # arg names we're going to pass directly
internal_kwarg_mask = set(['self', 'endpoint', 'transport', 'username', 'password', 'scheme', 'path', 'kinit_mode', 'kinit_cmd'])
self._winrm_kwargs = dict(username=self._winrm_user, password=self._winrm_pass)
argspec = inspect.getargspec(Protocol.__init__)
supported_winrm_args = set(argspec.args)
supported_winrm_args.update(internal_kwarg_mask)
passed_winrm_args = set([v.replace('ansible_winrm_', '') for v in self.get_option('_extras')])
unsupported_args = passed_winrm_args.difference(supported_winrm_args)
# warn for kwargs unsupported by the installed version of pywinrm
for arg in unsupported_args:
display.warning("ansible_winrm_{0} unsupported by pywinrm (is an up-to-date version of pywinrm installed?)".format(arg))
# pass through matching extras, excluding the list we want to treat specially
for arg in passed_winrm_args.difference(internal_kwarg_mask).intersection(supported_winrm_args):
self._winrm_kwargs[arg] = self.get_option('_extras')['ansible_winrm_%s' % arg]
# Until pykerberos has enough goodies to implement a rudimentary kinit/klist, simplest way is to let each connection
# auth itself with a private CCACHE.
def _kerb_auth(self, principal, password):
if password is None:
password = ""
self._kerb_ccache = tempfile.NamedTemporaryFile()
display.vvvvv("creating Kerberos CC at %s" % self._kerb_ccache.name)
krb5ccname = "FILE:%s" % self._kerb_ccache.name
os.environ["KRB5CCNAME"] = krb5ccname
krb5env = dict(KRB5CCNAME=krb5ccname)
# stores various flags to call with kinit, we currently only use this
# to set -f so we can get a forward-able ticket (cred delegation)
kinit_flags = []
if boolean(self.get_option('_extras').get('ansible_winrm_kerberos_delegation', False)):
kinit_flags.append('-f')
kinit_cmdline = [self._kinit_cmd]
kinit_cmdline.extend(kinit_flags)
kinit_cmdline.append(principal)
# pexpect runs the process in its own pty so it can correctly send
# the password as input even on MacOS which blocks subprocess from
# doing so. Unfortunately it is not available on the built in Python
# so we can only use it if someone has installed it
if HAS_PEXPECT:
proc_mechanism = "pexpect"
command = kinit_cmdline.pop(0)
password = to_text(password, encoding='utf-8',
errors='surrogate_or_strict')
display.vvvv("calling kinit with pexpect for principal %s"
% principal)
try:
child = pexpect.spawn(command, kinit_cmdline, timeout=60,
env=krb5env)
except pexpect.ExceptionPexpect as err:
err_msg = "Kerberos auth failure when calling kinit cmd " \
"'%s': %s" % (command, to_native(err))
raise AnsibleConnectionFailure(err_msg)
try:
child.expect(".*:")
child.sendline(password)
except OSError as err:
# child exited before the pass was sent, Ansible will raise
# error based on the rc below, just display the error here
display.vvvv("kinit with pexpect raised OSError: %s"
% to_native(err))
# technically this is the stdout + stderr but to match the
# subprocess error checking behaviour, we will call it stderr
stderr = child.read()
child.wait()
rc = child.exitstatus
else:
proc_mechanism = "subprocess"
password = to_bytes(password, encoding='utf-8',
errors='surrogate_or_strict')
display.vvvv("calling kinit with subprocess for principal %s"
% principal)
try:
p = subprocess.Popen(kinit_cmdline, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=krb5env)
except OSError as err:
err_msg = "Kerberos auth failure when calling kinit cmd " \
"'%s': %s" % (self._kinit_cmd, to_native(err))
raise AnsibleConnectionFailure(err_msg)
stdout, stderr = p.communicate(password + b'\n')
            rc = p.returncode
if rc != 0:
err_msg = "Kerberos auth failure for principal %s with %s: %s" \
% (principal, proc_mechanism, to_native(stderr.strip()))
raise AnsibleConnectionFailure(err_msg)
display.vvvvv("kinit succeeded for principal %s" % principal)
def _winrm_connect(self):
'''
Establish a WinRM connection over HTTP/HTTPS.
'''
display.vvv("ESTABLISH WINRM CONNECTION FOR USER: %s on PORT %s TO %s" %
(self._winrm_user, self._winrm_port, self._winrm_host), host=self._winrm_host)
winrm_host = self._winrm_host
if HAS_IPADDRESS:
display.vvvv("checking if winrm_host %s is an IPv6 address" % winrm_host)
try:
ipaddress.IPv6Address(winrm_host)
except ipaddress.AddressValueError:
pass
else:
winrm_host = "[%s]" % winrm_host
netloc = '%s:%d' % (winrm_host, self._winrm_port)
endpoint = urlunsplit((self._winrm_scheme, netloc, self._winrm_path, '', ''))
errors = []
for transport in self._winrm_transport:
if transport == 'kerberos':
if not HAVE_KERBEROS:
errors.append('kerberos: the python kerberos library is not installed')
continue
if self._kerb_managed:
self._kerb_auth(self._winrm_user, self._winrm_pass)
display.vvvvv('WINRM CONNECT: transport=%s endpoint=%s' % (transport, endpoint), host=self._winrm_host)
try:
winrm_kwargs = self._winrm_kwargs.copy()
if self._winrm_connection_timeout:
winrm_kwargs['operation_timeout_sec'] = self._winrm_connection_timeout
winrm_kwargs['read_timeout_sec'] = self._winrm_connection_timeout + 1
protocol = Protocol(endpoint, transport=transport, **winrm_kwargs)
# open the shell from connect so we know we're able to talk to the server
if not self.shell_id:
self.shell_id = protocol.open_shell(codepage=65001) # UTF-8
display.vvvvv('WINRM OPEN SHELL: %s' % self.shell_id, host=self._winrm_host)
return protocol
except Exception as e:
err_msg = to_text(e).strip()
if re.search(to_text(r'Operation\s+?timed\s+?out'), err_msg, re.I):
raise AnsibleError('the connection attempt timed out')
m = re.search(to_text(r'Code\s+?(\d{3})'), err_msg)
if m:
code = int(m.groups()[0])
if code == 401:
err_msg = 'the specified credentials were rejected by the server'
elif code == 411:
return protocol
errors.append(u'%s: %s' % (transport, err_msg))
display.vvvvv(u'WINRM CONNECTION ERROR: %s\n%s' % (err_msg, to_text(traceback.format_exc())), host=self._winrm_host)
if errors:
raise AnsibleConnectionFailure(', '.join(map(to_native, errors)))
else:
raise AnsibleError('No transport found for WinRM connection')
def _winrm_send_input(self, protocol, shell_id, command_id, stdin, eof=False):
rq = {'env:Envelope': protocol._get_soap_header(
resource_uri='http://schemas.microsoft.com/wbem/wsman/1/windows/shell/cmd',
action='http://schemas.microsoft.com/wbem/wsman/1/windows/shell/Send',
shell_id=shell_id)}
stream = rq['env:Envelope'].setdefault('env:Body', {}).setdefault('rsp:Send', {})\
.setdefault('rsp:Stream', {})
stream['@Name'] = 'stdin'
stream['@CommandId'] = command_id
stream['#text'] = base64.b64encode(to_bytes(stdin))
if eof:
stream['@End'] = 'true'
protocol.send_message(xmltodict.unparse(rq))
def _winrm_exec(self, command, args=(), from_exec=False, stdin_iterator=None):
if not self.protocol:
self.protocol = self._winrm_connect()
self._connected = True
if from_exec:
display.vvvvv("WINRM EXEC %r %r" % (command, args), host=self._winrm_host)
else:
display.vvvvvv("WINRM EXEC %r %r" % (command, args), host=self._winrm_host)
command_id = None
try:
stdin_push_failed = False
command_id = self.protocol.run_command(self.shell_id, to_bytes(command), map(to_bytes, args), console_mode_stdin=(stdin_iterator is None))
try:
if stdin_iterator:
for (data, is_last) in stdin_iterator:
self._winrm_send_input(self.protocol, self.shell_id, command_id, data, eof=is_last)
except Exception as ex:
display.warning("FATAL ERROR DURING FILE TRANSFER: %s" % to_text(ex))
stdin_push_failed = True
# NB: this can hang if the receiver is still running (eg, network failed a Send request but the server's still happy).
# FUTURE: Consider adding pywinrm status check/abort operations to see if the target is still running after a failure.
resptuple = self.protocol.get_command_output(self.shell_id, command_id)
# ensure stdout/stderr are text for py3
# FUTURE: this should probably be done internally by pywinrm
response = Response(tuple(to_text(v) if isinstance(v, binary_type) else v for v in resptuple))
# TODO: check result from response and set stdin_push_failed if we have nonzero
if from_exec:
display.vvvvv('WINRM RESULT %r' % to_text(response), host=self._winrm_host)
else:
display.vvvvvv('WINRM RESULT %r' % to_text(response), host=self._winrm_host)
display.vvvvvv('WINRM STDOUT %s' % to_text(response.std_out), host=self._winrm_host)
display.vvvvvv('WINRM STDERR %s' % to_text(response.std_err), host=self._winrm_host)
if stdin_push_failed:
stderr = to_bytes(response.std_err, encoding='utf-8')
if self.is_clixml(stderr):
stderr = self.parse_clixml_stream(stderr)
raise AnsibleError('winrm send_input failed; \nstdout: %s\nstderr %s' % (to_native(response.std_out), to_native(stderr)))
return response
finally:
if command_id:
self.protocol.cleanup_command(self.shell_id, command_id)
def _connect(self):
if not HAS_WINRM:
raise AnsibleError("winrm or requests is not installed: %s" % to_native(WINRM_IMPORT_ERR))
elif not HAS_XMLTODICT:
raise AnsibleError("xmltodict is not installed: %s" % to_native(XMLTODICT_IMPORT_ERR))
super(Connection, self)._connect()
if not self.protocol:
self.protocol = self._winrm_connect()
self._connected = True
return self
def _reset(self): # used by win_reboot (and any other action that might need to bounce the state)
self.protocol = None
self.shell_id = None
self._connect()
def _create_raw_wrapper_payload(self, cmd, environment=None):
environment = {} if environment is None else environment
payload = {
'module_entry': to_text(base64.b64encode(to_bytes(cmd))),
'powershell_modules': {},
'actions': ['exec'],
'exec': to_text(base64.b64encode(to_bytes(leaf_exec))),
'environment': environment,
'min_ps_version': None,
'min_os_version': None
}
return json.dumps(payload)
def _wrapper_payload_stream(self, payload, buffer_size=200000):
payload_bytes = to_bytes(payload)
byte_count = len(payload_bytes)
for i in range(0, byte_count, buffer_size):
yield payload_bytes[i:i + buffer_size], i + buffer_size >= byte_count
def exec_command(self, cmd, in_data=None, sudoable=True):
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
cmd_parts = self._shell._encode_script(cmd, as_list=True, strict_mode=False, preserve_rc=False)
# TODO: display something meaningful here
display.vvv("EXEC (via pipeline wrapper)")
stdin_iterator = None
if in_data:
stdin_iterator = self._wrapper_payload_stream(in_data)
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:], from_exec=True, stdin_iterator=stdin_iterator)
result.std_out = to_bytes(result.std_out)
result.std_err = to_bytes(result.std_err)
# parse just stderr from CLIXML output
if self.is_clixml(result.std_err):
try:
result.std_err = self.parse_clixml_stream(result.std_err)
except Exception:
# unsure if we're guaranteed a valid xml doc- use raw output in case of error
pass
return (result.status_code, result.std_out, result.std_err)
def is_clixml(self, value):
return value.startswith(b"#< CLIXML")
# hacky way to get just stdout- not always sure of doc framing here, so use with care
def parse_clixml_stream(self, clixml_doc, stream_name='Error'):
clear_xml = clixml_doc.replace(b'#< CLIXML\r\n', b'')
doc = xmltodict.parse(clear_xml)
lines = [l.get('#text', '').replace('_x000D__x000A_', '') for l in doc.get('Objs', {}).get('S', {}) if l.get('@S') == stream_name]
return '\r\n'.join(lines)
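
    # For reference (illustrative only), the CLIXML block parsed above has the
    # shape:
    #   #< CLIXML
    #   <Objs Version="1.1.0.1" ...><S S="Error">msg_x000D__x000A_</S></Objs>
    # parse_clixml_stream() keeps only the <S> nodes whose @S attribute
    # matches stream_name.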
# FUTURE: determine buffer size at runtime via remote winrm config?
def _put_file_stdin_iterator(self, in_path, out_path, buffer_size=250000):
in_size = os.path.getsize(to_bytes(in_path, errors='surrogate_or_strict'))
offset = 0
with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
for out_data in iter((lambda: in_file.read(buffer_size)), b''):
offset += len(out_data)
self._display.vvvvv('WINRM PUT "%s" to "%s" (offset=%d size=%d)' % (in_path, out_path, offset, len(out_data)), host=self._winrm_host)
# yes, we're double-encoding over the wire in this case- we want to ensure that the data shipped to the end PS pipeline is still b64-encoded
b64_data = base64.b64encode(out_data) + b'\r\n'
# cough up the data, as well as an indicator if this is the last chunk so winrm_send knows to set the End signal
yield b64_data, (in_file.tell() == in_size)
if offset == 0: # empty file, return an empty buffer + eof to close it
yield "", True
def put_file(self, in_path, out_path):
super(Connection, self).put_file(in_path, out_path)
out_path = self._shell._unquote(out_path)
display.vvv('PUT "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host)
if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
raise AnsibleFileNotFound('file or module does not exist: "%s"' % to_native(in_path))
script_template = u'''
begin {{
$path = '{0}'
$DebugPreference = "Continue"
$ErrorActionPreference = "Stop"
Set-StrictMode -Version 2
$fd = [System.IO.File]::Create($path)
$sha1 = [System.Security.Cryptography.SHA1CryptoServiceProvider]::Create()
$bytes = @() #initialize for empty file case
}}
process {{
$bytes = [System.Convert]::FromBase64String($input)
$sha1.TransformBlock($bytes, 0, $bytes.Length, $bytes, 0) | Out-Null
$fd.Write($bytes, 0, $bytes.Length)
}}
end {{
$sha1.TransformFinalBlock($bytes, 0, 0) | Out-Null
$hash = [System.BitConverter]::ToString($sha1.Hash).Replace("-", "").ToLowerInvariant()
$fd.Close()
Write-Output "{{""sha1"":""$hash""}}"
}}
'''
script = script_template.format(self._shell._escape(out_path))
cmd_parts = self._shell._encode_script(script, as_list=True, strict_mode=False, preserve_rc=False)
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:], stdin_iterator=self._put_file_stdin_iterator(in_path, out_path))
# TODO: improve error handling
if result.status_code != 0:
raise AnsibleError(to_native(result.std_err))
put_output = json.loads(result.std_out)
remote_sha1 = put_output.get("sha1")
if not remote_sha1:
raise AnsibleError("Remote sha1 was not returned")
local_sha1 = secure_hash(in_path)
if not remote_sha1 == local_sha1:
raise AnsibleError("Remote sha1 hash {0} does not match local hash {1}".format(to_native(remote_sha1), to_native(local_sha1)))
def fetch_file(self, in_path, out_path):
super(Connection, self).fetch_file(in_path, out_path)
in_path = self._shell._unquote(in_path)
out_path = out_path.replace('\\', '/')
display.vvv('FETCH "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host)
buffer_size = 2**19 # 0.5MB chunks
makedirs_safe(os.path.dirname(out_path))
out_file = None
try:
offset = 0
while True:
try:
script = '''
$path = "%(path)s"
If (Test-Path -Path $path -PathType Leaf)
{
$buffer_size = %(buffer_size)d
$offset = %(offset)d
$stream = New-Object -TypeName IO.FileStream($path, [IO.FileMode]::Open, [IO.FileAccess]::Read, [IO.FileShare]::ReadWrite)
$stream.Seek($offset, [System.IO.SeekOrigin]::Begin) > $null
$buffer = New-Object -TypeName byte[] $buffer_size
$bytes_read = $stream.Read($buffer, 0, $buffer_size)
if ($bytes_read -gt 0) {
$bytes = $buffer[0..($bytes_read - 1)]
[System.Convert]::ToBase64String($bytes)
}
$stream.Close() > $null
}
ElseIf (Test-Path -Path $path -PathType Container)
{
Write-Host "[DIR]";
}
Else
{
Write-Error "$path does not exist";
Exit 1;
}
''' % dict(buffer_size=buffer_size, path=self._shell._escape(in_path), offset=offset)
display.vvvvv('WINRM FETCH "%s" to "%s" (offset=%d)' % (in_path, out_path, offset), host=self._winrm_host)
cmd_parts = self._shell._encode_script(script, as_list=True, preserve_rc=False)
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:])
if result.status_code != 0:
raise IOError(to_native(result.std_err))
if result.std_out.strip() == '[DIR]':
data = None
else:
data = base64.b64decode(result.std_out.strip())
if data is None:
makedirs_safe(out_path)
break
else:
if not out_file:
# If out_path is a directory and we're expecting a file, bail out now.
if os.path.isdir(to_bytes(out_path, errors='surrogate_or_strict')):
break
out_file = open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb')
out_file.write(data)
if len(data) < buffer_size:
break
offset += len(data)
except Exception:
traceback.print_exc()
raise AnsibleError('failed to transfer file to "%s"' % to_native(out_path))
finally:
if out_file:
out_file.close()
def close(self):
if self.protocol and self.shell_id:
display.vvvvv('WINRM CLOSE SHELL: %s' % self.shell_id, host=self._winrm_host)
self.protocol.close_shell(self.shell_id)
self.shell_id = None
self.protocol = None
self._connected = False
| gpl-3.0 | 3,836,380,334,355,508,000 | 42.733333 | 156 | 0.57728 | false |
apache/allura | Allura/allura/tests/scripts/test_misc_scripts.py | 2 | 1912 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from __future__ import absolute_import
from bson import ObjectId
from alluratest.tools import assert_equal
from allura.scripts.clear_old_notifications import ClearOldNotifications
from alluratest.controller import setup_basic_test
from allura import model as M
from ming.odm import session
class TestClearOldNotifications(object):
def setUp(self):
setup_basic_test()
def run_script(self, options):
cls = ClearOldNotifications
opts = cls.parser().parse_args(options)
cls.execute(opts)
def test(self):
n = M.Notification(app_config_id=ObjectId(), neighborhood_id=ObjectId(), project_id=ObjectId(),
tool_name='blah')
session(n).flush(n)
assert_equal(M.Notification.query.find().count(), 1)
self.run_script(['--back-days', '7'])
assert_equal(M.Notification.query.find().count(), 1)
self.run_script(['--back-days', '0'])
assert_equal(M.Notification.query.find().count(), 0)
| apache-2.0 | -2,569,682,538,316,318,000 | 39.680851 | 103 | 0.682531 | false |
Snesha/azure-linux-extensions | SteppingStone/web_console/distro/redhat_installer.py | 1 | 1272 | #!/usr/bin/python
#
# Copyright 2014 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.4+
import os
from Utils.WAAgentUtil import waagent
import Utils.HandlerUtil as Util
from base_installer import BaseInstaller
class RedhatInstaller(BaseInstaller):
def __init__(self):
super(RedhatInstaller, self).__init__()
self.install_cmd = 'yum -q -y install'
def install_shellinabox(self):
if waagent.Run('which shellinaboxd', False):
self.install_pkg('openssl')
self.install_pkg('shellinabox')
def install_pkg(self, pkg):
return waagent.Run(' '.join([self.install_cmd, pkg]))
def stop_shellinabox(self):
return waagent.Run('service shellinaboxd stop')
| apache-2.0 | 1,452,466,597,780,487,700 | 29.285714 | 74 | 0.709906 | false |
Aloomaio/googleads-python-lib | examples/ad_manager/v201808/user_service/get_all_users.py | 1 | 1759 | #!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all users.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
def main(client):
# Initialize appropriate service.
user_service = client.GetService('UserService', version='v201808')
# Create a statement to select users.
statement = ad_manager.StatementBuilder(version='v201808')
# Retrieve a small amount of users at a time, paging
# through until all users have been retrieved.
while True:
response = user_service.getUsersByStatement(statement.ToStatement())
if 'results' in response and len(response['results']):
for user in response['results']:
# Print out some information for each user.
print('User with ID "%d" and name "%s" was found.\n' % (user['id'],
user['name']))
statement.offset += statement.limit
else:
break
print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client)
| apache-2.0 | 8,587,936,234,622,361,000 | 34.897959 | 78 | 0.693576 | false |
OpenDrift/opendrift | opendrift/models/seaicedrift.py | 1 | 1535 | # This file is part of OpenDrift.
#
# OpenDrift is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2
#
# OpenDrift is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OpenDrift. If not, see <https://www.gnu.org/licenses/>.
#
# Copyright 2019, Knut-Frode Dagestad, MET Norway
import logging; logger = logging.getLogger(__name__)
from opendrift.models.basemodel import OpenDriftSimulation
from opendrift.elements.passivetracer import PassiveTracer
class SeaIceDrift(OpenDriftSimulation):
"""Trajectory model based on the OpenDrift framework.
Simply propagation with ocean sea ice (this module is not for ice bergs)
Developed at MET Norway.
"""
ElementType = PassiveTracer
required_variables = {
'sea_ice_x_velocity': {'fallback': None},
'sea_ice_y_velocity': {'fallback': None},
'land_binary_mask': {'fallback': None}
}
def __init__(self, *args, **kwargs):
super(SeaIceDrift, self).__init__(*args, **kwargs)
def update(self):
"""Update positions and properties of elements."""
# Move particles with sea ice velocity
self.advect_with_sea_ice()
| gpl-2.0 | -887,467,786,537,552,400 | 32.369565 | 76 | 0.701629 | false |
dailab/roomba-python | demo.py | 1 | 3196 | import create
import time
import io
import os
import sys
import argparse
# define silence
r = 30
# map note names in the lilypad notation to irobot commands
c4 = 60
cis4 = des4 = 61
d4 = 62
dis4 = ees4 = 63
e4 = 64
f4 = 65
fis4 = ges4 = 66
g4 = 67
gis4 = aes4 = 68
a4 = 69
ais4 = bes4 = 70
b4 = 71
c5 = 72
cis5 = des5 = 73
d5 = 74
dis5 = ees5 = 75
e5 = 76
f5 = 77
fis5 = ges5 = 78
g5 = 79
gis5 = aes5 = 80
a5 = 81
ais5 = bes5 = 82
b5 = 83
c6 = 84
cis6 = des6 = 85
d6 = 86
dis6 = ees6 = 87
e6 = 88
f6 = 89
fis6 = ges6 = 90
# define some note lengths
# change the top MEASURE (4/4 time) to get faster/slower speeds
MEASURE = 160
HALF = MEASURE/2
Q = MEASURE/4
E = MEASURE/8
Ed = MEASURE*3/16
S = MEASURE/16
MEASURE_TIME = MEASURE/64.
ROOMBA_PORT = "/dev/rfcomm0"
FIFO_PATH = "/tmp/roombaCommands"
#parser
parser = argparse.ArgumentParser(description="Roomba Voice Command Control Software")
parser.add_argument("-k", dest="keyword", help="Keyword for addressing the roomba", default="")
parser.add_argument("-p", dest="path", help="path where creating the FIFO", default=FIFO_PATH)
parser.add_argument("-r", dest="roomba", help="serial port to the roomba", default=ROOMBA_PORT)
#parsing args
args = parser.parse_args()
keyword = args.keyword.lower()
print("keyword is " + keyword)
FIFO_PATH = args.path
print("created fifo in "+ FIFO_PATH)
ROOMBA_PORT = args.roomba
print("roomba port set to "+ ROOMBA_PORT)
#telekom jingle
telekom = [(c4,S), (c4,S), (c4,S), (e4,S), (c4,Q)]
#fifo init
try:
os.mkfifo(FIFO_PATH, 0766)
except OSError:
os.unlink(FIFO_PATH)
os.mkfifo(FIFO_PATH, 0766)
#robot init
robot = create.Create(ROOMBA_PORT, create.SAFE_MODE)
robot.setSong(1, telekom)
def clean_up():
print("clean up and exit")
os.unlink(FIFO_PATH)
robot.close()
sys.exit(0)
def main():
exit_loop = False
fifo = open(FIFO_PATH, "r")
while exit_loop == False:
line = fifo.readline()
if line != "":
#line = keyword_ignore.sub("", line).strip(" ").strip("\n")
line = line.lower().replace(keyword, "").strip(" ").strip("\n")
print(line)
if line == "clean":
robot.toSafeMode()
time.sleep(.5)
print("starting to clean")
robot._write(create.CLEAN)
if line == "spot":
robot.toSafeMode()
time.sleep(.5)
print("starting to spot clean")
robot._write(create.SPOT)
if line == "stop":
print("stopping")
robot.toSafeMode()
time.sleep(.5)
if line == "dock":
robot.toSafeMode()
time.sleep(.5)
print("seeking dock")
robot._write(create.FORCESEEKINGDOCK)
if line == "jingle":
robot.toSafeMode()
time.sleep(.5)
robot.playSongNumber(1)
if line == "close":
exit_loop = True
try:
main()
except:
print("\nexception -> ")
clean_up()
| artistic-2.0 | 8,155,639,683,856,493,000 | 23.030075 | 95 | 0.556008 | false |
sedruk/Red-DiscordBot | cogs/russianroulette.py | 1 | 14234 | # Roulette.py was created by Redjumpman for Redbot
# This will create a rrgame.JSON file and a data folder
import os
import random
import asyncio
from time import gmtime, strftime
from discord.ext import commands
from .utils.dataIO import dataIO
from .utils import checks
from __main__ import send_cmd_help
kill_message = ["I was really pulling for {0} too. Oh well!",
"I guess {0} really wasn't a pea-brain!",
"Ahhh now that {0} is gone we can quit playing! No? Ok fine!",
("All things considered, I think we can all agree that {0} was a "
"straight shooter."),
"Noooooooo. Not {0}!", "I call dibs on {0}\'s stuff. Too soon?",
"Well I guess {0} and I won't be doing that thing anymore...",
"Here lies {0}. A loser.", "RIP {0}.", "I kinda hated {0} anyway.",
"Hey {0}! I'm back with your snacks! Oh...",
"{0}, you\'re like modern art now!", "Called it!",
"Really guys? {0}\'s dead? Well this server officially blows now.",
"Does this mean I don't have to return the book {0} lent me?",
"Oh come on! Now {0}\'s blood is all over my server!",
"I\'ll always remember {0}...", "Well at least {0} stopped crying.",
"Don\'t look at me. You guys are cleaning up {0}.",
"What I'm not crying. *sniff*", "I TOLD YOU, YOU COULD DO IT!",
"Well I'm sure someone will miss you, {0}.", "Never forget. {0}."
"Yeah. Real smart guys. Just kill off all the fun people.",
"I think I got some splatter on me. Gross",
"I told you it would blow your mind!", "Well this is fun...",
"I go to get popcorn and you all start without me. Rude.",
"Oh God. Just before {0} pulled the trigger they shit their pants.",
"I guess I\'ll dig this hole a little bigger...",
"10/10 would watch {0} blow their brains out again.",
"Well I hope {0} has life insurance...",
"See you in the next life, {0}", "AND THEIR OFF! Oh... wrong game."
"I don\'t know how, but I think {1} cheated.",
"{0} always said they wanted to go out with a bang.",
"So don\'t sing *another one bites the dust* ?",
"I can\'t tell if the smile on {1}\'s face is relief or insanity.",
"Oh stop crying {1}. {0} knew what they were getting into.",
"So that\'s what a human looks like on the inside!",
"My condolences {1}. I know you were *so* close to {0}.",
"GOD NO. PLEASE NO. PLEASE GOD NO. NOOOOOOOOOOOOOOOOOOOOOOO!",
"Time of death {2}. Cause: Stupidity.", "BOOM HEADSHOT! Sorry..."
"Don\'t act like you didn\'t enjoy that, {1}!",
"Is it weird that I wish {1} was dead instead?",
"Oh real great. {0} dies and I\'m still stuck with {1}. Real. Great.",
"Are you eating cheetos? Have some respect {1}! {0} just died!"]
class Russianroulette:
"""Allows 6 players to play Russian Roulette"""
def __init__(self, bot):
self.bot = bot
self.file_path = "data/JumperCogs/roulette/russian.json"
self.system = dataIO.load_json(self.file_path)
self.version = "2.2.01"
@commands.group(pass_context=True, no_pm=True)
async def setrussian(self, ctx):
"""Russian Roulette Settings"""
if ctx.invoked_subcommand is None:
await send_cmd_help(ctx)
@commands.command(name="rrversion", pass_context=True)
@checks.admin_or_permissions(manage_server=True)
    async def _version_setrussian(self, ctx):
"""Shows the version of Russian Roulette"""
await self.bot.say("You are currently running Russian Roulette version "
"{}".format(self.version))
@setrussian.command(name="minbet", pass_context=True)
@checks.admin_or_permissions(manage_server=True)
async def _minbet_setrussian(self, ctx, bet: int):
"""Set the minimum starting bet for Russian Roulette games"""
server = ctx.message.server
settings = self.check_server_settings(server)
if bet > 0:
settings["System"]["Min Bet"] = bet
dataIO.save_json(self.file_path, self.system)
msg = "The initial bet to play russian roulette is set to {}".format(bet)
else:
msg = "I need a number higher than 0."
await self.bot.say(msg)
@commands.command(pass_context=True, no_pm=True)
@checks.admin_or_permissions(manage_server=True)
async def resetrr(self, ctx):
"""Reset command if game is stuck."""
server = ctx.message.server
settings = self.check_server_settings(server)
self.reset_game(settings)
await self.bot.say("Russian Roulette system has been reset.")
@commands.command(pass_context=True, no_pm=True, aliases=["rr"])
async def russian(self, ctx, bet: int):
user = ctx.message.author
server = ctx.message.server
settings = self.check_server_settings(server)
bank = self.bot.get_cog("Economy").bank
if await self.logic_checks(settings, user, bet):
if settings["System"]["Roulette Initial"]:
if user.id in settings["Players"]:
msg = "You are already in the circle. Don\'t be so eager to die."
elif len(settings["Players"].keys()) >= 6:
msg = "Sorry. The max amount of players is 6."
else:
if bet == settings["System"]["Start Bet"]:
self.player_add(settings, user, bet)
self.subtract_credits(settings, user, bet)
msg = "{} has joined the roulette circle".format(user.name)
else:
start_bet = settings["System"]["Start Bet"]
msg = "Your bet must be equal to {}.".format(start_bet)
await self.bot.say(msg)
else:
self.initial_set(settings, bet)
self.player_add(settings, user, bet)
self.subtract_credits(settings, user, bet)
await self.bot.say("{} has started a game of roulette with a starting bet of "
"{}\nThe game will start in 30 seconds or when 5 more "
"players join.".format(user.name, bet))
await asyncio.sleep(30)
if len(settings["Players"].keys()) == 1:
await self.bot.say("Sorry I can't let you play by yourself, that's just "
"suicide.\nTry again when you find some 'friends'.")
player = list(settings["Players"].keys())[0]
mobj = server.get_member(player)
initial_bet = settings["Players"][player]["Bet"]
bank.deposit_credits(mobj, initial_bet)
self.reset_game(settings)
else:
settings["System"]["Active"] = not settings["System"]["Active"]
await self.bot.say("Gather around! The game of russian roulette is starting.\n"
"I'm going to load a round into this six shot **revolver**, "
"give it a good spin, and pass it off to someone at random. "
"**If** everyone is lucky enough to have a turn, I\'ll "
"start all over. Good luck!")
await asyncio.sleep(5)
await self.roulette_game(settings, server)
self.reset_game(settings)
async def logic_checks(self, settings, user, bet):
if settings["System"]["Active"]:
await self.bot.say("A game of roulette is already active. Wait for it to end.")
return False
elif bet < settings["System"]["Min Bet"]:
min_bet = settings["System"]["Min Bet"]
await self.bot.say("Your bet must be greater than or equal to {}.".format(min_bet))
return False
elif len(settings["Players"].keys()) >= 6:
await self.bot.say("There are too many players playing at the moment")
return False
elif not self.enough_credits(user, bet):
await self.bot.say("You do not have enough credits or may need to register a bank "
"account")
return False
else:
return True
async def roulette_game(self, settings, server):
pot = settings["System"]["Pot"]
turn = 0
count = len(settings["Players"].keys())
while count > 0:
players = [server.get_member(x) for x in list(settings["Players"].keys())]
if count > 1:
count -= 1
turn += 1
await self.roulette_round(settings, server, players, turn)
else:
winner = players[0]
await self.bot.say("Congratulations {}, you're the only person alive. Enjoy your "
"blood money...\n{} credits were deposited into {}\'s "
"account".format(winner.mention, pot, winner.name))
bank = self.bot.get_cog("Economy").bank
bank.deposit_credits(winner, pot)
break
async def roulette_round(self, settings, server, players, turn):
roulette_circle = players[:]
chamber = 6
await self.bot.say("*{} put one round into the six shot revolver and gave it a good spin. "
"With a flick of the wrist, it locks in place."
"*".format(self.bot.user.name))
await asyncio.sleep(4)
await self.bot.say("Let's begin round {}.".format(turn))
while chamber >= 1:
if not roulette_circle:
roulette_circle = players[:] # Restart the circle when list is exhausted
chance = random.randint(1, chamber)
player = random.choice(roulette_circle)
await self.bot.say("{} presses the revolver to their temple and slowly squeezes the "
"trigger...".format(player.name))
if chance == 1:
await asyncio.sleep(4)
msg = "**BOOM**\n```{} died and was removed from the group.```".format(player.name)
await self.bot.say(msg)
msg2 = random.choice(kill_message)
settings["Players"].pop(player.id)
remaining = [server.get_member(x) for x in list(settings["Players"].keys())]
player2 = random.choice(remaining)
death_time = strftime("%H:%M:%S", gmtime())
await asyncio.sleep(5)
await self.bot.say(msg2.format(player.name, player2.name, death_time))
await asyncio.sleep(5)
break
else:
await asyncio.sleep(4)
await self.bot.say("**CLICK**\n```{} survived and passed the "
"revolver.```".format(player.name))
await asyncio.sleep(3)
roulette_circle.remove(player)
chamber -= 1
def reset_game(self, settings):
settings["System"]["Pot"] = 0
settings["System"]["Active"] = False
settings["System"]["Start Bet"] = 0
settings["System"]["Roulette Initial"] = False
settings["Players"] = {}
def player_add(self, settings, user, bet):
settings["System"]["Pot"] += bet
settings["Players"][user.id] = {"Name": user.name,
"Mention": user.mention,
"Bet": bet}
def initial_set(self, settings, bet):
settings["System"]["Start Bet"] = bet
settings["System"]["Roulette Initial"] = True
def subtract_credits(self, settings, user, bet):
bank = self.bot.get_cog('Economy').bank
bank.withdraw_credits(user, bet)
def enough_credits(self, user, amount):
bank = self.bot.get_cog('Economy').bank
if bank.account_exists(user):
if bank.can_spend(user, amount):
return True
else:
return False
else:
return False
def check_server_settings(self, server):
if server.id not in self.system["Servers"]:
default = {"System": {"Pot": 0,
"Active": False,
"Start Bet": 0,
"Roulette Initial": False,
"Min Bet": 50},
"Players": {}
}
self.system["Servers"][server.id] = default
dataIO.save_json(self.file_path, self.system)
print("Creating default russian roulette settings for Server: {}".format(server.name))
path = self.system["Servers"][server.id]
return path
else:
path = self.system["Servers"][server.id]
return path
def check_folders():
if not os.path.exists("data/JumperCogs/roulette"):
print("Creating data/JumperCogs/roulette folder...")
os.makedirs("data/JumperCogs/roulette")
def check_files():
system = {"Servers": {}}
f = "data/JumperCogs/roulette/russian.json"
if not dataIO.is_valid_json(f):
print("Creating default russian.json...")
dataIO.save_json(f, system)
def setup(bot):
check_folders()
check_files()
bot.add_cog(Russianroulette(bot))
| gpl-3.0 | -5,136,934,830,560,906,000 | 47.252595 | 100 | 0.523535 | false |
dirk-thomas/vcstool | vcstool/commands/import_.py | 1 | 8799 | import argparse
import os
from shutil import which
import sys
import urllib.request as request
from vcstool import __version__ as vcstool_version
from vcstool.clients import vcstool_clients
from vcstool.clients.vcs_base import run_command
from vcstool.executor import ansi
from vcstool.executor import execute_jobs
from vcstool.executor import output_repositories
from vcstool.executor import output_results
from vcstool.streams import set_streams
import yaml
from .command import add_common_arguments
from .command import Command
class ImportCommand(Command):
command = 'import'
help = 'Import the list of repositories'
def __init__(
self, args, url, version=None, recursive=False, shallow=False
):
super(ImportCommand, self).__init__(args)
self.url = url
self.version = version
self.force = args.force
self.retry = args.retry
self.skip_existing = args.skip_existing
self.recursive = recursive
self.shallow = shallow
def get_parser():
parser = argparse.ArgumentParser(
description='Import the list of repositories', prog='vcs import')
group = parser.add_argument_group('"import" command parameters')
group.add_argument(
'--input', type=file_or_url_type, default='-',
help='Where to read YAML from', metavar='FILE_OR_URL')
group.add_argument(
'--force', action='store_true', default=False,
help="Delete existing directories if they don't contain the "
'repository being imported')
group.add_argument(
'--shallow', action='store_true', default=False,
help='Create a shallow clone without a history')
group.add_argument(
'--recursive', action='store_true', default=False,
help='Recurse into submodules')
group.add_argument(
'--retry', type=int, metavar='N', default=2,
help='Retry commands requiring network access N times on failure')
group.add_argument(
'--skip-existing', action='store_true', default=False,
help="Don't overwrite existing directories or change custom checkouts "
'in repos using the same URL (but fetch repos with same URL)')
return parser
def file_or_url_type(value):
if os.path.exists(value) or '://' not in value:
return argparse.FileType('r')(value)
# use another user agent to avoid getting a 403 (forbidden) error,
# since some websites blacklist or block unrecognized user agents
return request.Request(
value, headers={'User-Agent': 'vcstool/' + vcstool_version})
def get_repositories(yaml_file):
try:
root = yaml.safe_load(yaml_file)
except yaml.YAMLError as e:
raise RuntimeError('Input data is not valid yaml format: %s' % e)
try:
repositories = root['repositories']
return get_repos_in_vcstool_format(repositories)
except KeyError as e:
raise RuntimeError('Input data is not valid format: %s' % e)
except TypeError as e:
# try rosinstall file format
try:
return get_repos_in_rosinstall_format(root)
except Exception:
raise RuntimeError('Input data is not valid format: %s' % e)
def get_repos_in_vcstool_format(repositories):
repos = {}
if repositories is None:
print(
ansi('yellowf') + 'List of repositories is empty' + ansi('reset'),
file=sys.stderr)
return repos
for path in repositories:
repo = {}
attributes = repositories[path]
try:
repo['type'] = attributes['type']
repo['url'] = attributes['url']
if 'version' in attributes:
repo['version'] = attributes['version']
except KeyError as e:
print(
ansi('yellowf') + (
"Repository '%s' does not provide the necessary "
'information: %s' % (path, e)) + ansi('reset'),
file=sys.stderr)
continue
repos[path] = repo
return repos
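
# Illustrative input (not part of this module) accepted by
# get_repos_in_vcstool_format(), as YAML:
#
#   repositories:
#     vcstool:
#       type: git
#       url: https://github.com/dirk-thomas/vcstool.git
#       version: master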
def get_repos_in_rosinstall_format(root):
repos = {}
for i, item in enumerate(root):
if len(item.keys()) != 1:
raise RuntimeError('Input data is not valid format')
repo = {'type': list(item.keys())[0]}
attributes = list(item.values())[0]
try:
path = attributes['local-name']
except KeyError as e:
print(
ansi('yellowf') + (
'Repository #%d does not provide the necessary '
'information: %s' % (i, e)) + ansi('reset'),
file=sys.stderr)
continue
try:
repo['url'] = attributes['uri']
if 'version' in attributes:
repo['version'] = attributes['version']
except KeyError as e:
print(
ansi('yellowf') + (
"Repository '%s' does not provide the necessary "
'information: %s' % (path, e)) + ansi('reset'),
file=sys.stderr)
continue
repos[path] = repo
return repos
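
# Illustrative .rosinstall entry (not part of this module) accepted by
# get_repos_in_rosinstall_format():
#
#   - git:
#       local-name: vcstool
#       uri: https://github.com/dirk-thomas/vcstool.git
#       version: master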
def generate_jobs(repos, args):
jobs = []
for path, repo in repos.items():
path = os.path.join(args.path, path)
clients = [c for c in vcstool_clients if c.type == repo['type']]
if not clients:
from vcstool.clients.none import NoneClient
job = {
'client': NoneClient(path),
'command': None,
'cwd': path,
'output':
"Repository type '%s' is not supported" % repo['type'],
'returncode': NotImplemented
}
jobs.append(job)
continue
client = clients[0](path)
command = ImportCommand(
args, repo['url'],
str(repo['version']) if 'version' in repo else None,
recursive=args.recursive, shallow=args.shallow)
job = {'client': client, 'command': command}
jobs.append(job)
return jobs
def add_dependencies(jobs):
paths = [job['client'].path for job in jobs]
for job in jobs:
job['depends'] = set()
path = job['client'].path
while True:
parent_path = os.path.dirname(path)
if parent_path == path:
break
path = parent_path
if path in paths:
job['depends'].add(path)
def main(args=None, stdout=None, stderr=None):
set_streams(stdout=stdout, stderr=stderr)
parser = get_parser()
add_common_arguments(
parser, skip_hide_empty=True, skip_nested=True, path_nargs='?',
path_help='Base path to clone repositories to')
args = parser.parse_args(args)
try:
input_ = args.input
if isinstance(input_, request.Request):
input_ = request.urlopen(input_)
repos = get_repositories(input_)
except (RuntimeError, request.URLError) as e:
print(ansi('redf') + str(e) + ansi('reset'), file=sys.stderr)
return 1
jobs = generate_jobs(repos, args)
add_dependencies(jobs)
if args.repos:
output_repositories([job['client'] for job in jobs])
workers = args.workers
# for ssh URLs check if the host is known to prevent ssh asking for
# confirmation when using more than one worker
if workers > 1:
ssh_keygen = None
checked_hosts = set()
for job in list(jobs):
if job['command'] is None:
continue
url = job['command'].url
# only check the host from a ssh URL
if not url.startswith('git@') or ':' not in url:
continue
host = url[4:].split(':', 1)[0]
# only check each host name once
if host in checked_hosts:
continue
checked_hosts.add(host)
# get ssh-keygen path once
if ssh_keygen is None:
ssh_keygen = which('ssh-keygen') or False
if not ssh_keygen:
continue
result = run_command([ssh_keygen, '-F', host], '')
if result['returncode']:
print(
'At least one hostname (%s) is unknown, switching to a '
'single worker to allow interactively answering the ssh '
'question to confirm the fingerprint' % host)
workers = 1
break
results = execute_jobs(
jobs, show_progress=True, number_of_workers=workers,
debug_jobs=args.debug)
output_results(results)
any_error = any(r['returncode'] for r in results)
return 1 if any_error else 0
if __name__ == '__main__':
sys.exit(main())
| apache-2.0 | 6,492,176,253,853,990,000 | 32.712644 | 79 | 0.575406 | false |
danielballan/docs | source/_cookbook/csv_writer.py | 1 | 3011 | # -*- coding: utf-8 -*-
"""
========================================
A Minimal CSV writer for data collection
========================================
Problem
-------
Write (a subset of) the data to a CSV file during data collection.
Approach
--------
Write a callback function that integrates Python's built-in csv module with
bluesky.
Example Solution
----------------
"""
###############################################################################
# Boiler plate imports and configuration
import path
import os
import bluesky as bs
import bluesky.plans as bp
import bluesky.callbacks as bc
import csv
from bluesky.examples import motor, det
import matplotlib.pyplot as plt
# Do this if running the example interactively;
# skip it when building the documentation.
import os
if 'BUILDING_DOCS' not in os.environ:
from bluesky.utils import install_qt_kicker # for notebooks, qt -> nb
install_qt_kicker()
plt.ion()
det.exposure_time = .1 # simulate detector exposure time
RE = bs.RunEngine({})
###############################################################################
# Define a callback class which writes out a CSV file
class CSVWriter(bc.CallbackBase):
def __init__(self, fields, fname_format, fpath):
self._path = path.Path(fpath)
os.makedirs(self._path, exist_ok=True)
        self._fname_format = fname_format
self._fields = fields
self._writer = None
self._fout = None
def close(self):
if self._fout is not None:
self._fout.close()
self._fout = None
self._writer = None
def start(self, doc):
self.close()
        fname = self._path / self._fname_format.format(**doc)
self._fout = open(fname, 'xt')
self._writer = csv.writer(self._fout)
def descriptor(self, doc):
if self._writer is not None:
self._writer.writerow(self._fields)
def event(self, doc):
data = doc['data']
if self._writer is not None:
self._writer.writerow(data[k] for k in self._fields)
def stop(self, doc):
self.close()
###############################################################################
# Set up some callbacks
def create_cbs():
return [bc.LiveTable([motor, det]), bc.LivePlot('det', 'motor')]
fmt = '{user}_{uid:.6s}.csv'
export_path = '/tmp/export_demo'
csv_writer = CSVWriter(('motor', 'det'), fmt, export_path)
# send all documents to the CSV writer
RE.subscribe('all', csv_writer)
###############################################################################
# run the scan
uid, = RE(bp.scan([det], motor, -5, 5, 11),
create_cbs(), user='tcaswell')
###############################################################################
# check file
fname = os.path.join(export_path,
'{user}_{uid:.6s}.csv'.format(user='tcaswell', uid=uid))
print("--- {} ---".format(fname))
with open(fname, 'r') as fin:
for ln in fin:
print(ln.strip())
| bsd-2-clause | 6,292,943,657,254,038,000 | 24.091667 | 79 | 0.524078 | false |
frinat/fribourg-natation.ch | fabtasks/webfaction.py | 1 | 11702 | import os
from fabric.api import env, task, local, hide, put, run, get
from fabric.contrib.project import rsync_project
import pipes
import xmlrpclib
import functools
APACHE_START_SCRIPT = """
#!/bin/bash
/home/frinat/bin/envdir /home/frinat/webapps/{appname}/conf \\
/home/frinat/webapps/{appname}/apache2/bin/httpd.worker \\
-f /home/frinat/webapps/{appname}/apache2/conf/httpd.conf \\
-k start
"""
APACHE_CONF = """
ServerRoot "/home/frinat/webapps/{appname}/apache2"
LoadModule dir_module modules/mod_dir.so
LoadModule env_module modules/mod_env.so
LoadModule log_config_module modules/mod_log_config.so
LoadModule mime_module modules/mod_mime.so
LoadModule rewrite_module modules/mod_rewrite.so
LoadModule setenvif_module modules/mod_setenvif.so
LoadModule wsgi_module modules/mod_wsgi.so
LogFormat "%{{X-Forwarded-For}}i %l %u %t \\"%r\\" %>s %b \\"%{{Referer}}i\\" \\"%{{User-Agent}}i\\"" combined
CustomLog /home/frinat/logs/user/access_{appname}.log combined
ErrorLog /home/frinat/logs/user/error_{appname}.log
KeepAlive Off
Listen {port}
MaxSpareThreads 3
MinSpareThreads 1
ServerLimit 1
SetEnvIf X-Forwarded-SSL on HTTPS=1
ThreadsPerChild 5
WSGIPythonHome /home/frinat/webapps/{appname}
WSGIScriptAlias / /home/frinat/webapps/{appname}/lib/python2.7/site-packages/frinat/wsgi.py
WSGIDaemonProcess {appname} processes=2 python-path=/home/frinat/webapps/{appname}/lib/python2.7 threads=12
WSGIProcessGroup {appname}
WSGIRestrictEmbedded On
WSGILazyInitialization On
"""
def runl(*args, **kwargs):
args = [pipes.quote(a) for a in args]
return run(' '.join(args), **kwargs)
@task
def backupdb():
dumpfile = '/home/frinat/db.dump.tmp'
runl(
'pg_dump',
'--clean',
'--blobs',
'--format', 'custom',
'--no-owner',
'--no-password',
'--oids',
'--no-privileges',
'--host', 'localhost',
'--port', '5432',
'--compress', '9',
'--username', env.webfaction_db_user,
'--file', dumpfile,
env.webfaction_db_name,
)
get(dumpfile, 'db.dump')
runl('rm', '-f', dumpfile)
@task
def syncoldmedia():
rsync_project(
remote_dir='/home/frinat/webapps/staging/stages/test03/static/assets',
local_dir='_dev/media',
upload=False,
delete=True,
)
def dnsping(domain, server):
pass
def get_main_ip(api):
ips = api.list_ips()
for info in ips:
if info['is_main']:
return info['ip']
else:
raise RuntimeError('Main server not found.')
def get_current_revision():
with hide('everything'):
return str(local('git log -n 1 --format=\'%h\'', capture=True))
class AuthenticationError(Exception):
pass
class WebfactionAPI(object):
endpoint = 'https://api.webfaction.com/'
def __init__(self, username, password):
self.server = xmlrpclib.ServerProxy(self.endpoint)
try:
self.session_id, _ = self.server.login(username, password)
except xmlrpclib.Fault as e:
raise AuthenticationError(e.faultCode, e.faultString)
def __getattr__(self, name):
assert self.session_id, 'Login before executing any action'
return functools.partial(getattr(self.server, name), self.session_id)
class RevisionApp(object):
prefix = 'frinat_'
domain = 'fribourg-natation.ch'
app_type = 'mod_wsgi33-python27'
def __init__(self, api, revision, is_active=False):
self.revision = revision
self.is_active = is_active
self.api = api
def __str__(self):
return 'RevisionApp({!r}, {!r})'.format(self.domain, self.revision)
@property
def subdomain(self):
return str(self.revision)
@property
def fqdn(self):
return '{}.{}'.format(self.subdomain, self.domain)
@property
def appname(self):
return '{}{}'.format(self.prefix, self.revision)
@classmethod
def active_revision(cls, api):
active = '{}active'.format(cls.prefix)
for website in api.list_websites():
if website['name'] == active:
for app, _ in website['website_apps']:
if app.startswith(cls.prefix):
rev = app[len(cls.prefix):]
if rev not in ['static', 'media']:
return rev
@classmethod
def iterall(cls, api):
active = cls.active_revision(api)
for app in api.list_apps():
if app['name'].startswith(cls.prefix):
revision = app['name'][len(cls.prefix):]
if revision not in ['static', 'media']:
yield cls(api, revision, revision == active)
def _log(self, msg, *args, **kwargs):
print msg.format(*args, **kwargs)
def deploy(self):
ip = get_main_ip(self.api)
appname = self.appname
self._log('Creating app...')
self.api.create_app(appname, self.app_type, True, '', False)
self._log('Creating domain...')
self.api.create_domain(self.domain, self.subdomain)
self._log('Creating website...')
self.api.create_website(appname, ip, False, [self.fqdn],
[appname, '/'],
['frinat_static', '/static'],
['frinat_media', '/media'])
self._install()
self._restart()
def _install(self):
appname = self.appname
build_name = 'frinat_www-0.1.0-py27-none-any.whl'
port = self.api.system(
'grep Listen /home/frinat/webapps/{}/apache2/conf/httpd.conf'
.format(self.appname)
).strip().split(' ', 1)[1]
# Build distribution
self._log('Building distribution (locally)...')
local('python setup.py bdist_wheel')
# Upload distribution
self._log('Uploading package...')
put(os.path.join('dist', build_name),
'/home/frinat/webapps/{}/{}'.format(appname, build_name))
# Create and setup virtualenv
self._log('Creating virtualenv...')
run('/home/frinat/bin/virtualenv-2.7 --python=/usr/local/bin/python2.7'
' /home/frinat/webapps/{}'.format(appname))
run('/home/frinat/webapps/{}/bin/pip install wheel'.format(appname))
# Install package and dependencies
self._log('Installing dependencies...')
run('/home/frinat/webapps/{}/bin/pip install --no-index -I -f '
'/home/frinat/wheelhouse /home/frinat/webapps/{}/{}'
.format(appname, appname, build_name))
put('manage.py', '/home/frinat/webapps/{}/bin/manage.py'
.format(appname))
# Configure webserver
self._log('Configuring webserver...')
run('rm -rf /home/frinat/webapps/{}/htdocs'.format(appname))
run('mkdir /home/frinat/webapps/{}/conf'.format(appname))
self._setopt('DATABASE_URL', 'postgres://{}:{}@localhost/{}'.format(
env.webfaction_db_user,
env.webfaction_db_pwd,
env.webfaction_db_name,
))
self._setopt('DJANGO_SECRET_KEY', env.production_secret_key)
self._setopt('DJANGO_MEDIA_ROOT', '/home/frinat/webapps/frinat_media')
self._setopt('DJANGO_STATIC_ROOT',
'/home/frinat/webapps/frinat_static')
self._setopt('LD_LIBRARY_PATH',
'/home/frinat/webapps/{}/apache2/lib'.format(appname))
self.api.write_file(
'/home/frinat/webapps/{}/apache2/conf/httpd.conf'.format(appname),
APACHE_CONF.format(appname=appname, port=port).strip(),
)
self.api.write_file(
'/home/frinat/webapps/{}/apache2/bin/start'.format(appname),
APACHE_START_SCRIPT.format(appname=appname).strip(),
)
def _setopt(self, name, value):
self.api.write_file(
'/home/frinat/webapps/{}/conf/{}'.format(self.appname, name),
value,
)
def _restart(self):
self._log('Restarting webserver...')
self._log(run('/home/frinat/webapps/{}/apache2/bin/restart'
.format(self.appname)))
def _run_django(self, *cmd):
envdir = '/home/frinat/bin/envdir'
confdir = '/home/frinat/webapps/{}/conf'.format(self.appname)
executable = (
'/home/frinat/webapps/{0}/bin/python '
'/home/frinat/webapps/{0}/bin/manage.py'.format(self.appname)
)
args = [pipes.quote(c) for c in cmd] + [
'--settings=frinat.settings',
'--configuration=Config',
]
run('{} {} {} {}'.format(envdir, confdir, executable, ' '.join(args)))
def collectstatic(self):
self._run_django('collectstatic')
def promote(self):
ip = get_main_ip(self.api)
self.api.update_website('{}active'.format(self.prefix), ip, False,
[self.domain, 'www.{}'.format(self.domain)])
self.api.update_website('{}active'.format(self.prefix), ip, False,
[self.domain, 'www.{}'.format(self.domain)],
[self.appname, '/'],
['frinat_static', '/static'],
['frinat_media', '/media'])
def destroy(self):
ip = get_main_ip(self.api)
appname = self.appname
self._log('Stopping webserver')
run('/home/frinat/webapps/{}/apache2/bin/stop'.format(appname))
self._log('Destroying website...')
self.api.delete_website(appname, ip, False)
self._log('Destroying domain...')
self.api.delete_domain(self.domain, self.subdomain)
self._log('Destroying app...')
self.api.delete_app(appname)
line = self.api.system('crontab -l | grep {}'.format(appname)).strip()
if line:
self.api.delete_cronjob(line)
def browse(self):
local('zsh -ic "open http://{}"'.format(self.fqdn))
@task
def reqs():
api = WebfactionAPI(env.webfaction_username, env.webfaction_password)
try:
with open('requirements.txt') as fh:
api.write_file('requirements.txt.tmp', fh.read())
run('~/wheels-builder/bin/pip wheel -r requirements.txt.tmp')
finally:
run('rm -f requirements.txt.tmp')
@task
def list_instances():
rev = get_current_revision()
api = WebfactionAPI(env.webfaction_username, env.webfaction_password)
apps = list(RevisionApp.iterall(api))
print
print 'Current revision: {}'.format(rev)
print
print 'Deployed instances:'
if apps:
for i, app in enumerate(apps):
print ' {}. {} {}'.format(i + 1, app.revision,
'*' if app.is_active else '')
else:
print 'No instances found.'
class RevisionMethodCaller(object):
def __init__(self, method):
self.method = method
@classmethod
def build(cls, method):
return task(cls(method))
def __call__(self, rev=None, *args, **kwargs):
if rev is None:
rev = get_current_revision()
api = WebfactionAPI(env.webfaction_username, env.webfaction_password)
app = RevisionApp(api, rev)
method = getattr(app, self.method)
return method(*args, **kwargs)
browse = RevisionMethodCaller.build('browse')
promote = RevisionMethodCaller.build('promote')
collectstatic = RevisionMethodCaller.build('collectstatic')
fixdb = RevisionMethodCaller.build('fixdb')
deploy = RevisionMethodCaller.build('deploy')
destroy = RevisionMethodCaller.build('destroy')
| mit | 7,476,490,094,085,385,000 | 30.972678 | 110 | 0.587934 | false |
ajaniv/django-core-models | django_core_models/locations/tests/test_validation.py | 1 | 5553 | """
.. module:: django_core_models.location.tests.test_validation
:synopsis: location application validation unit test module.
*location* application validation unit test module.
"""
from __future__ import print_function
from django.core.exceptions import ValidationError
from django_core_utils.tests.test_utils import BaseModelTestCase
from . import factories
from ..validation import (country_validation, language_validation,
post_office_box_validation, postal_code_validation,
province_validation, state_validation)
from .factories import (CountryModelFactory, LanguageModelFactory,
ProvinceModelFactory, StateModelFactory)
class ValidationTestCase(BaseModelTestCase):
"""Base validation unit test class."""
def country_usa(self):
return factories.country_usa()
def country_france(self):
return factories.country_france()
valid_post_office_boxes = (
'PO Box 001', 'P.O. Box 002', 'po b 001', 'po bin 001',
'Post O bin 001', 'P. Office bin 001',
'P.O.Box 003')
invalid_post_office_boxes = ('004 P.O. Box', '005 PO Box', '006', 'abc')
class PostOfficeBoxValidationTestCase(ValidationTestCase):
"""Post office box validation unit test class."""
def test_post_office_box_validation_usa(self):
for pob in valid_post_office_boxes:
post_office_box_validation(self.country_usa(), pob)
def test_usa_post_office_box_validation_exceptions_usa(self):
for pob in invalid_post_office_boxes:
with self.assertRaises(ValidationError):
post_office_box_validation(self.country_usa(), pob)
valid_postal_codes = ('12345', '12345-6789', '12345 - 6789')
invalid_postal_codes = ('1234', '1234A', '12345 A', '12345-6789A')
class PostalCodeValidationTestCase(ValidationTestCase):
"""Postal code validation unit test class."""
def test_postal_code_validation_usa(self):
for postal_box in valid_postal_codes:
postal_code_validation(self.country_usa(), postal_box)
def test_postal_code_validation_exceptions_usa(self):
for pob in invalid_postal_codes:
with self.assertRaises(ValidationError):
postal_code_validation(self.country_usa(), pob)
class CountryValidationTestCase(ValidationTestCase):
"""Country validation unit test class."""
def test_country_validation_usa(self):
country_validation(self.country_usa())
def test_postal_code_validation_exceptions_usa(self):
with self.assertRaises(ValidationError):
country_validation(CountryModelFactory(
name="USA", iso_code="US"))
class LanguageValidationTestCase(ValidationTestCase):
"""Language validation unit test class."""
def test_language_validation_usa(self):
language_validation(LanguageModelFactory(
name=LanguageModelFactory.LANGUAGE_FRENCH,
iso_code=LanguageModelFactory.ISO_639_2_FR))
def test_language_validation_exceptions_usa(self):
with self.assertRaises(ValidationError):
country_validation(CountryModelFactory(
name="French", iso_code="zz"))
class ProvinceValidationTestCase(ValidationTestCase):
"""Province validation unit test class."""
def test_province_validation(self):
province_validation(ProvinceModelFactory(
name=ProvinceModelFactory.PROVINCE_NORMANDY,
iso_code=ProvinceModelFactory.ISO_3166_2_NORMANDY,
country=self.country_france()))
def test_province_validation_invalid_iso(self):
with self.assertRaises(ValidationError):
province_validation(ProvinceModelFactory(
name=ProvinceModelFactory.PROVINCE_NORMANDY,
iso_code="FR-P",
country=self.country_france()))
def test_province_validation_invalid_name(self):
with self.assertRaises(ValidationError):
province_validation(StateModelFactory(
name="Bad name",
iso_code=ProvinceModelFactory.ISO_3166_2_NORMANDY,
country=self.country_france()))
def test_state_validation_invalid_country(self):
with self.assertRaises(ValidationError):
province_validation(StateModelFactory(
name=ProvinceModelFactory.PROVINCE_NORMANDY,
iso_code=ProvinceModelFactory.ISO_3166_2_NORMANDY,
country=self.country_usa()))
class StateValidationTestCase(ValidationTestCase):
"""State validation unit test class."""
def test_state_validation(self):
state_validation(StateModelFactory(
name="New Jersey", iso_code="US-NJ",
country=self.country_usa()))
def test_state_validation_invalid_iso(self):
with self.assertRaises(ValidationError):
state_validation(StateModelFactory(
name="New Jersey",
iso_code="US-NJT",
country=self.country_usa()))
def test_state_validation_invalid_name(self):
with self.assertRaises(ValidationError):
state_validation(StateModelFactory(
name="Old Jersey",
iso_code="US-NJ",
country=self.country_usa()))
def test_state_validation_invalid_country(self):
with self.assertRaises(ValidationError):
state_validation(StateModelFactory(
name="New Jersey",
iso_code="US-NJ",
country=self.country_france()))
| mit | -865,493,100,001,814,000 | 36.02 | 77 | 0.660004 | false |
nitely/Spirit | spirit/core/utils/decorators.py | 1 | 1458 | # -*- coding: utf-8 -*-
from functools import wraps
from django.core.exceptions import PermissionDenied
from django.contrib.auth.views import redirect_to_login
from django.shortcuts import redirect
from spirit.core.conf import settings
def moderator_required(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
user = request.user
if not user.is_authenticated:
return redirect_to_login(next=request.get_full_path(),
login_url=settings.LOGIN_URL)
if not user.st.is_moderator:
raise PermissionDenied
return view_func(request, *args, **kwargs)
return wrapper
def administrator_required(view_func):
@wraps(view_func)
def wrapper(request, *args, **kwargs):
user = request.user
if not user.is_authenticated:
return redirect_to_login(next=request.get_full_path(),
login_url=settings.LOGIN_URL)
if not user.st.is_administrator:
raise PermissionDenied
return view_func(request, *args, **kwargs)
return wrapper
def guest_only(view_func):
# TODO: test!
@wraps(view_func)
def wrapper(request, *args, **kwargs):
if request.user.is_authenticated:
return redirect(request.GET.get('next', request.user.st.get_absolute_url()))
return view_func(request, *args, **kwargs)
return wrapper
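# Usage sketch (hypothetical views, for illustration only):
#
#   @moderator_required
#   def topic_moderate(request, topic_id):
#       ...
#
#   @guest_only
#   def register(request):
#       ...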
| mit | 2,144,196,783,148,449,000 | 25.509091 | 88 | 0.631001 | false |
roryyorke/python-control | control/margins.py | 1 | 13996 | """margins.py
Functions for computing stability margins and related functions.
Routines in this module:
margins.stability_margins
margins.phase_crossover_frequencies
margins.margin
"""
# Python 3 compatibility (needs to go here)
from __future__ import print_function
"""Copyright (c) 2011 by California Institute of Technology
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the California Institute of Technology nor
the names of its contributors may be used to endorse or promote
products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH
OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
Author: Richard M. Murray
Date: 14 July 2011
$Id$
"""
import math
import numpy as np
import scipy as sp
from . import xferfcn
from .lti import issiso
from . import frdata
__all__ = ['stability_margins', 'phase_crossover_frequencies', 'margin']
# helper functions for stability_margins
def _polyimsplit(pol):
"""split a polynomial with (iw) applied into a real and an
imaginary part with w applied"""
rpencil = np.zeros_like(pol)
ipencil = np.zeros_like(pol)
rpencil[-1::-4] = 1.
rpencil[-3::-4] = -1.
ipencil[-2::-4] = 1.
ipencil[-4::-4] = -1.
return pol * rpencil, pol*ipencil
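# Worked example (illustration, not from the original source): for
# pol = [a3, a2, a1, a0], i.e. a3*s**3 + a2*s**2 + a1*s + a0, substituting
# s -> j*w gives -1j*a3*w**3 - a2*w**2 + 1j*a1*w + a0, so the masks above
# yield the real part -a2*w**2 + a0 and the imaginary part -a3*w**3 + a1*w,
# both as polynomials in w.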
def _polysqr(pol):
"""return a polynomial squared"""
return np.polymul(pol, pol)
# Took the framework for the old function by
# Sawyer B. Fuller <[email protected]>, removed a lot of the innards
# and replaced with analytical polynomial functions for LTI systems.
#
# idea for the frequency data solution copied/adapted from
# https://github.com/alchemyst/Skogestad-Python/blob/master/BODE.py
# Rene van Paassen <[email protected]>
#
# RvP, July 8, 2014, corrected to exclude phase=0 crossing for the gain
# margin polynomial
# RvP, July 8, 2015, augmented to calculate all phase/gain crossings with
# frd data. Correct to return smallest phase
# margin, smallest gain margin and their frequencies
# RvP, Jun 10, 2017, modified the inclusion of roots found for phase
# crossing to include all >= 0, made subsequent calc
# insensitive to div by 0
# also changed the selection of which crossings to
# return on basis of "A note on the Gain and Phase
# Margin Concepts" Journal of Control and Systems
# Engineering, Yazdan Bavafi-Toosi, Dec 2015, vol 3
# issue 1, pp 51-59, closer to Matlab behavior, but
# not completely identical in edge cases, which don't
# cross but touch gain=1
def stability_margins(sysdata, returnall=False, epsw=0.0):
"""Calculate stability margins and associated crossover frequencies.
Parameters
----------
sysdata: LTI system or (mag, phase, omega) sequence
sys : LTI system
Linear SISO system
mag, phase, omega : sequence of array_like
Arrays of magnitudes (absolute values, not dB), phases (degrees),
and corresponding frequencies. Crossover frequencies returned are
in the same units as those in `omega` (e.g., rad/sec or Hz).
returnall: bool, optional
If true, return all margins found. If False (default), return only the
minimum stability margins. For frequency data or FRD systems, only
margins in the given frequency region can be found and returned.
epsw: float, optional
Frequencies below this value (default 0.0) are considered static gain,
and not returned as margin.
Returns
-------
gm: float or array_like
Gain margin
    pm: float or array_like
Phase margin
sm: float or array_like
Stability margin, the minimum distance from the Nyquist plot to -1
wg: float or array_like
Frequency for gain margin (at phase crossover, phase = -180 degrees)
wp: float or array_like
Frequency for phase margin (at gain crossover, gain = 1)
ws: float or array_like
Frequency for stability margin (complex gain closest to -1)
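
    Examples
    --------
    A minimal usage sketch (system chosen for illustration only):
    >>> sys = tf(1, [1, 2, 1, 0])
    >>> gm, pm, sm, wg, wp, ws = stability_margins(sys)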
"""
try:
if isinstance(sysdata, frdata.FRD):
sys = frdata.FRD(sysdata, smooth=True)
elif isinstance(sysdata, xferfcn.TransferFunction):
sys = sysdata
elif getattr(sysdata, '__iter__', False) and len(sysdata) == 3:
mag, phase, omega = sysdata
sys = frdata.FRD(mag * np.exp(1j * phase * math.pi/180),
omega, smooth=True)
else:
sys = xferfcn._convert_to_transfer_function(sysdata)
except Exception as e:
print (e)
raise ValueError("Margin sysdata must be either a linear system or "
"a 3-sequence of mag, phase, omega.")
# calculate gain of system
if isinstance(sys, xferfcn.TransferFunction):
# check for siso
if not issiso(sys):
raise ValueError("Can only do margins for SISO system")
# real and imaginary part polynomials in omega:
rnum, inum = _polyimsplit(sys.num[0][0])
rden, iden = _polyimsplit(sys.den[0][0])
# test (imaginary part of tf) == 0, for phase crossover/gain margins
test_w_180 = np.polyadd(np.polymul(inum, rden), np.polymul(rnum, -iden))
w_180 = np.roots(test_w_180)
# first remove imaginary and negative frequencies, epsw removes the
# "0" frequency for type-2 systems
w_180 = np.real(w_180[(np.imag(w_180) == 0) * (w_180 >= epsw)])
# evaluate response at remaining frequencies, to test for phase 180 vs 0
with np.errstate(all='ignore'):
resp_w_180 = np.real(
np.polyval(sys.num[0][0], 1.j*w_180) /
np.polyval(sys.den[0][0], 1.j*w_180))
# only keep frequencies where the negative real axis is crossed
w_180 = w_180[np.real(resp_w_180) <= 0.0]
# and sort
w_180.sort()
# test magnitude is 1 for gain crossover/phase margins
test_wc = np.polysub(np.polyadd(_polysqr(rnum), _polysqr(inum)),
np.polyadd(_polysqr(rden), _polysqr(iden)))
wc = np.roots(test_wc)
wc = np.real(wc[(np.imag(wc) == 0) * (wc > epsw)])
wc.sort()
        # the stability margin is the trickiest to derive: it is the minimum
        # distance from the Nyquist curve to the -1 point, so set the
        # derivative of the squared distance to zero; the second derivative
        # must be > 0 there for a true minimum
test_wstabd = np.polyadd(_polysqr(rden), _polysqr(iden))
test_wstabn = np.polyadd(_polysqr(np.polyadd(rnum,rden)),
_polysqr(np.polyadd(inum,iden)))
test_wstab = np.polysub(
np.polymul(np.polyder(test_wstabn),test_wstabd),
np.polymul(np.polyder(test_wstabd),test_wstabn))
# find the solutions, for positive omega, and only real ones
wstab = np.roots(test_wstab)
wstab = np.real(wstab[(np.imag(wstab) == 0) *
(np.real(wstab) >= 0)])
# and find the value of the 2nd derivative there, needs to be positive
wstabplus = np.polyval(np.polyder(test_wstab), wstab)
wstab = np.real(wstab[(np.imag(wstab) == 0) * (wstab > epsw) *
(wstabplus > 0.)])
wstab.sort()
else:
# a bit coarse, have the interpolated frd evaluated again
def _mod(w):
"""Calculate |G(jw)| - 1"""
return np.abs(sys._evalfr(w)[0][0]) - 1
def _arg(w):
"""Calculate the phase angle at -180 deg"""
return np.angle(-sys._evalfr(w)[0][0])
def _dstab(w):
"""Calculate the distance from -1 point"""
return np.abs(sys._evalfr(w)[0][0] + 1.)
# Find all crossings, note that this depends on omega having
# a correct range
widx = np.where(np.diff(np.sign(_mod(sys.omega))))[0]
wc = np.array(
[sp.optimize.brentq(_mod, sys.omega[i], sys.omega[i+1])
for i in widx])
# find the phase crossings ang(H(jw) == -180
widx = np.where(np.diff(np.sign(_arg(sys.omega))))[0]
widx = widx[np.real(sys._evalfr(sys.omega[widx])[0][0]) <= 0]
w_180 = np.array(
[sp.optimize.brentq(_arg, sys.omega[i], sys.omega[i+1])
for i in widx])
# find all stab margins?
widx, = np.where(np.diff(np.sign(np.diff(_dstab(sys.omega)))) > 0)
wstab = np.array(
[sp.optimize.minimize_scalar(_dstab,
bracket=(sys.omega[i], sys.omega[i+1])
).x
for i in widx])
wstab = wstab[(wstab >= sys.omega[0]) * (wstab <= sys.omega[-1])]
# margins, as iterables, converted frdata and xferfcn calculations to
# vector for this
with np.errstate(all='ignore'):
gain_w_180 = np.abs(sys._evalfr(w_180)[0][0])
GM = 1.0/gain_w_180
SM = np.abs(sys._evalfr(wstab)[0][0]+1)
PM = np.remainder(np.angle(sys._evalfr(wc)[0][0], deg=True), 360.0) - 180.0
if returnall:
return GM, PM, SM, w_180, wc, wstab
else:
if GM.shape[0] and not np.isinf(GM).all():
with np.errstate(all='ignore'):
gmidx = np.where(np.abs(np.log(GM)) ==
np.min(np.abs(np.log(GM))))
else:
gmidx = -1
if PM.shape[0]:
pmidx = np.where(np.abs(PM) == np.amin(np.abs(PM)))[0]
return (
(not gmidx != -1 and float('inf')) or GM[gmidx][0],
(not PM.shape[0] and float('inf')) or PM[pmidx][0],
(not SM.shape[0] and float('inf')) or np.amin(SM),
(not gmidx != -1 and float('nan')) or w_180[gmidx][0],
(not wc.shape[0] and float('nan')) or wc[pmidx][0],
(not wstab.shape[0] and float('nan')) or wstab[SM==np.amin(SM)][0])
# Contributed by Steffen Waldherr <[email protected]>
def phase_crossover_frequencies(sys):
"""Compute frequencies and gains at intersections with real axis
in Nyquist plot.
Call as:
        omega, gain = phase_crossover_frequencies(sys)
Returns
-------
    omega: 1d array
        (Non-negative) frequencies where the Nyquist plot intersects
        the real axis
    gain: 1d array
        Gains at the corresponding frequencies
Examples
--------
>>> tf = TransferFunction([1], [1, 2, 3, 4])
    >>> phase_crossover_frequencies(tf)
(array([ 1.73205081, 0. ]), array([-0.5 , 0.25]))
"""
# Convert to a transfer function
tf = xferfcn._convert_to_transfer_function(sys)
# if not siso, fall back to (0,0) element
#! TODO: should add a check and warning here
num = tf.num[0][0]
den = tf.den[0][0]
# Compute frequencies that we cross over the real axis
numj = (1.j)**np.arange(len(num)-1,-1,-1)*num
denj = (-1.j)**np.arange(len(den)-1,-1,-1)*den
allfreq = np.roots(np.imag(np.polymul(numj,denj)))
realfreq = np.real(allfreq[np.isreal(allfreq)])
realposfreq = realfreq[realfreq >= 0.]
# using real() to avoid rounding errors and results like 1+0j
# it would be nice to have a vectorized version of self.evalfr here
gain = np.real(np.asarray([tf._evalfr(f)[0][0] for f in realposfreq]))
return realposfreq, gain
def margin(*args):
"""margin(sysdata)
Calculate gain and phase margins and associated crossover frequencies
Parameters
----------
sysdata : LTI system or (mag, phase, omega) sequence
sys : StateSpace or TransferFunction
Linear SISO system
mag, phase, omega : sequence of array_like
Input magnitude, phase (in deg.), and frequencies (rad/sec) from
bode frequency response data
Returns
-------
gm : float
Gain margin
pm : float
Phase margin (in degrees)
wg: float
Frequency for gain margin (at phase crossover, phase = -180 degrees)
wp: float
Frequency for phase margin (at gain crossover, gain = 1)
Margins are calculated for a SISO open-loop system.
If there is more than one gain crossover, the one at the smallest
margin (deviation from gain = 1), in absolute sense, is
returned. Likewise the smallest phase margin (in absolute sense)
is returned.
Examples
--------
>>> sys = tf(1, [1, 2, 1, 0])
>>> gm, pm, wg, wp = margin(sys)
"""
if len(args) == 1:
sys = args[0]
margin = stability_margins(sys)
elif len(args) == 3:
margin = stability_margins(args)
else:
raise ValueError("Margin needs 1 or 3 arguments; received %i."
% len(args))
return margin[0], margin[1], margin[3], margin[4]
| bsd-3-clause | 1,176,016,653,817,876,700 | 37.032609 | 80 | 0.61539 | false |
wrenoud/blueberry-bush | RepositoryState.py | 1 | 1808 | import os
from FileState import FileStateLocal
# STATE_FILE is referenced in RepositoryState.ignore() but never defined in
# the original source; the name below is an assumed placeholder.
STATE_FILE = ".repository_state"
class Remote(object):
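    """Placeholder for remote-side sync operations; all methods are stubs."""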
def __init__(self): pass
def create(self): pass
def update(self): pass
def modified(self): pass
def delete(self): pass
class RepositoryState(object):
"""Manages the sync information, this includes the local root, ingnores, files, and update que"""
def __init__(self, rootpath):
self.root = os.path.abspath(rootpath)
self.local_files = {}
def create(self, src_path):
local_file = FileStateLocal(self.root, src_path)
if local_file.check_exists():
local_file.check_modified()
local_file.check_size()
local_file.check_hash()
self.local_files[local_file.key] = local_file
return local_file.key
def modified(self, src_path):
src_key = FileStateLocal.as_key(self.root,src_path)
self.local_files[src_key].check_size()
self.local_files[src_key].check_modified()
self.local_files[src_key].check_hash()
return src_key
def move(self, src_path, dest_path):
src_key = FileStateLocal.as_key(self.root,src_path)
self.local_files[src_key].local_path = dest_path
dest_key = self.local_files[src_key].key
self.local_files[dest_key] = self.local_files.pop(src_key)
return (src_key,dest_key)
def delete(self, src_path):
src_key = FileStateLocal.as_key(self.root,src_path)
del self.local_files[src_key]
return src_key
def ignore(self, src_path):
        _, name = os.path.split(src_path)
if name.startswith(".~lock"): return True
if name.endswith("~"): return True
if name == STATE_FILE: return True
return False
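# Usage sketch (paths are illustrative):
#
#   repo = RepositoryState(".")
#   key = repo.create("./notes.txt")
#   repo.modified("./notes.txt")
#   repo.delete("./notes.txt")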
| gpl-2.0 | -8,155,816,514,990,178,000 | 30.719298 | 101 | 0.601217 | false |
texnokrates/electroballz | electroballz/single_coeff.py | 1 | 1461 | from scipy import *
from scipy.special import sph_jn, sph_yn
# The following is an entirely computationally inefficient draft, intended for basic orientation.
def jl(l,z):
"""Wrapper for sph_jn (discards the unnecessary data)"""
    return sph_jn(l, z)[0][l]
def yl(l,z):
"""Wrapper for sph_yn (discards the unnecessary data)"""
return sph_yn(l, z)[0][l]
def h1l(l,z):
"""First spherical Hankel function"""
return jl(l,z) + 1j*yl(l,z)
def h2l(l,z):
"""Second spherical Hankel function"""
    return jl(l,z) - 1j*yl(l,z)
def bf_coeff(l, km, k0, etam, eta0, r):
"""Ratios between (b1lm,f1lm) and a1lm. See the single_spherical_wave_scatter.nb file"""
sph_j_kmr = sph_jn(l, km*r)
sph_j_k0r = sph_jn(l, k0*r)
sph_y_k0r = sph_yn(l, k0*r)
jm = sph_j_kmr[0][l]
h01 = sph_j_k0r[0][l] + 1j * sph_y_k0r[0][l]
h02 = sph_j_k0r[0][l] - 1j * sph_y_k0r[0][l]
Jm = jm + km * r * sph_j_kmr[1][l]
H01 = h01 + k0 * r * (sph_j_k0r[1][l] + 1j * sph_y_k0r[1][l])
H02 = h02 + k0 * r * (sph_j_k0r[1][l] - 1j * sph_y_k0r[1][l])
denom1 = h01*Jm*k0*eta0 - H01*jm*km*etam
b1_a1 = - (h02*Jm*k0*eta0 - H02*jm*km*etam) / denom1
f1_a1 = - k0 * sqrt(eta0*etam) * (H01*h02 - h01*H02) / denom1
denom2 = (H01*jm*km*eta0 - h01*Jm*k0*etam)
b2_a2 = - (H02*jm*km*eta0 - h02*Jm*k0*etam) / denom2
f2_a2 = - k0 * sqrt(eta0*etam) * (-H01*h02 + h01*H02) / denom2
return (b1_a1, f1_a1, b2_a2, f2_a2)
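# Usage sketch (all physical parameters below are illustrative placeholders):
#
#   b1_a1, f1_a1, b2_a2, f2_a2 = bf_coeff(l=1, km=2.0, k0=1.0,
#                                         etam=1.0, eta0=1.0, r=0.5)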
| gpl-3.0 | -3,926,437,970,418,607,000 | 30.76087 | 97 | 0.577002 | false |