repo: tiborsimko/invenio-jsonschemas | path: invenio_jsonschemas/errors.py | license: mit | size: 1593 bytes | copies: 1

# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Invenio-JSONSchemas errors."""
from __future__ import absolute_import, print_function
class JSONSchemaError(Exception):
"""Base class for errors in Invenio-JSONSchemas module."""
class JSONSchemaNotFound(JSONSchemaError):
"""Exception raised when a requested JSONSchema is not found."""
def __init__(self, schema, *args, **kwargs):
"""Constructor.
:param schema: path of the requested schema which was not found.
"""
self.schema = schema
super(JSONSchemaNotFound, self).__init__(
'Schema "{}" not found'.format(schema), *args, **kwargs
)
class JSONSchemaDuplicate(JSONSchemaError):
"""Exception raised when multiple schemas match the same path."""
def __init__(self, schema, first_dir, second_dir, *args, **kwargs):
"""Constructor.
:param schema: duplicate schema path.
:param first_dir: first directory where the schema was found.
:param second_dir: second directory where the schema was found.
"""
self.schema = schema
        super(JSONSchemaDuplicate, self).__init__(
            'Schema "{schema}" defined in multiple '
            'directories: "{first}" and "{second}"'.format(
                schema=schema,
                first=first_dir,
                second=second_dir),
            *args, **kwargs)
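# A minimal usage sketch (editor's addition; the schema path below is
# hypothetical). JSONSchemaError is the shared base class, so callers can
# catch either the specific error or the whole family:
#
#     try:
#         raise JSONSchemaNotFound('records/record-v1.0.0.json')
#     except JSONSchemaError as exc:
#         print(exc)  # -> Schema "records/record-v1.0.0.json" not found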

repo: qPCR4vir/orange3 | path: Orange/widgets/unsupervised/owpca.py | license: bsd-2-clause | size: 14413 bytes | copies: 1

from PyQt4.QtGui import QFormLayout, QColor, QApplication, QLineEdit
from PyQt4.QtCore import Qt, QTimer
import numpy
import pyqtgraph as pg
from Orange.data import Table, Domain, StringVariable
from Orange.data.sql.table import SqlTable, AUTO_DL_LIMIT
from Orange.preprocess import Normalize
from Orange.projection import PCA
from Orange.widgets import widget, gui, settings
try:
from orangecontrib import remote
remotely = True
except ImportError:
remotely = False
class OWPCA(widget.OWWidget):
name = "PCA"
description = "Principal component analysis with a scree-diagram."
icon = "icons/PCA.svg"
priority = 3050
inputs = [("Data", Table, "set_data")]
outputs = [("Transformed data", Table),
("Components", Table),
("PCA", PCA)]
ncomponents = settings.Setting(2)
variance_covered = settings.Setting(100)
batch_size = settings.Setting(100)
address = settings.Setting('')
auto_update = settings.Setting(True)
auto_commit = settings.Setting(True)
normalize = settings.Setting(True)
maxp = settings.Setting(20)
axis_labels = settings.Setting(10)
graph_name = "plot.plotItem"
def __init__(self):
super().__init__()
self.data = None
self._pca = None
self._transformed = None
self._variance_ratio = None
self._cumulative = None
self._line = False
self._pca_projector = PCA()
self._pca_projector.component = self.ncomponents
self._pca_preprocessors = PCA.preprocessors
# Components Selection
box = gui.vBox(self.controlArea, "Components Selection")
form = QFormLayout()
box.layout().addLayout(form)
self.components_spin = gui.spin(
box, self, "ncomponents", 0, 1000,
callback=self._update_selection_component_spin,
keyboardTracking=False
)
self.components_spin.setSpecialValueText("All")
self.variance_spin = gui.spin(
box, self, "variance_covered", 1, 100,
callback=self._update_selection_variance_spin,
keyboardTracking=False
)
self.variance_spin.setSuffix("%")
form.addRow("Components:", self.components_spin)
form.addRow("Variance covered:", self.variance_spin)
# Incremental learning
self.sampling_box = gui.vBox(self.controlArea, "Incremental learning")
self.addresstext = QLineEdit(box)
self.addresstext.setPlaceholderText('Remote server')
if self.address:
self.addresstext.setText(self.address)
self.sampling_box.layout().addWidget(self.addresstext)
form = QFormLayout()
self.sampling_box.layout().addLayout(form)
self.batch_spin = gui.spin(
self.sampling_box, self, "batch_size", 50, 100000, step=50,
keyboardTracking=False)
form.addRow("Batch size ~ ", self.batch_spin)
self.start_button = gui.button(
self.sampling_box, self, "Start remote computation",
callback=self.start, autoDefault=False,
tooltip="Start/abort computation on the server")
self.start_button.setEnabled(False)
gui.checkBox(self.sampling_box, self, "auto_update",
"Periodically fetch model", callback=self.update_model)
self.__timer = QTimer(self, interval=2000)
self.__timer.timeout.connect(self.get_model)
self.sampling_box.setVisible(remotely)
# Options
self.options_box = gui.vBox(self.controlArea, "Options")
gui.checkBox(self.options_box, self, "normalize", "Normalize data",
callback=self._update_normalize)
self.maxp_spin = gui.spin(
self.options_box, self, "maxp", 1, 100,
label="Show only first", callback=self._setup_plot,
keyboardTracking=False
)
self.controlArea.layout().addStretch()
gui.auto_commit(self.controlArea, self, "auto_commit", "Apply",
checkbox_label="Apply automatically")
self.plot = pg.PlotWidget(background="w")
axis = self.plot.getAxis("bottom")
axis.setLabel("Principal Components")
axis = self.plot.getAxis("left")
axis.setLabel("Proportion of variance")
self.plot_horlabels = []
self.plot_horlines = []
self.plot.getViewBox().setMenuEnabled(False)
self.plot.getViewBox().setMouseEnabled(False, False)
self.plot.showGrid(True, True, alpha=0.5)
self.plot.setRange(xRange=(0.0, 1.0), yRange=(0.0, 1.0))
self.mainArea.layout().addWidget(self.plot)
self._update_normalize()
def update_model(self):
self.get_model()
if self.auto_update and self.rpca and not self.rpca.ready():
self.__timer.start(2000)
else:
self.__timer.stop()
def start(self):
if 'Abort' in self.start_button.text():
self.rpca.abort()
self.__timer.stop()
self.start_button.setText("Start remote computation")
else:
self.address = self.addresstext.text()
with remote.server(self.address):
from Orange.projection.pca import RemotePCA
maxiter = (1e5 + self.data.approx_len()) / self.batch_size * 3
self.rpca = RemotePCA(self.data, self.batch_size, int(maxiter))
self.update_model()
self.start_button.setText("Abort remote computation")
def set_data(self, data):
self.information(0)
if isinstance(data, SqlTable):
if data.approx_len() < AUTO_DL_LIMIT:
data = Table(data)
elif not remotely:
self.information(0, "Data has been sampled")
data_sample = data.sample_time(1, no_cache=True)
data_sample.download_data(2000, partial=True)
data = Table(data_sample)
self.data = data
self.fit()
def fit(self):
self.clear()
self.start_button.setEnabled(False)
if self.data is None:
return
data = self.data
self._transformed = None
if isinstance(data, SqlTable): # data was big and remote available
self.sampling_box.setVisible(True)
self.start_button.setText("Start remote computation")
self.start_button.setEnabled(True)
else:
self.sampling_box.setVisible(False)
pca = self._pca_projector(data)
variance_ratio = pca.explained_variance_ratio_
cumulative = numpy.cumsum(variance_ratio)
self.components_spin.setRange(0, len(cumulative))
self._pca = pca
self._variance_ratio = variance_ratio
self._cumulative = cumulative
self._setup_plot()
self.unconditional_commit()
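    # Illustrative numbers for the fit above (editor's example, not from the
    # Orange source): if pca.explained_variance_ratio_ were
    # [0.6, 0.25, 0.1, 0.05], numpy.cumsum gives [0.6, 0.85, 0.95, 1.0] --
    # the fraction of total variance captured by the first k components,
    # which is exactly the cumulative curve _setup_plot() draws.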
def clear(self):
self._pca = None
self._transformed = None
self._variance_ratio = None
self._cumulative = None
self._line = None
self.plot_horlabels = []
self.plot_horlines = []
self.plot.clear()
def get_model(self):
if self.rpca is None:
return
if self.rpca.ready():
self.__timer.stop()
self.start_button.setText("Restart (finished)")
self._pca = self.rpca.get_state()
if self._pca is None:
return
self._variance_ratio = self._pca.explained_variance_ratio_
self._cumulative = numpy.cumsum(self._variance_ratio)
self._setup_plot()
self._transformed = None
self.commit()
def _setup_plot(self):
self.plot.clear()
explained_ratio = self._variance_ratio
explained = self._cumulative
p = min(len(self._variance_ratio), self.maxp)
self.plot.plot(numpy.arange(p), explained_ratio[:p],
pen=pg.mkPen(QColor(Qt.red), width=2),
antialias=True,
name="Variance")
self.plot.plot(numpy.arange(p), explained[:p],
pen=pg.mkPen(QColor(Qt.darkYellow), width=2),
antialias=True,
name="Cumulative Variance")
cutpos = self._nselected_components() - 1
self._line = pg.InfiniteLine(
angle=90, pos=cutpos, movable=True, bounds=(0, p - 1))
self._line.setCursor(Qt.SizeHorCursor)
self._line.setPen(pg.mkPen(QColor(Qt.black), width=2))
self._line.sigPositionChanged.connect(self._on_cut_changed)
self.plot.addItem(self._line)
self.plot_horlines = (
pg.PlotCurveItem(pen=pg.mkPen(QColor(Qt.blue), style=Qt.DashLine)),
pg.PlotCurveItem(pen=pg.mkPen(QColor(Qt.blue), style=Qt.DashLine)))
self.plot_horlabels = (
pg.TextItem(color=QColor(Qt.black), anchor=(1, 0)),
pg.TextItem(color=QColor(Qt.black), anchor=(1, 1)))
for item in self.plot_horlabels + self.plot_horlines:
self.plot.addItem(item)
self._set_horline_pos()
self.plot.setRange(xRange=(0.0, p - 1), yRange=(0.0, 1.0))
self._update_axis()
def _set_horline_pos(self):
cutidx = self.ncomponents - 1
for line, label, curve in zip(self.plot_horlines, self.plot_horlabels,
(self._variance_ratio, self._cumulative)):
y = curve[cutidx]
line.setData([-1, cutidx], 2 * [y])
label.setPos(cutidx, y)
label.setPlainText("{:.2f}".format(y))
def _on_cut_changed(self, line):
        # cut changed by dragging the cut line over the scree plot.
value = int(round(line.value()))
self._line.setValue(value)
current = self._nselected_components()
components = value + 1
if not (self.ncomponents == 0 and
components == len(self._variance_ratio)):
self.ncomponents = components
self._set_horline_pos()
if self._pca is not None:
self.variance_covered = self._cumulative[components - 1] * 100
if current != self._nselected_components():
self._invalidate_selection()
def _update_selection_component_spin(self):
# cut changed by "ncomponents" spin.
if self._pca is None:
self._invalidate_selection()
return
if self.ncomponents == 0:
# Special "All" value
cut = len(self._variance_ratio)
else:
cut = self.ncomponents
self.variance_covered = self._cumulative[cut - 1] * 100
if numpy.floor(self._line.value()) + 1 != cut:
self._line.setValue(cut - 1)
self._invalidate_selection()
def _update_selection_variance_spin(self):
# cut changed by "max variance" spin.
if self._pca is None:
return
cut = numpy.searchsorted(self._cumulative,
self.variance_covered / 100.0)
self.ncomponents = cut + 1
if numpy.floor(self._line.value()) + 1 != cut:
self._line.setValue(cut - 1)
self._invalidate_selection()
def _update_normalize(self):
if self.normalize:
pp = self._pca_preprocessors + [Normalize()]
else:
pp = self._pca_preprocessors
self._pca_projector.preprocessors = pp
self.fit()
if self.data is None:
self._invalidate_selection()
def _nselected_components(self):
"""Return the number of selected components."""
if self._pca is None:
return 0
if self.ncomponents == 0:
# Special "All" value
max_comp = len(self._variance_ratio)
else:
max_comp = self.ncomponents
var_max = self._cumulative[max_comp - 1]
if var_max != numpy.floor(self.variance_covered / 100.0):
cut = max_comp
self.variance_covered = var_max * 100
else:
self.ncomponents = cut = numpy.searchsorted(
self._cumulative, self.variance_covered / 100.0) + 1
return cut
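    # Worked example for the searchsorted branch above (hypothetical values):
    # with _cumulative = [0.6, 0.85, 0.95, 1.0] and variance_covered = 90,
    # numpy.searchsorted(_cumulative, 0.9) returns 2, so ncomponents becomes
    # 3 -- the smallest component count whose cumulative variance reaches 90%.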
def _invalidate_selection(self):
self.commit()
def _update_axis(self):
p = min(len(self._variance_ratio), self.maxp)
axis = self.plot.getAxis("bottom")
d = max((p-1)//(self.axis_labels-1), 1)
axis.setTicks([[(i, str(i+1)) for i in range(0, p, d)]])
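    # Tick-thinning example (editor's illustration): with p = 20 components
    # shown and axis_labels = 10, d = max(19 // 9, 1) = 2, so ticks land on
    # x positions 0, 2, ..., 18 and carry the 1-based component labels
    # 1, 3, ..., 19.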
def commit(self):
transformed = components = None
if self._pca is not None:
if self._transformed is None:
# Compute the full transform (all components) only once.
self._transformed = self._pca(self.data)
transformed = self._transformed
domain = Domain(
transformed.domain.attributes[:self.ncomponents],
self.data.domain.class_vars,
self.data.domain.metas
)
transformed = transformed.from_table(domain, transformed)
dom = Domain(self._pca.orig_domain.attributes,
metas=[StringVariable(name='component')])
metas = numpy.array([['PC{}'.format(i + 1)
for i in range(self.ncomponents)]],
dtype=object).T
components = Table(dom, self._pca.components_[:self.ncomponents],
metas=metas)
components.name = 'components'
self._pca_projector.component = self.ncomponents
self.send("Transformed data", transformed)
self.send("Components", components)
self.send("PCA", self._pca_projector)
def send_report(self):
if self.data is None:
return
self.report_items((
("Selected components", self.ncomponents),
("Explained variance", "{:.3f} %".format(self.variance_covered))
))
self.report_plot()
def main():
import gc
app = QApplication([])
w = OWPCA()
# data = Table("iris")
# data = Table("wine")
data = Table("housing")
w.set_data(data)
w.show()
w.raise_()
rval = w.exec()
w.deleteLater()
del w
app.processEvents()
gc.collect()
return rval
if __name__ == "__main__":
main()

repo: ECESeniorDesign/lazy_record | path: test/test_base.py | license: mit | size: 11343 bytes | copies: 1

import unittest
import mock
import sys
import os
sys.path.append(os.path.join(
os.path.dirname(os.path.abspath(os.path.dirname(__file__))),
"lazy_record"))
import base
from base import Base
import lazy_record
class MyModel(Base):
__attributes__ = {
"name": str,
}
__validates__ = {
"name": lambda record: record.name != "invalid"
}
def my_childs():
pass
class MyOtherModel(Base):
pass
@mock.patch("base.datetime")
@mock.patch("base.Repo")
@mock.patch("base.Query")
class TestBase(unittest.TestCase):
def test_gets_number_of_records(self, Query, Repo, datetime):
Query.return_value.all.return_value.__len__.return_value = 3
self.assertEqual(len(MyModel), 3)
Query.assert_called_with(MyModel)
q = Query.return_value
q.all.assert_called_once_with()
all = q.all.return_value
all.__len__.assert_called_once_with()
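    # What the mock chain above implies about the production code (an
    # assumption about lazy_record internals, not something this file shows):
    # len(MyModel) routes through a metaclass __len__ that effectively runs
    #
    #     len(Query(MyModel).all())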
def test_creates_records(self, Query, Repo, datetime):
Repo.table_name.return_value = "my_model"
my_record = MyModel(name="me")
my_record.save()
Repo.table_name.assert_called_with(MyModel)
Repo.assert_called_with("my_model")
repo = Repo.return_value
today = datetime.datetime.today.return_value
repo.insert.assert_called_with(name="me",
created_at=today,
updated_at=today)
def test_updates_records(self, Query, Repo, datetime):
Repo.table_name.return_value = "my_model"
my_record = MyModel(name="foo")
my_record._id = 3
my_record._created_at = datetime.datetime.today.return_value
my_record.save()
Repo.table_name.assert_called_with(MyModel)
Repo.assert_called_with("my_model")
repo = Repo.return_value
repo.where.assert_called_with(id=3)
where = repo.where.return_value
today = datetime.datetime.today.return_value
where.update.assert_called_with(name="foo",
created_at=today,
updated_at=today)
def test_does_not_create_invalid_records(self, Query, Repo, datetime):
Repo.table_name.return_value = "my_model"
my_record = MyModel(name="invalid")
with self.assertRaises(base.RecordInvalid):
my_record.save()
self.assertEqual(Repo.return_value.insert.call_count, 0)
def test_does_not_update_invalid_records(self, Query, Repo, datetime):
Repo.table_name.return_value = "my_model"
my_record = MyModel(name="invalid")
my_record._id = 3
my_record._created_at = datetime.datetime.today.return_value
with self.assertRaises(base.RecordInvalid):
my_record.save()
self.assertEqual(Repo.return_value.update.call_count, 0)
def test_deletes_records(self, Query, Repo, datetime):
Repo.table_name.return_value = "my_model"
my_record = MyModel(name="foo")
my_record._id = 3
my_record._created_at = datetime.datetime.today.return_value
my_record.delete()
Repo.table_name.assert_called_with(MyModel)
Repo.assert_called_with("my_model")
repo = Repo.return_value
repo.where.assert_called_with(id=3)
where = repo.where.return_value
where.delete.assert_called_with()
def test_allows_finding_of_records_by_id(self, Query, Repo, datetime):
MyModel.find(1)
Query.assert_called_with(MyModel)
query = Query.return_value
query.find.assert_called_with(1)
def test_allows_finding_of_records_by_attribute(self, Query, Repo, dt):
MyModel.find_by(name="foo")
Query.assert_called_with(MyModel)
query = Query.return_value
query.find_by.assert_called_with(name="foo")
def test_allows_searching_of_records_by_attribute(self, Query, Repo, dt):
MyModel.where(name="foo")
Query.assert_called_with(MyModel)
query = Query.return_value
query.where.assert_called_with(name="foo")
def test_allows_custom_where(self, Query, Repo, dt):
MyModel.where("name LIKE ?", "foo")
Query.assert_called_with(MyModel)
query = Query.return_value
query.where.assert_called_with("name LIKE ?", "foo")
def test_allows_fetching_of_all_records(self, Query, Repo, datetime):
MyModel.all()
Query.assert_called_with(MyModel)
query = Query.return_value
query.all.assert_called_with()
def test_allows_fetching_through_joins(self, Query, Repo, datetime):
MyModel.joins("my_other_models")
Query.assert_called_with(MyModel)
query = Query.return_value
query.joins.assert_called_with("my_other_models")
def test_casts_attributes_to_correct_type(self, Query, Repo, datetime):
m = MyModel(name=1)
self.assertEqual(m.name, "1")
def test_creates_from_dictionary(self, Query, Repo, datetime):
m = MyModel.from_dict(id=1, name="foo",
created_at=datetime.datetime.today.return_value)
self.assertEqual(m.id, 1)
self.assertEqual(m.name, "foo")
self.assertEqual(m.created_at, datetime.datetime.today.return_value)
def test_forbids_setting_of_id(self, Query, Repo, datetime):
m = MyModel()
with self.assertRaises(AttributeError):
m.id = 15
def test_forbids_setting_of_created_at(self, Query, Repo, datetime):
m = MyModel()
with self.assertRaises(AttributeError):
m.created_at = datetime.datetime.today.return_value
def test_allows_setting_of_attributes(self, Query, Repo, datetime):
m = MyModel()
m.name = "turnip"
self.assertEqual(m.name, "turnip")
def test_forbits_instantiation_with_id(self, Query, Repo, datetime):
with self.assertRaises(AttributeError):
MyModel(id=3)
def test_forbits_instantiation_with_created_at(self, Query, Repo, dt):
with self.assertRaises(AttributeError):
MyModel(created_at=3)
def test_gets_first_record(self, Query, Repo, datetime):
MyModel.first()
Query.assert_called_with(MyModel)
query = Query.return_value
query.first.assert_called_with()
def test_gets_last_record(self, Query, Repo, datetime):
MyModel.last()
Query.assert_called_with(MyModel)
query = Query.return_value
query.last.assert_called_with()
def test_mass_assigns_records(self, Query, Repo, datetime):
m = MyModel()
m.update(name="foo")
self.assertEqual(m.name, "foo")
def test_get_does_not_cast_attr_if_none(self, Query, Repo, datetime):
m = MyModel()
self.assertEqual(m.name, None)
def test_set_does_not_cast_attr_if_none(self, Query, Repo, datetime):
m = MyModel()
m.name = None
self.assertEqual(m.name, None)
def test_gets_other_attributes_without_cast(self, Query, Repo, datetime):
m = MyModel()
self.assertEqual(m.__class__, MyModel)
def test_raises_if_attribute_not_found(self, Query, Repo, datetime):
m = MyModel()
with self.assertRaises(AttributeError):
m.turkey
def test_repr_displays_meaningful_represenation(self, Query, Repo, dt):
m = MyModel()
self.assertEqual(repr(m),
"MyModel(id=None, name=None, created_at=None, "
"updated_at=None)")
def test_evaluates_equality_based_on_id(self, Query, Repo, dt):
m1 = MyModel()
m1._id = 1
m2 = MyModel()
m2._id = 1
m3 = MyModel()
m3._id = 2
self.assertEqual(m1, m2)
self.assertNotEqual(m1, m3)
def test_evaluates_as_inequal_if_either_id_is_None(self, Query, Repo, dt):
m1 = MyModel()
m1._id = 1
m2 = MyModel()
m2._id = None
m3 = MyModel()
m3._id = None
self.assertNotEqual(m1, m2)
self.assertNotEqual(m2, m1)
self.assertNotEqual(m2, m3)
def test_evaluates_as_inequal_if_models_are_different(self, Q, R, dt):
m1 = MyModel()
m1._id = 1
m2 = MyOtherModel()
m2._id = 1
self.assertNotEqual(m1, m2)
def test_identical_records_evaluate_as_equal(self, Query, Repo, dt):
m1 = MyModel()
m1._id = None
self.assertEqual(m1, m1)
def test_casts_to_int_as_id(self, Query, Repo, dt):
m = MyModel()
m._id = 11
self.assertEqual(int(m), 11)
def test_casts_to_0_if_no_id(self, Query, Repo, dt):
m = MyModel()
m._id = None
self.assertEqual(int(m), 0)
def test_valid_returns_true_when_valid(self, Query, Repo, datetime):
m = MyModel(name="valid")
self.assertTrue(m.is_valid())
def test_valid_returns_false_when_invalid(self, Query, Repo, datetime):
m = MyModel(name="invalid")
self.assertFalse(m.is_valid())
def test_accessing_attribute_not_loaded_raises(self, Query, Repo, dt):
m = MyModel.from_dict(id=1)
with self.assertRaises(lazy_record.MissingAttributeError):
m.name
def test_accessing_attribute_not_loaded_raises_no_id(self, Query, R, dt):
m = MyModel.from_dict(name="foo")
with self.assertRaises(lazy_record.MissingAttributeError) as e:
m.id
self.assertEqual(e.exception.message,
"'MyModel' object has no attribute 'id'")
def test_repr_without_timestamps(self, Query, Repo, datetime):
m = MyModel.from_dict(name="foo")
self.assertEqual(repr(m), "MyModel(name='foo')")
def test_create_makes_record(self, Query, Repo, datetime):
with mock.patch.object(MyModel.__metaclass__, "__call__") as model:
MyModel.create(name="foo")
Query(MyModel).create.assert_called_with(name="foo")
def test_create_returns_record(self, Query, Repo, datetime):
record = mock.Mock(name="record")
Query(MyModel).create.return_value = record
self.assertEqual(MyModel.create(), record)
@mock.patch("base.Repo")
class TestBaseDestroy(unittest.TestCase):
def setUp(self):
self.my_model = MyModel(name="hi")
self.my_model._id = 5
def test_deletes_without_dependents(self, Repo):
self.my_model.destroy()
Repo.assert_called_once_with("my_models")
repo = Repo.return_value
repo.where.assert_called_once_with(id=5)
where = repo.where.return_value
where.delete.assert_called_once_with()
@mock.patch.object(MyModel, "__dependents__", new=["my_childs"])
def test_deletes_dependents(self, Repo):
my_childs = mock.PropertyMock()
type(self.my_model).my_childs = my_childs
child = mock.Mock()
my_childs.return_value.__iter__.return_value = [child]
self.my_model.destroy()
my_childs.assert_called_with()
child._do_destroy.assert_called_with()
Repo.assert_called_with("my_models")
repo = Repo.return_value
repo.where.assert_called_with(id=5)
where = repo.where.return_value
where.delete.assert_called_with()
if __name__ == '__main__':
unittest.main()

repo: agoragames/haigha | path: tests/unit/connection_test.py | license: bsd-3-clause | size: 34872 bytes | copies: 1

'''
Copyright (c) 2011-2017, Agora Games, LLC All rights reserved.
https://github.com/agoragames/haigha/blob/master/LICENSE.txt
'''
import logging
from chai import Chai
from haigha import connection, __version__
from haigha.connection import Connection, ConnectionChannel, ConnectionError, ConnectionClosed
from haigha.channel import Channel
from haigha.frames.frame import Frame
from haigha.frames.method_frame import MethodFrame
from haigha.frames.heartbeat_frame import HeartbeatFrame
from haigha.frames.header_frame import HeaderFrame
from haigha.frames.content_frame import ContentFrame
from haigha.classes.basic_class import BasicClass
from haigha.classes.channel_class import ChannelClass
from haigha.classes.exchange_class import ExchangeClass
from haigha.classes.queue_class import QueueClass
from haigha.classes.transaction_class import TransactionClass
from haigha.classes.protocol_class import ProtocolClass
from haigha.transports import event_transport
from haigha.transports import gevent_transport
from haigha.transports import socket_transport
class ConnectionTest(Chai):
def setUp(self):
super(ConnectionTest, self).setUp()
self.connection = Connection.__new__(Connection)
self.connection._debug = False
self.connection._logger = self.mock()
self.connection._user = 'guest'
self.connection._password = 'guest'
self.connection._host = 'localhost'
self.connection._vhost = '/'
self.connection._connect_timeout = 5
self.connection._sock_opts = None
self.connection._sock = None # mock anything?
self.connection._heartbeat = None
self.connection._open_cb = self.mock()
self.connection._close_cb = self.mock()
self.connection._login_method = 'AMQPLAIN'
self.connection._locale = 'en_US'
self.connection._client_properties = None
self.connection._properties = {
'library': 'Haigha',
'library_version': 'x.y.z',
}
self.connection._closed = False
self.connection._connected = False
self.connection._close_info = {
'reply_code': 0,
'reply_text': 'first connect',
'class_id': 0,
'method_id': 0
}
self.connection._class_map = {}
self.connection._channels = {
0: self.mock()
}
self.connection._login_response = 'loginresponse'
self.connection._channel_counter = 0
self.connection._channel_max = 65535
self.connection._frame_max = 65535
self.connection._frames_read = 0
self.connection._frames_written = 0
self.connection._strategy = self.mock()
self.connection._output_frame_buffer = []
self.connection._transport = mock()
self.connection._synchronous = False
self.connection._synchronous_connect = False
def test_init_without_keyword_args(self):
conn = Connection.__new__(Connection)
strategy = mock()
transport = mock()
mock(connection, 'ConnectionChannel')
expect(connection.ConnectionChannel).args(
conn, 0, {}).returns('connection_channel')
expect(socket_transport.SocketTransport).args(conn).returns(transport)
expect(conn.connect).args('localhost', 5672)
conn.__init__()
assert_false(conn._debug)
assert_equal(logging.root, conn._logger)
assert_equal('guest', conn._user)
assert_equal('guest', conn._password)
assert_equal('localhost', conn._host)
assert_equal(5672, conn._port)
assert_equal('/', conn._vhost)
assert_equal(5, conn._connect_timeout)
assert_equal(None, conn._sock_opts)
assert_equal(None, conn._sock)
assert_equal(None, conn._heartbeat)
assert_equal(None, conn._open_cb)
assert_equal(None, conn._close_cb)
assert_equal('AMQPLAIN', conn._login_method)
assert_equal('en_US', conn._locale)
assert_equal(None, conn._client_properties)
assert_equal(conn._properties, {
'library': 'Haigha',
'library_version': __version__,
})
assert_false(conn._closed)
assert_false(conn._connected)
assert_equal(conn._close_info, {
'reply_code': 0,
'reply_text': 'first connect',
'class_id': 0,
'method_id': 0
})
assert_equals({
20: ChannelClass,
40: ExchangeClass,
50: QueueClass,
60: BasicClass,
90: TransactionClass
}, conn._class_map)
assert_equal({0: 'connection_channel'}, conn._channels)
assert_equal(
'\x05LOGINS\x00\x00\x00\x05guest\x08PASSWORDS\x00\x00\x00\x05guest', conn._login_response)
assert_equal(0, conn._channel_counter)
assert_equal(65535, conn._channel_max)
assert_equal(65535, conn._frame_max)
assert_equal([], conn._output_frame_buffer)
assert_equal(transport, conn._transport)
transport.synchronous = True
assert_false(conn._synchronous)
assert_true(conn.synchronous)
assert_true(conn._synchronous_connect)
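    # Editor's note on the login string asserted above: it is an AMQPLAIN
    # table in wire format (offsets annotated by hand):
    #   '\x05' 'LOGIN'                 -> shortstr key, length 5
    #   'S' '\x00\x00\x00\x05' 'guest' -> long-string value, length 5
    #   '\x08' 'PASSWORD'              -> shortstr key, length 8
    #   'S' '\x00\x00\x00\x05' 'guest' -> long-string value, length 5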
def test_init_with_event_transport(self):
conn = Connection.__new__(Connection)
strategy = mock()
transport = mock()
mock(connection, 'ConnectionChannel')
expect(connection.ConnectionChannel).args(
conn, 0, {}).returns('connection_channel')
expect(event_transport.EventTransport).args(conn).returns(transport)
expect(conn.connect).args('localhost', 5672)
conn.__init__(transport='event')
def test_properties(self):
assert_equal(self.connection._logger, self.connection.logger)
assert_equal(self.connection._debug, self.connection.debug)
assert_equal(self.connection._frame_max, self.connection.frame_max)
assert_equal(self.connection._channel_max, self.connection.channel_max)
assert_equal(self.connection._frames_read, self.connection.frames_read)
assert_equal(
self.connection._frames_written, self.connection.frames_written)
assert_equal(self.connection._closed, self.connection.closed)
# sync property tested in the test_inits
def test_synchronous_when_no_transport(self):
self.connection._transport = None
with assert_raises(connection.ConnectionClosed):
self.connection.synchronous
self.connection._close_info = {
'reply_code': 100,
'reply_text': 'breakdown'
}
with assert_raises(connection.ConnectionClosed):
self.connection.synchronous
def test_synchronous_when_transport(self):
self.connection._transport.synchronous = True
assert_true(self.connection.synchronous)
self.connection._transport.synchronous = False
assert_false(self.connection.synchronous)
def test_connect_when_asynchronous_transport(self):
self.connection._transport.synchronous = False
self.connection._connected = 'maybe'
self.connection._closed = 'possibly'
self.connection._debug = 'sure'
self.connection._connect_timeout = 42
self.connection._sock_opts = {
('f1', 't1'): 5,
('f2', 't2'): 6
}
expect(self.connection._transport.connect).args(('host', 5672))
expect(self.connection._transport.write).args('AMQP\x00\x00\x09\x01')
self.connection.connect('host', 5672)
assert_false(self.connection._connected)
assert_false(self.connection._closed)
assert_equals(self.connection._close_info,
{
'reply_code': 0,
'reply_text': 'failed to connect to host:5672',
'class_id': 0,
'method_id': 0
})
assert_equals('host:5672', self.connection._host)
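    # Decoding the handshake bytes written above: 'AMQP\x00\x00\x09\x01' is
    # the protocol header a client sends before any frames -- the literal
    # 'AMQP' followed by protocol id 0 and version 0-9-1.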
def test_connect_when_asynchronous_transport_but_synchronous_connect(self):
self.connection._transport.synchronous = False
self.connection._synchronous_connect = True
self.connection._connected = 'maybe'
self.connection._closed = 'possibly'
self.connection._debug = 'sure'
self.connection._connect_timeout = 42
self.connection._sock_opts = {
('f1', 't1'): 5,
('f2', 't2'): 6
}
expect(self.connection._transport.connect).args(('host', 5672))
expect(self.connection._transport.write).args('AMQP\x00\x00\x09\x01')
expect(self.connection._channels[0].add_synchronous_cb).args(
self.connection._channels[0]._recv_start)
expect(self.connection.read_frames)
expect(self.connection.read_frames).side_effect(
lambda: setattr(self.connection, '_connected', True))
self.connection.connect('host', 5672)
assert_true(self.connection._connected)
assert_false(self.connection._closed)
assert_equals(self.connection._close_info,
{
'reply_code': 0,
'reply_text': 'failed to connect to host:5672',
'class_id': 0,
'method_id': 0
})
assert_equals('host:5672', self.connection._host)
def test_connect_when_synchronous_transport(self):
self.connection._transport.synchronous = True
# would have been written in ctor
self.connection._synchronous_connect = True
self.connection._connected = 'maybe'
self.connection._closed = 'possibly'
self.connection._debug = 'sure'
self.connection._connect_timeout = 42
self.connection._sock_opts = {
('f1', 't1'): 5,
('f2', 't2'): 6
}
expect(self.connection._transport.connect).args(('host', 5672))
expect(self.connection._transport.write).args('AMQP\x00\x00\x09\x01')
expect(self.connection._channels[0].add_synchronous_cb)
expect(self.connection.read_frames)
expect(self.connection.read_frames).side_effect(
lambda: setattr(self.connection, '_connected', True))
self.connection.connect('host', 5672)
assert_true(self.connection._connected)
assert_false(self.connection._closed)
assert_equals(self.connection._close_info,
{
'reply_code': 0,
'reply_text': 'failed to connect to host:5672',
'class_id': 0,
'method_id': 0
})
assert_equals('host:5672', self.connection._host)
def test_disconnect_when_transport_disconnects(self):
self.connection._connected = 'yup'
expect(self.connection._transport.disconnect)
self.connection.disconnect()
assert_false(self.connection._connected)
assert_equals(None, self.connection._transport)
def test_disconnect_when_transport_disconnects_with_error(self):
self.connection._connected = 'yup'
self.connection._host = 'server'
expect(self.connection._transport.disconnect).raises(
RuntimeError('fail'))
expect(self.connection.logger.error).args(
"Failed to disconnect from %s", 'server', exc_info=True)
assert_raises(RuntimeError, self.connection.disconnect)
assert_false(self.connection._connected)
assert_equals(None, self.connection._transport)
def test_disconnect_when_systemexit(self):
self.connection._connected = 'yup'
self.connection._host = 'server'
expect(self.connection._transport.disconnect).raises(SystemExit())
stub(self.connection.logger.error)
assert_raises(SystemExit, self.connection.disconnect)
assert_false(self.connection._connected)
assert_equals(None, self.connection._transport)
def test_transport_closed_with_no_args(self):
self.connection._host = 'server'
self.connection._connected = 'yes'
expect(self.connection.logger.warning).args(
'transport to server closed : unknown cause')
expect(self.connection._callback_close)
self.connection.transport_closed()
assert_equals(0, self.connection._close_info['reply_code'])
assert_equals(
'unknown cause', self.connection._close_info['reply_text'])
assert_equals(0, self.connection._close_info['class_id'])
assert_equals(0, self.connection._close_info['method_id'])
def test_next_channel_id_when_less_than_max(self):
self.connection._channel_counter = 32
self.connection._channel_max = 23423
assert_equals(33, self.connection._next_channel_id())
def test_next_channel_id_when_at_max(self):
self.connection._channel_counter = 32
self.connection._channel_max = 32
assert_equals(1, self.connection._next_channel_id())
def test_channel_creates_new_when_not_at_limit(self):
ch = mock()
expect(self.connection._next_channel_id).returns(1)
mock(connection, 'Channel')
expect(connection.Channel).args(
self.connection, 1, self.connection._class_map, synchronous=False).returns(ch)
expect(ch.add_close_listener).args(self.connection._channel_closed)
expect(ch.open)
assert_equals(ch, self.connection.channel())
assert_equals(ch, self.connection._channels[1])
def test_channel_creates_optionally_synchronous(self):
ch = mock()
expect(self.connection._next_channel_id).returns(1)
mock(connection, 'Channel')
expect(connection.Channel).args(
self.connection, 1, self.connection._class_map, synchronous=True).returns(ch)
expect(ch.add_close_listener).args(self.connection._channel_closed)
expect(ch.open)
assert_equals(ch, self.connection.channel(synchronous=True))
assert_equals(ch, self.connection._channels[1])
def test_channel_finds_the_first_free_channel_id(self):
self.connection._channels[1] = 'foo'
self.connection._channels[2] = 'bar'
self.connection._channels[4] = 'cat'
ch = mock()
expect(self.connection._next_channel_id).returns(1)
expect(self.connection._next_channel_id).returns(2)
expect(self.connection._next_channel_id).returns(3)
mock(connection, 'Channel')
expect(connection.Channel).args(
self.connection, 3, self.connection._class_map, synchronous=False).returns(ch)
expect(ch.add_close_listener).args(self.connection._channel_closed)
expect(ch.open)
assert_equals(ch, self.connection.channel())
assert_equals(ch, self.connection._channels[3])
def test_channel_raises_toomanychannels(self):
self.connection._channels[1] = 'foo'
self.connection._channels[2] = 'bar'
self.connection._channels[4] = 'cat'
self.connection._channel_max = 3
assert_raises(Connection.TooManyChannels, self.connection.channel)
def test_channel_returns_cached_instance_if_known(self):
self.connection._channels[1] = 'foo'
assert_equals('foo', self.connection.channel(1))
def test_channel_raises_invalidchannel_if_unknown_id(self):
assert_raises(Connection.InvalidChannel, self.connection.channel, 42)
def test_channel_closed(self):
ch = mock()
ch.channel_id = 42
self.connection._channels[42] = ch
self.connection._channel_closed(ch)
assert_false(42 in self.connection._channels)
ch.channel_id = 500424834
self.connection._channel_closed(ch)
def test_close(self):
self.connection._channels[0] = mock()
expect(self.connection._channels[0].close)
self.connection.close()
assert_equals({'reply_code': 0, 'reply_text': '', 'class_id': 0, 'method_id': 0},
self.connection._close_info)
self.connection.close(1, 'foo', 2, 3)
assert_equals({'reply_code': 1, 'reply_text': 'foo', 'class_id': 2, 'method_id': 3},
self.connection._close_info)
def test_close_when_disconnect(self):
self.connection._channels[0] = mock()
stub(self.connection._channels[0].close)
assert_false(self.connection._closed)
expect(self.connection.disconnect)
expect(self.connection._callback_close)
self.connection.close(1, 'foo', 2, 3, disconnect=True)
assert_true(self.connection._closed)
assert_equals({'reply_code': 1, 'reply_text': 'foo', 'class_id': 2, 'method_id': 3},
self.connection._close_info)
def test_callback_open_when_no_cb(self):
self.connection._open_cb = None
self.connection._callback_open()
def test_callback_open_when_user_cb(self):
self.connection._open_cb = mock()
expect(self.connection._open_cb)
self.connection._callback_open()
def test_callback_open_raises_when_user_cb_does(self):
self.connection._open_cb = mock()
expect(self.connection._open_cb).raises(SystemExit())
assert_raises(SystemExit, self.connection._callback_open)
def test_callback_close_when_no_cb(self):
self.connection._close_cb = None
self.connection._callback_close()
def test_callback_close_when_user_cb(self):
self.connection._close_cb = mock()
expect(self.connection._close_cb)
self.connection._callback_close()
def test_callback_close_raises_when_user_cb_does(self):
self.connection._close_cb = mock()
expect(self.connection._close_cb).raises(SystemExit())
assert_raises(SystemExit, self.connection._callback_close)
def test_read_frames_when_no_transport(self):
self.connection._transport = None
self.connection.read_frames()
assert_equals(0, self.connection._frames_read)
def test_read_frames_when_transport_returns_no_data(self):
self.connection._heartbeat = None
expect(self.connection._channels[0].send_heartbeat)
expect(self.connection._transport.read).args(None).returns(None)
self.connection.read_frames()
assert_equals(0, self.connection._frames_read)
def test_read_frames_when_transport_when_frame_data_and_no_debug_and_no_buffer(self):
reader = mock()
frame = mock()
frame.channel_id = 42
channel = mock()
mock(connection, 'Reader')
self.connection._heartbeat = 3
expect(self.connection._channels[0].send_heartbeat)
expect(self.connection._transport.read).args(3).returns('data')
expect(connection.Reader).args('data').returns(reader)
expect(connection.Frame.read_frames).args(reader).returns([frame])
expect(self.connection.channel).args(42).returns(channel)
expect(channel.buffer_frame).args(frame)
expect(self.connection._transport.process_channels).args(
set([channel]))
expect(reader.tell).returns(4)
self.connection.read_frames()
assert_equals(1, self.connection._frames_read)
def test_read_frames_when_transport_when_frame_data_and_debug_and_buffer(self):
reader = mock()
frame = mock()
frame.channel_id = 42
channel = mock()
mock(connection, 'Reader')
self.connection._debug = 2
expect(self.connection._channels[0].send_heartbeat)
expect(self.connection._transport.read).args(None).returns('data')
expect(connection.Reader).args('data').returns(reader)
expect(connection.Frame.read_frames).args(reader).returns([frame])
expect(self.connection.logger.debug).args('READ: %s', frame)
expect(self.connection.channel).args(42).returns(channel)
expect(channel.buffer_frame).args(frame)
expect(self.connection._transport.process_channels).args(
set([channel]))
expect(reader.tell).times(2).returns(2)
expect(self.connection._transport.buffer).args('ta')
self.connection.read_frames()
assert_equals(1, self.connection._frames_read)
def test_read_frames_when_read_frame_error(self):
reader = mock()
frame = mock()
frame.channel_id = 42
channel = mock()
mock(connection, 'Reader')
self.connection._heartbeat = 3
expect(self.connection._channels[0].send_heartbeat)
expect(self.connection._transport.read).args(3).returns('data')
expect(connection.Reader).args('data').returns(reader)
expect(connection.Frame.read_frames).args(
reader).raises(Frame.FrameError)
stub(self.connection.channel)
stub(channel.buffer_frame)
stub(self.connection._transport.process_channels)
stub(reader.tell)
stub(self.connection._transport.buffer)
expect(self.connection.close).args(
reply_code=501, reply_text=str, class_id=0, method_id=0, disconnect=True)
assert_raises(ConnectionError, self.connection.read_frames)
def test_flush_buffered_frames(self):
self.connection._output_frame_buffer = ['frame1', 'frame2']
expect(self.connection.send_frame).args('frame1')
expect(self.connection.send_frame).args('frame2')
self.connection._flush_buffered_frames()
assert_equals([], self.connection._output_frame_buffer)
def test_send_frame_when_connected_and_transport_and_no_debug(self):
frame = mock()
expect(frame.write_frame).args(var('ba'))
expect(self.connection._transport.write).args(var('ba'))
self.connection._connected = True
self.connection.send_frame(frame)
assert_true(isinstance(var('ba').value, bytearray))
assert_equals(1, self.connection._frames_written)
def test_send_frame_when_not_connected_and_not_channel_0(self):
frame = mock()
frame.channel_id = 42
stub(frame.write_frame)
stub(self.connection._transport.write)
self.connection._connected = False
self.connection.send_frame(frame)
assert_equals([frame], self.connection._output_frame_buffer)
def test_send_frame_when_not_connected_and_channel_0(self):
frame = mock()
frame.channel_id = 0
expect(frame.write_frame).args(var('ba'))
expect(self.connection._transport.write).args(var('ba'))
self.connection._connected = False
self.connection.send_frame(frame)
assert_true(isinstance(var('ba').value, bytearray))
assert_equals(1, self.connection._frames_written)
def test_send_frame_when_debugging(self):
frame = mock()
expect(self.connection.logger.debug).args('WRITE: %s', frame)
expect(frame.write_frame).args(var('ba'))
expect(self.connection._transport.write).args(var('ba'))
self.connection._connected = True
self.connection._debug = 2
self.connection.send_frame(frame)
assert_true(isinstance(var('ba').value, bytearray))
assert_equals(1, self.connection._frames_written)
def test_send_frame_when_closed(self):
self.connection._closed = True
self.connection._close_info['reply_text'] = 'failed'
assert_raises(connection.ConnectionClosed,
self.connection.send_frame, 'frame')
self.connection._close_info['reply_text'] = ''
assert_raises(connection.ConnectionClosed,
self.connection.send_frame, 'frame')
self.connection._close_info = None
assert_raises(connection.ConnectionClosed,
self.connection.send_frame, 'frame')
def test_send_frame_when_frame_overflow(self):
frame = mock()
self.connection._frame_max = 100
expect(frame.write_frame).side_effect(
lambda buf: buf.extend('a' * 200))
expect(self.connection.close).args(
reply_code=501, reply_text=var('reply'), class_id=0, method_id=0, disconnect=True)
stub(self.connection._transport.write)
self.connection._connected = True
with assert_raises(ConnectionClosed):
self.connection.send_frame(frame)
class ConnectionChannelTest(Chai):
def setUp(self):
super(ConnectionChannelTest, self).setUp()
self.connection = mock()
self.ch = ConnectionChannel(self.connection, 0, {})
def test_init(self):
mock(connection, 'super')
with expect(connection, 'super').args(is_arg(ConnectionChannel), ConnectionChannel).returns(mock()) as s:
expect(s.__init__).args('a', 'b')
c = ConnectionChannel('a', 'b')
assert_equals(c._method_map,
{
10: c._recv_start,
20: c._recv_secure,
30: c._recv_tune,
41: c._recv_open_ok,
50: c._recv_close,
51: c._recv_close_ok,
}
)
assert_equal(0, c._last_heartbeat_send)
def test_dispatch_on_heartbeat_frame(self):
frame = mock()
expect(frame.type).returns(HeartbeatFrame.type())
expect(self.ch.send_heartbeat)
self.ch.dispatch(frame)
def test_dispatch_method_frame_class_10(self):
frame = mock()
frame.class_id = 10
frame.method_id = 10
method = self.ch._method_map[10] = mock()
expect(frame.type).returns(MethodFrame.type())
expect(method).args(frame)
self.ch.dispatch(frame)
def test_dispatch_runs_callbacks(self):
frame = mock()
frame.class_id = 10
frame.method_id = 10
method = self.ch._method_map[10] = mock()
cb = mock()
expect(frame.type).returns(MethodFrame.type())
expect(self.ch.clear_synchronous_cb).args(method).returns(cb)
expect(cb).args(frame)
self.ch.dispatch(frame)
def test_dispatch_method_frame_raises_invalidmethod(self):
frame = mock()
frame.class_id = 10
frame.method_id = 500
expect(frame.type).returns(MethodFrame.type())
with assert_raises(Channel.InvalidMethod):
self.ch.dispatch(frame)
def test_dispatch_method_frame_raises_invalidclass(self):
frame = mock()
frame.class_id = 11
frame.method_id = 10
expect(frame.type).returns(MethodFrame.type())
with assert_raises(Channel.InvalidClass):
self.ch.dispatch(frame)
def test_dispatch_method_frame_raises_invalidframetype(self):
frame = mock()
expect(frame.type).returns(HeaderFrame.type())
with assert_raises(Frame.InvalidFrameType):
self.ch.dispatch(frame)
def test_close(self):
expect(self.ch._send_close)
self.ch.close()
def test_send_heartbeat_when_no_heartbeat(self):
stub(self.ch.send_frame)
self.ch.connection._heartbeat = None
self.ch.send_heartbeat()
def test_send_heartbeat_when_not_sent_yet(self):
mock(connection, 'time')
self.ch.connection._heartbeat = 3
self.ch._last_heartbeat_send = 0
expect(connection.time.time).returns(4200.3).times(2)
expect(self.ch.send_frame).args(HeartbeatFrame)
self.ch.send_heartbeat()
assert_equals(4200.3, self.ch._last_heartbeat_send)
def test_send_heartbeat_when_sent_long_ago(self):
mock(connection, 'time')
self.ch.connection._heartbeat = 3
self.ch._last_heartbeat_send = 4196
expect(connection.time.time).returns(4200.3).times(2)
expect(self.ch.send_frame).args(HeartbeatFrame)
self.ch.send_heartbeat()
assert_equals(4200.3, self.ch._last_heartbeat_send)
    def test_send_heartbeat_when_sent_recently(self):
mock(connection, 'time')
self.ch.connection._heartbeat = 3
self.ch._last_heartbeat_send = 4199
expect(connection.time.time).returns(4200.3)
stub(self.ch.send_frame)
self.ch.send_heartbeat()
assert_equals(4199, self.ch._last_heartbeat_send)
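    # The throttle these three tests pin down, as a sketch (assuming
    # connection.time.time() returns seconds):
    #
    #     if time.time() >= self._last_heartbeat_send + connection._heartbeat:
    #         self.send_frame(HeartbeatFrame)
    #         self._last_heartbeat_send = time.time()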
def test_recv_start(self):
expect(self.ch._send_start_ok)
self.ch.connection._closed = 'maybe'
self.ch._recv_start('frame')
assert_false(self.ch.connection._closed)
def test_send_start_ok(self):
self.ch.connection._properties = 'props'
self.ch.connection._login_method = 'please'
self.ch.connection._login_response = 'thanks'
self.ch.connection._locale = 'home'
with expect(mock(connection, 'Writer')).returns(mock()) as writer:
expect(writer.write_table).args('props')
expect(writer.write_shortstr).args('please')
expect(writer.write_longstr).args('thanks')
expect(writer.write_shortstr).args('home')
expect(mock(connection, 'MethodFrame')).args(
0, 10, 11, writer).returns('frame')
expect(self.ch.send_frame).args('frame')
expect(self.ch.add_synchronous_cb).args(self.ch._recv_tune)
self.ch._send_start_ok()
def test_recv_tune_when_no_broker_max_and_defined_heartbeat(self):
self.ch.connection._channel_max = 42
self.ch.connection._frame_max = 43
self.ch.connection._heartbeat = 8
frame = mock()
expect(frame.args.read_short).returns(0)
expect(frame.args.read_long).returns(0)
expect(self.ch._send_tune_ok)
expect(self.ch._send_open)
expect(self.ch.send_heartbeat)
self.ch._recv_tune(frame)
assert_equals(42, self.ch.connection._channel_max)
assert_equals(43, self.ch.connection._frame_max)
assert_equals(8, self.ch.connection._heartbeat)
def test_recv_tune_when_broker_max_and_undefined_heartbeat(self):
self.ch.connection._channel_max = 42
self.ch.connection._frame_max = 43
self.ch.connection._heartbeat = None
frame = mock()
expect(frame.args.read_short).returns(500)
expect(frame.args.read_long).returns(501)
expect(frame.args.read_short).returns(7)
expect(self.ch._send_tune_ok)
expect(self.ch._send_open)
expect(self.ch.send_heartbeat)
self.ch._recv_tune(frame)
assert_equals(500, self.ch.connection._channel_max)
assert_equals(501, self.ch.connection._frame_max)
assert_equals(7, self.ch.connection._heartbeat)
def test_send_tune_ok_when_heartbeat(self):
self.ch.connection._channel_max = 42
self.ch.connection._frame_max = 43
self.ch.connection._heartbeat = 8
with expect(mock(connection, 'Writer')).returns(mock()) as writer:
expect(writer.write_short).args(42)
expect(writer.write_long).args(43)
expect(writer.write_short).args(8)
expect(mock(connection, 'MethodFrame')).args(
0, 10, 31, writer).returns('frame')
expect(self.ch.send_frame).args('frame')
self.ch._send_tune_ok()
def test_send_tune_ok_when_no_heartbeat(self):
self.ch.connection._channel_max = 42
self.ch.connection._frame_max = 43
self.ch.connection._heartbeat = None
with expect(mock(connection, 'Writer')).returns(mock()) as writer:
expect(writer.write_short).args(42)
expect(writer.write_long).args(43)
expect(writer.write_short).args(0)
expect(mock(connection, 'MethodFrame')).args(
0, 10, 31, writer).returns('frame')
expect(self.ch.send_frame).args('frame')
self.ch._send_tune_ok()
def test_recv_secure(self):
expect(self.ch._send_open)
self.ch._recv_secure('frame')
def test_send_open(self):
self.connection._vhost = '/foo'
with expect(mock(connection, 'Writer')).returns(mock()) as writer:
expect(writer.write_shortstr).args('/foo')
expect(writer.write_shortstr).args('')
expect(writer.write_bit).args(True)
expect(mock(connection, 'MethodFrame')).args(
0, 10, 40, writer).returns('frame')
expect(self.ch.send_frame).args('frame')
expect(self.ch.add_synchronous_cb).args(self.ch._recv_open_ok)
self.ch._send_open()
def test_recv_open_ok(self):
self.ch.connection._connected = False
expect(self.ch.connection._flush_buffered_frames)
expect(self.ch.connection._callback_open)
self.ch._recv_open_ok('frame')
assert_true(self.ch.connection._connected)
def test_send_close(self):
self.ch.connection._close_info = {
'reply_code': 42,
'reply_text': 'wrong answer' * 60,
'class_id': 4,
'method_id': 20,
}
with expect(mock(connection, 'Writer')).returns(mock()) as writer:
expect(writer.write_short).args(42)
expect(writer.write_shortstr).args(('wrong answer' * 60)[:255])
expect(writer.write_short).args(4)
expect(writer.write_short).args(20)
expect(mock(connection, 'MethodFrame')).args(
0, 10, 50, writer).returns('frame')
expect(self.ch.send_frame).args('frame')
expect(self.ch.add_synchronous_cb).args(self.ch._recv_close_ok)
self.ch._send_close()
def test_recv_close(self):
self.ch.connection._closed = False
frame = mock()
expect(frame.args.read_short).returns(42)
expect(frame.args.read_shortstr).returns('wrong answer')
expect(frame.args.read_short).returns(4)
expect(frame.args.read_short).returns(20)
expect(self.ch._send_close_ok)
expect(self.ch.connection.disconnect)
expect(self.ch.connection._callback_close)
self.ch._recv_close(frame)
assert_equals(self.ch.connection._close_info, {
'reply_code': 42,
'reply_text': 'wrong answer',
'class_id': 4,
'method_id': 20,
})
assert_true(self.ch.connection._closed)
def test_send_close_ok(self):
expect(mock(connection, 'MethodFrame')).args(
0, 10, 51).returns('frame')
expect(self.ch.send_frame).args('frame')
self.ch._send_close_ok()
def test_recv_close_ok(self):
self.ch.connection._closed = False
expect(self.ch.connection.disconnect)
expect(self.ch.connection._callback_close)
self.ch._recv_close_ok('frame')
assert_true(self.ch.connection._closed)

repo: apporc/neutron | path: neutron/db/db_base_plugin_v2.py | license: apache-2.0 | size: 64808 bytes | copies: 1

# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import netaddr
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import uuidutils
from sqlalchemy import and_
from sqlalchemy import event
from neutron._i18n import _, _LE, _LI
from neutron.api.rpc.agentnotifiers import l3_rpc_agent_api
from neutron.api.v2 import attributes
from neutron.callbacks import events
from neutron.callbacks import exceptions
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import constants
from neutron.common import exceptions as n_exc
from neutron.common import ipv6_utils
from neutron.common import utils
from neutron import context as ctx
from neutron.db import api as db_api
from neutron.db import db_base_plugin_common
from neutron.db import ipam_non_pluggable_backend
from neutron.db import ipam_pluggable_backend
from neutron.db import models_v2
from neutron.db import rbac_db_mixin as rbac_mixin
from neutron.db import rbac_db_models as rbac_db
from neutron.db import sqlalchemyutils
from neutron.extensions import l3
from neutron import ipam
from neutron.ipam import subnet_alloc
from neutron import manager
from neutron import neutron_plugin_base_v2
from neutron.notifiers import nova as nova_notifier
from neutron.plugins.common import constants as service_constants
LOG = logging.getLogger(__name__)
# Ports with the following 'device_owner' values will not prevent
# network deletion. If delete_network() finds that all ports on a
# network have these owners, it will explicitly delete each port
# and allow network deletion to continue. Similarly, if delete_subnet()
# finds out that all existing IP Allocations are associated with ports
# with these owners, it will allow subnet deletion to proceed with the
# IP allocations being cleaned up by cascade.
AUTO_DELETE_PORT_OWNERS = [constants.DEVICE_OWNER_DHCP]
DNS_DOMAIN_DEFAULT = 'openstacklocal.'
FQDN_MAX_LEN = 255
def _check_subnet_not_used(context, subnet_id):
try:
kwargs = {'context': context, 'subnet_id': subnet_id}
registry.notify(
resources.SUBNET, events.BEFORE_DELETE, None, **kwargs)
except exceptions.CallbackFailure as e:
raise n_exc.SubnetInUse(subnet_id=subnet_id, reason=e)
class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
neutron_plugin_base_v2.NeutronPluginBaseV2,
rbac_mixin.RbacPluginMixin):
"""V2 Neutron plugin interface implementation using SQLAlchemy models.
Whenever a non-read call happens the plugin will call an event handler
class method (e.g., network_created()). The result is that this class
can be sub-classed by other classes that add custom behaviors on certain
events.
"""
# This attribute specifies whether the plugin supports or not
# bulk/pagination/sorting operations. Name mangling is used in
# order to ensure it is qualified by class
__native_bulk_support = True
__native_pagination_support = True
__native_sorting_support = True
def __init__(self):
self.set_ipam_backend()
if cfg.CONF.notify_nova_on_port_status_changes:
# NOTE(arosen) These event listeners are here to hook into when
# port status changes and notify nova about their change.
self.nova_notifier = nova_notifier.Notifier()
event.listen(models_v2.Port, 'after_insert',
self.nova_notifier.send_port_status)
event.listen(models_v2.Port, 'after_update',
self.nova_notifier.send_port_status)
event.listen(models_v2.Port.status, 'set',
self.nova_notifier.record_port_status_changed)
for e in (events.BEFORE_CREATE, events.BEFORE_UPDATE,
events.BEFORE_DELETE):
registry.subscribe(self.validate_network_rbac_policy_change,
rbac_mixin.RBAC_POLICY, e)
def validate_network_rbac_policy_change(self, resource, event, trigger,
context, object_type, policy,
**kwargs):
"""Validates network RBAC policy changes.
On creation, verify that the creator is an admin or that it owns the
network it is sharing.
On update and delete, make sure the tenant losing access does not have
resources that depend on that access.
"""
if object_type != 'network':
# we only care about network policies
return
# The object a policy targets cannot be changed so we can look
# at the original network for the update event as well.
net = self._get_network(context, policy['object_id'])
if event in (events.BEFORE_CREATE, events.BEFORE_UPDATE):
# we still have to verify that the caller owns the network because
# _get_network will succeed on a shared network
if not context.is_admin and net['tenant_id'] != context.tenant_id:
msg = _("Only admins can manipulate policies on networks "
"they do not own.")
raise n_exc.InvalidInput(error_message=msg)
tenant_to_check = None
if event == events.BEFORE_UPDATE:
new_tenant = kwargs['policy_update']['target_tenant']
if policy['target_tenant'] != new_tenant:
tenant_to_check = policy['target_tenant']
if event == events.BEFORE_DELETE:
tenant_to_check = policy['target_tenant']
if tenant_to_check:
self.ensure_no_tenant_ports_on_network(net['id'], net['tenant_id'],
tenant_to_check)
def ensure_no_tenant_ports_on_network(self, network_id, net_tenant_id,
tenant_id):
ctx_admin = ctx.get_admin_context()
rb_model = rbac_db.NetworkRBAC
other_rbac_entries = self._model_query(ctx_admin, rb_model).filter(
and_(rb_model.object_id == network_id,
rb_model.action == 'access_as_shared'))
ports = self._model_query(ctx_admin, models_v2.Port).filter(
models_v2.Port.network_id == network_id)
if tenant_id == '*':
# for the wildcard we need to get all of the rbac entries to
# see if any allow the remaining ports on the network.
other_rbac_entries = other_rbac_entries.filter(
rb_model.target_tenant != tenant_id)
# any port with another RBAC entry covering it or one belonging to
# the same tenant as the network owner is ok
allowed_tenants = [entry['target_tenant']
for entry in other_rbac_entries]
allowed_tenants.append(net_tenant_id)
ports = ports.filter(
~models_v2.Port.tenant_id.in_(allowed_tenants))
else:
# if there is a wildcard rule, we can return early because it
# allows any ports
query = other_rbac_entries.filter(rb_model.target_tenant == '*')
if query.count():
return
ports = ports.filter(models_v2.Port.tenant_id == tenant_id)
if ports.count():
raise n_exc.InvalidSharedSetting(network=network_id)
def set_ipam_backend(self):
if cfg.CONF.ipam_driver:
self.ipam = ipam_pluggable_backend.IpamPluggableBackend()
else:
self.ipam = ipam_non_pluggable_backend.IpamNonPluggableBackend()
def _validate_host_route(self, route, ip_version):
        try:
            netaddr.IPNetwork(route['destination'])
            netaddr.IPAddress(route['nexthop'])
        except (netaddr.core.AddrFormatError, ValueError):
            # netaddr.IPAddress raises ValueError on some malformed input
            err_msg = _("Invalid route: %s") % route
            raise n_exc.InvalidInput(error_message=err_msg)
self._validate_ip_version(ip_version, route['nexthop'], 'nexthop')
self._validate_ip_version(ip_version, route['destination'],
'destination')
def _validate_shared_update(self, context, id, original, updated):
# The only case that needs to be validated is when 'shared'
# goes from True to False
if updated['shared'] == original.shared or updated['shared']:
return
ports = self._model_query(
context, models_v2.Port).filter(
and_(
models_v2.Port.network_id == id,
models_v2.Port.device_owner !=
constants.DEVICE_OWNER_ROUTER_GW,
models_v2.Port.device_owner !=
constants.DEVICE_OWNER_FLOATINGIP))
subnets = self._model_query(
context, models_v2.Subnet).filter(
models_v2.Subnet.network_id == id)
tenant_ids = set([port['tenant_id'] for port in ports] +
[subnet['tenant_id'] for subnet in subnets])
# raise if multiple tenants found or if the only tenant found
# is not the owner of the network
        if (len(tenant_ids) > 1 or (len(tenant_ids) == 1 and
                tenant_ids.pop() != original.tenant_id)):
raise n_exc.InvalidSharedSetting(network=original.name)
def _validate_ipv6_attributes(self, subnet, cur_subnet):
if cur_subnet:
self._validate_ipv6_update_dhcp(subnet, cur_subnet)
return
ra_mode_set = attributes.is_attr_set(subnet.get('ipv6_ra_mode'))
address_mode_set = attributes.is_attr_set(
subnet.get('ipv6_address_mode'))
self._validate_ipv6_dhcp(ra_mode_set, address_mode_set,
subnet['enable_dhcp'])
if ra_mode_set and address_mode_set:
self._validate_ipv6_combination(subnet['ipv6_ra_mode'],
subnet['ipv6_address_mode'])
if address_mode_set or ra_mode_set:
self._validate_eui64_applicable(subnet)
def _validate_eui64_applicable(self, subnet):
        # Per RFC 4862, section 5.5.3, the prefix length and the interface
        # id together must add up to 128 bits. Neutron currently supports
        # only EUI-64 interface ids, so the prefix length is limited to
        # exactly 64.
if ipv6_utils.is_auto_address_subnet(subnet):
if netaddr.IPNetwork(subnet['cidr']).prefixlen != 64:
msg = _('Invalid CIDR %s for IPv6 address mode. '
'OpenStack uses the EUI-64 address format, '
'which requires the prefix to be /64.')
raise n_exc.InvalidInput(
error_message=(msg % subnet['cidr']))
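    # To illustrate the /64 rule above (a sketch using netaddr directly):
    #
    #     >>> netaddr.IPNetwork('2001:db8::/64').prefixlen
    #     64      # acceptable for an EUI-64 auto-address subnet
    #     >>> netaddr.IPNetwork('2001:db8::/80').prefixlen
    #     80      # rejected: EUI-64 interface ids require exactly a /64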
def _validate_ipv6_combination(self, ra_mode, address_mode):
if ra_mode != address_mode:
msg = _("ipv6_ra_mode set to '%(ra_mode)s' with ipv6_address_mode "
"set to '%(addr_mode)s' is not valid. "
"If both attributes are set, they must be the same value"
) % {'ra_mode': ra_mode, 'addr_mode': address_mode}
raise n_exc.InvalidInput(error_message=msg)
def _validate_ipv6_dhcp(self, ra_mode_set, address_mode_set, enable_dhcp):
if (ra_mode_set or address_mode_set) and not enable_dhcp:
msg = _("ipv6_ra_mode or ipv6_address_mode cannot be set when "
"enable_dhcp is set to False.")
raise n_exc.InvalidInput(error_message=msg)
def _validate_ipv6_update_dhcp(self, subnet, cur_subnet):
if ('enable_dhcp' in subnet and not subnet['enable_dhcp']):
msg = _("Cannot disable enable_dhcp with "
"ipv6 attributes set")
ra_mode_set = attributes.is_attr_set(subnet.get('ipv6_ra_mode'))
address_mode_set = attributes.is_attr_set(
subnet.get('ipv6_address_mode'))
if ra_mode_set or address_mode_set:
raise n_exc.InvalidInput(error_message=msg)
old_ra_mode_set = attributes.is_attr_set(
cur_subnet.get('ipv6_ra_mode'))
old_address_mode_set = attributes.is_attr_set(
cur_subnet.get('ipv6_address_mode'))
if old_ra_mode_set or old_address_mode_set:
raise n_exc.InvalidInput(error_message=msg)
def _create_bulk(self, resource, context, request_items):
objects = []
collection = "%ss" % resource
items = request_items[collection]
context.session.begin(subtransactions=True)
try:
for item in items:
obj_creator = getattr(self, 'create_%s' % resource)
objects.append(obj_creator(context, item))
context.session.commit()
except Exception:
context.session.rollback()
with excutils.save_and_reraise_exception():
LOG.error(_LE("An exception occurred while creating "
"the %(resource)s:%(item)s"),
{'resource': resource, 'item': item})
return objects
def create_network_bulk(self, context, networks):
return self._create_bulk('network', context, networks)
def create_network(self, context, network):
"""Handle creation of a single network."""
# single request processing
n = network['network']
# NOTE(jkoelker) Get the tenant_id outside of the session to avoid
# unneeded db action if the operation raises
tenant_id = self._get_tenant_id_for_create(context, n)
with context.session.begin(subtransactions=True):
args = {'tenant_id': tenant_id,
'id': n.get('id') or uuidutils.generate_uuid(),
'name': n['name'],
'admin_state_up': n['admin_state_up'],
'mtu': n.get('mtu', constants.DEFAULT_NETWORK_MTU),
'status': n.get('status', constants.NET_STATUS_ACTIVE)}
network = models_v2.Network(**args)
if n['shared']:
entry = rbac_db.NetworkRBAC(
network=network, action='access_as_shared',
target_tenant='*', tenant_id=network['tenant_id'])
context.session.add(entry)
context.session.add(network)
return self._make_network_dict(network, process_extensions=False,
context=context)
def update_network(self, context, id, network):
n = network['network']
with context.session.begin(subtransactions=True):
network = self._get_network(context, id)
# validate 'shared' parameter
if 'shared' in n:
entry = None
for item in network.rbac_entries:
if (item.action == 'access_as_shared' and
item.target_tenant == '*'):
entry = item
break
setattr(network, 'shared', True if entry else False)
self._validate_shared_update(context, id, network, n)
update_shared = n.pop('shared')
if update_shared and not entry:
entry = rbac_db.NetworkRBAC(
network=network, action='access_as_shared',
target_tenant='*', tenant_id=network['tenant_id'])
context.session.add(entry)
elif not update_shared and entry:
context.session.delete(entry)
context.session.expire(network, ['rbac_entries'])
network.update(n)
return self._make_network_dict(network, context=context)
def delete_network(self, context, id):
with context.session.begin(subtransactions=True):
network = self._get_network(context, id)
context.session.query(models_v2.Port).filter_by(
network_id=id).filter(
models_v2.Port.device_owner.
in_(AUTO_DELETE_PORT_OWNERS)).delete(synchronize_session=False)
port_in_use = context.session.query(models_v2.Port).filter_by(
network_id=id).first()
if port_in_use:
raise n_exc.NetworkInUse(net_id=id)
# clean up subnets
subnets = self._get_subnets_by_network(context, id)
for subnet in subnets:
self.delete_subnet(context, subnet['id'])
context.session.delete(network)
def get_network(self, context, id, fields=None):
network = self._get_network(context, id)
return self._make_network_dict(network, fields, context=context)
def get_networks(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
marker_obj = self._get_marker_obj(context, 'network', limit, marker)
make_network_dict = functools.partial(self._make_network_dict,
context=context)
return self._get_collection(context, models_v2.Network,
make_network_dict,
filters=filters, fields=fields,
sorts=sorts,
limit=limit,
marker_obj=marker_obj,
page_reverse=page_reverse)
def get_networks_count(self, context, filters=None):
return self._get_collection_count(context, models_v2.Network,
filters=filters)
def create_subnet_bulk(self, context, subnets):
return self._create_bulk('subnet', context, subnets)
def _validate_ip_version(self, ip_version, addr, name):
"""Check IP field of a subnet match specified ip version."""
ip = netaddr.IPNetwork(addr)
if ip.version != ip_version:
data = {'name': name,
'addr': addr,
'ip_version': ip_version}
msg = _("%(name)s '%(addr)s' does not match "
"the ip_version '%(ip_version)s'") % data
raise n_exc.InvalidInput(error_message=msg)
def _validate_subnet(self, context, s, cur_subnet=None):
"""Validate a subnet spec."""
# This method will validate attributes which may change during
# create_subnet() and update_subnet().
# The method requires the subnet spec 's' has 'ip_version' field.
# If 's' dict does not have 'ip_version' field in an API call
# (e.g., update_subnet()), you need to set 'ip_version' field
# before calling this method.
ip_ver = s['ip_version']
if attributes.is_attr_set(s.get('cidr')):
self._validate_ip_version(ip_ver, s['cidr'], 'cidr')
# TODO(watanabe.isao): After we found a way to avoid the re-sync
# from the agent side, this restriction could be removed.
if cur_subnet:
dhcp_was_enabled = cur_subnet.enable_dhcp
else:
dhcp_was_enabled = False
if s.get('enable_dhcp') and not dhcp_was_enabled:
subnet_prefixlen = netaddr.IPNetwork(s['cidr']).prefixlen
error_message = _("Subnet has a prefix length that is "
"incompatible with DHCP service enabled.")
if ((ip_ver == 4 and subnet_prefixlen > 30) or
(ip_ver == 6 and subnet_prefixlen > 126)):
raise n_exc.InvalidInput(error_message=error_message)
net = netaddr.IPNetwork(s['cidr'])
if net.is_multicast():
error_message = _("Multicast IP subnet is not supported "
"if enable_dhcp is True.")
raise n_exc.InvalidInput(error_message=error_message)
elif net.is_loopback():
error_message = _("Loopback IP subnet is not supported "
"if enable_dhcp is True.")
raise n_exc.InvalidInput(error_message=error_message)
if attributes.is_attr_set(s.get('gateway_ip')):
self._validate_ip_version(ip_ver, s['gateway_ip'], 'gateway_ip')
if (cfg.CONF.force_gateway_on_subnet and
not ipam.utils.check_gateway_in_subnet(
s['cidr'], s['gateway_ip'])):
error_message = _("Gateway is not valid on subnet")
raise n_exc.InvalidInput(error_message=error_message)
# Ensure the gateway IP is not assigned to any port
# skip this check in case of create (s parameter won't have id)
            # NOTE(salv-orlando): There is a slight chance of a race when
            # a subnet-update and a router-interface-add operation are
            # executed concurrently
if cur_subnet and not ipv6_utils.is_ipv6_pd_enabled(s):
alloc_qry = context.session.query(models_v2.IPAllocation)
allocated = alloc_qry.filter_by(
ip_address=cur_subnet['gateway_ip'],
subnet_id=cur_subnet['id']).first()
if allocated and allocated['port_id']:
raise n_exc.GatewayIpInUse(
ip_address=cur_subnet['gateway_ip'],
port_id=allocated['port_id'])
if attributes.is_attr_set(s.get('dns_nameservers')):
if len(s['dns_nameservers']) > cfg.CONF.max_dns_nameservers:
raise n_exc.DNSNameServersExhausted(
subnet_id=s.get('id', _('new subnet')),
quota=cfg.CONF.max_dns_nameservers)
for dns in s['dns_nameservers']:
try:
netaddr.IPAddress(dns)
except Exception:
raise n_exc.InvalidInput(
error_message=(_("Error parsing dns address %s") %
dns))
self._validate_ip_version(ip_ver, dns, 'dns_nameserver')
if attributes.is_attr_set(s.get('host_routes')):
if len(s['host_routes']) > cfg.CONF.max_subnet_host_routes:
raise n_exc.HostRoutesExhausted(
subnet_id=s.get('id', _('new subnet')),
quota=cfg.CONF.max_subnet_host_routes)
# check if the routes are all valid
for rt in s['host_routes']:
self._validate_host_route(rt, ip_ver)
if ip_ver == 4:
if attributes.is_attr_set(s.get('ipv6_ra_mode')):
raise n_exc.InvalidInput(
error_message=(_("ipv6_ra_mode is not valid when "
"ip_version is 4")))
if attributes.is_attr_set(s.get('ipv6_address_mode')):
raise n_exc.InvalidInput(
error_message=(_("ipv6_address_mode is not valid when "
"ip_version is 4")))
if ip_ver == 6:
self._validate_ipv6_attributes(s, cur_subnet)
def _validate_subnet_for_pd(self, subnet):
"""Validates that subnet parameters are correct for IPv6 PD"""
if (subnet.get('ip_version') != constants.IP_VERSION_6):
reason = _("Prefix Delegation can only be used with IPv6 "
"subnets.")
raise n_exc.BadRequest(resource='subnets', msg=reason)
mode_list = [constants.IPV6_SLAAC,
constants.DHCPV6_STATELESS,
attributes.ATTR_NOT_SPECIFIED]
ra_mode = subnet.get('ipv6_ra_mode')
if ra_mode not in mode_list:
reason = _("IPv6 RA Mode must be SLAAC or Stateless for "
"Prefix Delegation.")
raise n_exc.BadRequest(resource='subnets', msg=reason)
address_mode = subnet.get('ipv6_address_mode')
if address_mode not in mode_list:
reason = _("IPv6 Address Mode must be SLAAC or Stateless for "
"Prefix Delegation.")
raise n_exc.BadRequest(resource='subnets', msg=reason)
def _update_router_gw_ports(self, context, network, subnet):
l3plugin = manager.NeutronManager.get_service_plugins().get(
service_constants.L3_ROUTER_NAT)
if l3plugin:
gw_ports = self._get_router_gw_ports_by_network(context,
network['id'])
router_ids = [p['device_id'] for p in gw_ports]
ctx_admin = context.elevated()
ext_subnets_dict = {s['id']: s for s in network['subnets']}
for id in router_ids:
router = l3plugin.get_router(ctx_admin, id)
external_gateway_info = router['external_gateway_info']
# Get all stateful (i.e. non-SLAAC/DHCPv6-stateless) fixed ips
fips = [f for f in external_gateway_info['external_fixed_ips']
if not ipv6_utils.is_auto_address_subnet(
ext_subnets_dict[f['subnet_id']])]
num_fips = len(fips)
# Don't add the fixed IP to the port if it already
# has a stateful fixed IP of the same IP version
if num_fips > 1:
continue
if num_fips == 1 and netaddr.IPAddress(
fips[0]['ip_address']).version == subnet['ip_version']:
continue
external_gateway_info['external_fixed_ips'].append(
{'subnet_id': subnet['id']})
info = {'router': {'external_gateway_info':
external_gateway_info}}
l3plugin.update_router(context, id, info)
def _create_subnet(self, context, subnet, subnetpool_id):
s = subnet['subnet']
with context.session.begin(subtransactions=True):
network = self._get_network(context, s["network_id"])
subnet, ipam_subnet = self.ipam.allocate_subnet(context,
network,
s,
subnetpool_id)
if hasattr(network, 'external') and network.external:
self._update_router_gw_ports(context,
network,
subnet)
# If this subnet supports auto-addressing, then update any
# internal ports on the network with addresses for this subnet.
if ipv6_utils.is_auto_address_subnet(subnet):
self.ipam.add_auto_addrs_on_network_ports(context, subnet,
ipam_subnet)
return self._make_subnet_dict(subnet, context=context)
def _get_subnetpool_id(self, context, subnet):
"""Returns the subnetpool id for this request
If the pool id was explicitly set in the request then that will be
returned, even if it is None.
Otherwise, the default pool for the IP version requested will be
returned. This will either be a pool id or None (the default for each
configuration parameter). This implies that the ip version must be
either set implicitly with a specific cidr or explicitly using
        the ip_version attribute.
:param subnet: The subnet dict from the request
"""
subnetpool_id = subnet.get('subnetpool_id',
attributes.ATTR_NOT_SPECIFIED)
if subnetpool_id != attributes.ATTR_NOT_SPECIFIED:
return subnetpool_id
cidr = subnet.get('cidr')
if attributes.is_attr_set(cidr):
ip_version = netaddr.IPNetwork(cidr).version
else:
ip_version = subnet.get('ip_version')
if not attributes.is_attr_set(ip_version):
msg = _('ip_version must be specified in the absence of '
'cidr and subnetpool_id')
raise n_exc.BadRequest(resource='subnets', msg=msg)
if ip_version == 6 and cfg.CONF.ipv6_pd_enabled:
return constants.IPV6_PD_POOL_ID
subnetpool = self.get_default_subnetpool(context, ip_version)
if subnetpool:
return subnetpool['id']
# Until the default_subnet_pool config options are removed in the N
# release, check for them after get_default_subnetpool returns None.
# TODO(john-davidge): Remove after Mitaka release.
if ip_version == 4:
return cfg.CONF.default_ipv4_subnet_pool
return cfg.CONF.default_ipv6_subnet_pool
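    # Resolution order of the method above, illustrated (a sketch; actual
    # values depend on deployment configuration):
    #   1. subnetpool_id present in the request     -> returned as-is (may
    #      be None)
    #   2. IPv6 request with ipv6_pd_enabled=True   -> IPV6_PD_POOL_ID
    #   3. a default subnetpool for the IP version  -> its id
    #   4. legacy default_ipv4_subnet_pool /
    #      default_ipv6_subnet_pool config options  -> that value (or None)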
def create_subnet(self, context, subnet):
s = subnet['subnet']
cidr = s.get('cidr', attributes.ATTR_NOT_SPECIFIED)
prefixlen = s.get('prefixlen', attributes.ATTR_NOT_SPECIFIED)
has_cidr = attributes.is_attr_set(cidr)
has_prefixlen = attributes.is_attr_set(prefixlen)
if has_cidr and has_prefixlen:
msg = _('cidr and prefixlen must not be supplied together')
raise n_exc.BadRequest(resource='subnets', msg=msg)
if has_cidr:
# turn the CIDR into a proper subnet
net = netaddr.IPNetwork(s['cidr'])
subnet['subnet']['cidr'] = '%s/%s' % (net.network, net.prefixlen)
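            # e.g. a requested cidr of '10.0.0.5/24' is normalized here to
            # '10.0.0.0/24' (netaddr derives the true network address)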
s['tenant_id'] = self._get_tenant_id_for_create(context, s)
subnetpool_id = self._get_subnetpool_id(context, s)
if subnetpool_id:
self.ipam.validate_pools_with_subnetpool(s)
if subnetpool_id == constants.IPV6_PD_POOL_ID:
if has_cidr:
# We do not currently support requesting a specific
# cidr with IPv6 prefix delegation. Set the subnetpool_id
# to None and allow the request to continue as normal.
subnetpool_id = None
self._validate_subnet(context, s)
else:
prefix = constants.PROVISIONAL_IPV6_PD_PREFIX
subnet['subnet']['cidr'] = prefix
self._validate_subnet_for_pd(s)
else:
if not has_cidr:
msg = _('A cidr must be specified in the absence of a '
'subnet pool')
raise n_exc.BadRequest(resource='subnets', msg=msg)
self._validate_subnet(context, s)
return self._create_subnet(context, subnet, subnetpool_id)
def _update_allocation_pools(self, subnet):
"""Gets new allocation pools and formats them correctly"""
allocation_pools = self.ipam.generate_pools(subnet['cidr'],
subnet['gateway_ip'])
return [{'start': str(netaddr.IPAddress(p.first,
subnet['ip_version'])),
'end': str(netaddr.IPAddress(p.last, subnet['ip_version']))}
for p in allocation_pools]
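    # For example (a sketch; the exact ranges come from the IPAM driver's
    # generate_pools()): a subnet with cidr '10.0.0.0/24' and gateway
    # '10.0.0.1' would typically yield
    #     [{'start': '10.0.0.2', 'end': '10.0.0.254'}]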
def update_subnet(self, context, id, subnet):
"""Update the subnet with new info.
        The change, however, will not take effect until the client renews its
        DHCP lease or gratuitous DHCP offers are supported.
"""
s = subnet['subnet']
new_cidr = s.get('cidr')
db_subnet = self._get_subnet(context, id)
# Fill 'ip_version' and 'allocation_pools' fields with the current
# value since _validate_subnet() expects subnet spec has 'ip_version'
# and 'allocation_pools' fields.
s['ip_version'] = db_subnet.ip_version
s['cidr'] = db_subnet.cidr
s['id'] = db_subnet.id
s['tenant_id'] = db_subnet.tenant_id
s['subnetpool_id'] = db_subnet.subnetpool_id
self._validate_subnet(context, s, cur_subnet=db_subnet)
db_pools = [netaddr.IPRange(p['first_ip'], p['last_ip'])
for p in db_subnet.allocation_pools]
update_ports_needed = False
if new_cidr and ipv6_utils.is_ipv6_pd_enabled(s):
# This is an ipv6 prefix delegation-enabled subnet being given an
# updated cidr by the process_prefix_update RPC
s['cidr'] = new_cidr
update_ports_needed = True
net = netaddr.IPNetwork(s['cidr'], s['ip_version'])
# Update gateway_ip and allocation pools based on new cidr
s['gateway_ip'] = utils.get_first_host_ip(net, s['ip_version'])
s['allocation_pools'] = self._update_allocation_pools(s)
range_pools = None
if s.get('allocation_pools') is not None:
# Convert allocation pools to IPRange to simplify future checks
range_pools = self.ipam.pools_to_ip_range(s['allocation_pools'])
self.ipam.validate_allocation_pools(range_pools, s['cidr'])
s['allocation_pools'] = range_pools
# If either gateway_ip or allocation_pools were specified
gateway_ip = s.get('gateway_ip', db_subnet.gateway_ip)
gateway_ip_changed = gateway_ip != db_subnet.gateway_ip
if gateway_ip_changed or s.get('allocation_pools') is not None:
pools = range_pools if range_pools is not None else db_pools
if gateway_ip:
self.ipam.validate_gw_out_of_pools(gateway_ip, pools)
if gateway_ip_changed:
# Provide pre-update notification not to break plugins that don't
# support gateway ip change
kwargs = {'context': context, 'subnet_id': id,
'network_id': db_subnet.network_id}
registry.notify(resources.SUBNET_GATEWAY, events.BEFORE_UPDATE,
self, **kwargs)
with context.session.begin(subtransactions=True):
subnet, changes = self.ipam.update_db_subnet(context, id, s,
db_pools)
result = self._make_subnet_dict(subnet, context=context)
# Keep up with fields that changed
result.update(changes)
if update_ports_needed:
# Find ports that have not yet been updated
# with an IP address by Prefix Delegation, and update them
ports = self.get_ports(context)
routers = []
for port in ports:
fixed_ips = []
new_port = {'port': port}
for ip in port['fixed_ips']:
if ip['subnet_id'] == s['id']:
fixed_ip = {'subnet_id': s['id']}
if "router_interface" in port['device_owner']:
routers.append(port['device_id'])
fixed_ip['ip_address'] = s['gateway_ip']
fixed_ips.append(fixed_ip)
if fixed_ips:
new_port['port']['fixed_ips'] = fixed_ips
self.update_port(context, port['id'], new_port)
# Send router_update to l3_agent
if routers:
l3_rpc_notifier = l3_rpc_agent_api.L3AgentNotifyAPI()
l3_rpc_notifier.routers_updated(context, routers)
if gateway_ip_changed:
kwargs = {'context': context, 'subnet_id': id,
'network_id': db_subnet.network_id}
registry.notify(resources.SUBNET_GATEWAY, events.AFTER_UPDATE,
self, **kwargs)
return result
def _subnet_check_ip_allocations(self, context, subnet_id):
return (context.session.query(models_v2.IPAllocation).
filter_by(subnet_id=subnet_id).join(models_v2.Port).first())
def _subnet_get_user_allocation(self, context, subnet_id):
"""Check if there are any user ports on subnet and return first."""
        # need to join with the ports table as IPAllocation's port
        # is not joined eagerly, which would otherwise produce a query
        # yielding incorrect results
return (context.session.query(models_v2.IPAllocation).
filter_by(subnet_id=subnet_id).join(models_v2.Port).
filter(~models_v2.Port.device_owner.
in_(AUTO_DELETE_PORT_OWNERS)).first())
def _subnet_check_ip_allocations_internal_router_ports(self, context,
subnet_id):
# Do not delete the subnet if IP allocations for internal
# router ports still exist
allocs = context.session.query(models_v2.IPAllocation).filter_by(
subnet_id=subnet_id).join(models_v2.Port).filter(
models_v2.Port.device_owner.in_(
constants.ROUTER_INTERFACE_OWNERS)
).first()
if allocs:
LOG.debug("Subnet %s still has internal router ports, "
"cannot delete", subnet_id)
            raise n_exc.SubnetInUse(subnet_id=subnet_id)
def delete_subnet(self, context, id):
with context.session.begin(subtransactions=True):
subnet = self._get_subnet(context, id)
# Make sure the subnet isn't used by other resources
_check_subnet_not_used(context, id)
# Delete all network owned ports
qry_network_ports = (
context.session.query(models_v2.IPAllocation).
filter_by(subnet_id=subnet['id']).
join(models_v2.Port))
# Remove network owned ports, and delete IP allocations
# for IPv6 addresses which were automatically generated
# via SLAAC
is_auto_addr_subnet = ipv6_utils.is_auto_address_subnet(subnet)
if is_auto_addr_subnet:
self._subnet_check_ip_allocations_internal_router_ports(
context, id)
else:
qry_network_ports = (
qry_network_ports.filter(models_v2.Port.device_owner.
in_(AUTO_DELETE_PORT_OWNERS)))
network_ports = qry_network_ports.all()
if network_ports:
for port in network_ports:
context.session.delete(port)
            # Check if there are more IP allocations, unless
            # is_auto_address_subnet is True. In that case the check is
            # unnecessary. This additional check would not only be wasteful
            # for this class of subnet, but is also error-prone: when the
            # isolation level is set to READ COMMITTED, allocations made
            # concurrently will be returned by this query
if not is_auto_addr_subnet:
alloc = self._subnet_check_ip_allocations(context, id)
if alloc:
LOG.info(_LI("Found port (%(port_id)s, %(ip)s) having IP "
"allocation on subnet "
"%(subnet)s, cannot delete"),
{'ip': alloc.ip_address,
'port_id': alloc.port_id,
'subnet': id})
raise n_exc.SubnetInUse(subnet_id=id)
context.session.delete(subnet)
# Delete related ipam subnet manually,
# since there is no FK relationship
self.ipam.delete_subnet(context, id)
def get_subnet(self, context, id, fields=None):
subnet = self._get_subnet(context, id)
return self._make_subnet_dict(subnet, fields, context=context)
def get_subnets(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_subnets(context, filters, fields, sorts, limit,
marker, page_reverse)
def get_subnets_count(self, context, filters=None):
return self._get_collection_count(context, models_v2.Subnet,
filters=filters)
def get_subnets_by_network(self, context, network_id):
return [self._make_subnet_dict(subnet_db) for subnet_db in
self._get_subnets_by_network(context, network_id)]
def _create_subnetpool_prefix(self, context, cidr, subnetpool_id):
prefix_args = {'cidr': cidr, 'subnetpool_id': subnetpool_id}
subnetpool_prefix = models_v2.SubnetPoolPrefix(**prefix_args)
context.session.add(subnetpool_prefix)
def _validate_address_scope_id(self, context, address_scope_id,
subnetpool_id, sp_prefixes):
"""Validate the address scope before associating.
Subnetpool can associate with an address scope if
- the tenant user is the owner of both the subnetpool and
address scope
- the admin is associating the subnetpool with the shared
address scope
- there is no prefix conflict with the existing subnetpools
associated with the address scope.
"""
if not attributes.is_attr_set(address_scope_id):
return
if not self.is_address_scope_owned_by_tenant(context,
address_scope_id):
raise n_exc.IllegalSubnetPoolAssociationToAddressScope(
subnetpool_id=subnetpool_id, address_scope_id=address_scope_id)
subnetpools = self._get_subnetpools_by_address_scope_id(
context, address_scope_id)
new_set = netaddr.IPSet(sp_prefixes)
for sp in subnetpools:
if sp.id == subnetpool_id:
continue
sp_set = netaddr.IPSet([prefix['cidr'] for prefix in sp.prefixes])
if sp_set.intersection(new_set):
raise n_exc.AddressScopePrefixConflict()
def _check_subnetpool_update_allowed(self, context, subnetpool_id,
address_scope_id):
"""Check if the subnetpool can be updated or not.
If the subnetpool is associated to a shared address scope not owned
by the tenant, then the subnetpool cannot be updated.
"""
if not self.is_address_scope_owned_by_tenant(context,
address_scope_id):
msg = _("subnetpool %(subnetpool_id)s cannot be updated when"
" associated with shared address scope "
"%(address_scope_id)s") % {
'subnetpool_id': subnetpool_id,
'address_scope_id': address_scope_id}
raise n_exc.IllegalSubnetPoolUpdate(reason=msg)
def _check_default_subnetpool_exists(self, context, ip_version):
"""Check if a default already exists for the given IP version.
There can only be one default subnetpool for each IP family. Raise an
InvalidInput error if a default has already been set.
"""
if self.get_default_subnetpool(context, ip_version):
msg = _("A default subnetpool for this IP family has already "
"been set. Only one default may exist per IP family")
raise n_exc.InvalidInput(error_message=msg)
def create_subnetpool(self, context, subnetpool):
"""Create a subnetpool"""
sp = subnetpool['subnetpool']
sp_reader = subnet_alloc.SubnetPoolReader(sp)
if sp_reader.address_scope_id is attributes.ATTR_NOT_SPECIFIED:
sp_reader.address_scope_id = None
if sp_reader.is_default:
self._check_default_subnetpool_exists(context,
sp_reader.ip_version)
        self._validate_address_scope_id(context, sp_reader.address_scope_id,
                                        sp_reader.id, sp_reader.prefixes)
tenant_id = self._get_tenant_id_for_create(context, sp)
with context.session.begin(subtransactions=True):
pool_args = {'tenant_id': tenant_id,
'id': sp_reader.id,
'name': sp_reader.name,
'ip_version': sp_reader.ip_version,
'default_prefixlen':
sp_reader.default_prefixlen,
'min_prefixlen': sp_reader.min_prefixlen,
'max_prefixlen': sp_reader.max_prefixlen,
'is_default': sp_reader.is_default,
'shared': sp_reader.shared,
'default_quota': sp_reader.default_quota,
'address_scope_id': sp_reader.address_scope_id}
subnetpool = models_v2.SubnetPool(**pool_args)
context.session.add(subnetpool)
for prefix in sp_reader.prefixes:
self._create_subnetpool_prefix(context,
prefix,
subnetpool.id)
return self._make_subnetpool_dict(subnetpool)
def _update_subnetpool_prefixes(self, context, prefix_list, id):
with context.session.begin(subtransactions=True):
context.session.query(models_v2.SubnetPoolPrefix).filter_by(
subnetpool_id=id).delete()
for prefix in prefix_list:
model_prefix = models_v2.SubnetPoolPrefix(cidr=prefix,
subnetpool_id=id)
context.session.add(model_prefix)
def _updated_subnetpool_dict(self, model, new_pool):
updated = {}
new_prefixes = new_pool.get('prefixes', attributes.ATTR_NOT_SPECIFIED)
orig_prefixes = [str(x.cidr) for x in model['prefixes']]
if new_prefixes is not attributes.ATTR_NOT_SPECIFIED:
orig_set = netaddr.IPSet(orig_prefixes)
new_set = netaddr.IPSet(new_prefixes)
if not orig_set.issubset(new_set):
msg = _("Existing prefixes must be "
"a subset of the new prefixes")
raise n_exc.IllegalSubnetPoolPrefixUpdate(msg=msg)
new_set.compact()
updated['prefixes'] = [str(x.cidr) for x in new_set.iter_cidrs()]
else:
updated['prefixes'] = orig_prefixes
for key in ['id', 'name', 'ip_version', 'min_prefixlen',
'max_prefixlen', 'default_prefixlen', 'is_default',
'shared', 'default_quota', 'address_scope_id']:
self._write_key(key, updated, model, new_pool)
return updated
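    # The prefix-subset rule above in isolation (a sketch using netaddr
    # directly):
    #
    #     >>> netaddr.IPSet(['10.0.0.0/16']).issubset(
    #     ...     netaddr.IPSet(['10.0.0.0/8']))
    #     True    # growing a prefix is allowed; shrinking one is not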
def _write_key(self, key, update, orig, new_dict):
new_val = new_dict.get(key, attributes.ATTR_NOT_SPECIFIED)
if new_val is not attributes.ATTR_NOT_SPECIFIED:
update[key] = new_dict[key]
else:
update[key] = orig[key]
def update_subnetpool(self, context, id, subnetpool):
"""Update a subnetpool"""
new_sp = subnetpool['subnetpool']
with context.session.begin(subtransactions=True):
orig_sp = self._get_subnetpool(context, id)
updated = self._updated_subnetpool_dict(orig_sp, new_sp)
updated['tenant_id'] = orig_sp.tenant_id
reader = subnet_alloc.SubnetPoolReader(updated)
if reader.is_default and not orig_sp.is_default:
self._check_default_subnetpool_exists(context,
reader.ip_version)
if orig_sp.address_scope_id:
self._check_subnetpool_update_allowed(context, id,
orig_sp.address_scope_id)
self._validate_address_scope_id(context, reader.address_scope_id,
id, reader.prefixes)
orig_sp.update(self._filter_non_model_columns(
reader.subnetpool,
models_v2.SubnetPool))
self._update_subnetpool_prefixes(context,
reader.prefixes,
id)
for key in ['min_prefixlen', 'max_prefixlen', 'default_prefixlen']:
            updated[key] = str(updated[key])
return updated
def get_subnetpool(self, context, id, fields=None):
"""Retrieve a subnetpool."""
subnetpool = self._get_subnetpool(context, id)
return self._make_subnetpool_dict(subnetpool, fields)
def get_subnetpools(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
"""Retrieve list of subnetpools."""
marker_obj = self._get_marker_obj(context, 'subnetpool', limit, marker)
collection = self._get_collection(context, models_v2.SubnetPool,
self._make_subnetpool_dict,
filters=filters, fields=fields,
sorts=sorts,
limit=limit,
marker_obj=marker_obj,
page_reverse=page_reverse)
return collection
def get_default_subnetpool(self, context, ip_version):
"""Retrieve the default subnetpool for the given IP version."""
filters = {'is_default': [True],
'ip_version': [ip_version]}
subnetpool = self.get_subnetpools(context, filters=filters)
if subnetpool:
return subnetpool[0]
def delete_subnetpool(self, context, id):
"""Delete a subnetpool."""
with context.session.begin(subtransactions=True):
subnetpool = self._get_subnetpool(context, id)
subnets = self._get_subnets_by_subnetpool(context, id)
if subnets:
reason = _("Subnet pool has existing allocations")
raise n_exc.SubnetPoolDeleteError(reason=reason)
context.session.delete(subnetpool)
def _check_mac_addr_update(self, context, port, new_mac, device_owner):
if (device_owner and
device_owner.startswith(constants.DEVICE_OWNER_NETWORK_PREFIX)):
raise n_exc.UnsupportedPortDeviceOwner(
op=_("mac address update"), port_id=id,
device_owner=device_owner)
def create_port_bulk(self, context, ports):
return self._create_bulk('port', context, ports)
def _get_dns_domain(self):
if not cfg.CONF.dns_domain:
return ''
if cfg.CONF.dns_domain.endswith('.'):
return cfg.CONF.dns_domain
return '%s.' % cfg.CONF.dns_domain
def _get_request_dns_name(self, port):
dns_domain = self._get_dns_domain()
        if dns_domain and dns_domain != DNS_DOMAIN_DEFAULT:
return port.get('dns_name', '')
return ''
def _get_dns_names_for_port(self, context, ips, request_dns_name):
dns_assignment = []
dns_domain = self._get_dns_domain()
if request_dns_name:
request_fqdn = request_dns_name
if not request_dns_name.endswith('.'):
request_fqdn = '%s.%s' % (request_dns_name, dns_domain)
for ip in ips:
if request_dns_name:
hostname = request_dns_name
fqdn = request_fqdn
else:
hostname = 'host-%s' % ip['ip_address'].replace(
'.', '-').replace(':', '-')
fqdn = hostname
if dns_domain:
fqdn = '%s.%s' % (hostname, dns_domain)
dns_assignment.append({'ip_address': ip['ip_address'],
'hostname': hostname,
'fqdn': fqdn})
return dns_assignment
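    # Example outcome of the helper above (a sketch; assumes dns_domain is
    # configured as 'example.org' and the request carried no dns_name): a
    # fixed IP of '10.0.0.5' yields
    #     {'ip_address': '10.0.0.5',
    #      'hostname': 'host-10-0-0-5',
    #      'fqdn': 'host-10-0-0-5.example.org.'}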
def _create_port_with_mac(self, context, network_id, port_data,
mac_address):
try:
# since this method could either be used within or outside the
# transaction, use convenience method to avoid passing a flag
with db_api.autonested_transaction(context.session):
db_port = models_v2.Port(mac_address=mac_address, **port_data)
context.session.add(db_port)
return db_port
except db_exc.DBDuplicateEntry:
raise n_exc.MacAddressInUse(net_id=network_id, mac=mac_address)
def _create_port(self, context, network_id, port_data):
max_retries = cfg.CONF.mac_generation_retries
for i in range(max_retries):
mac = self._generate_mac()
try:
return self._create_port_with_mac(
context, network_id, port_data, mac)
except n_exc.MacAddressInUse:
LOG.debug('Generated mac %(mac_address)s exists on '
'network %(network_id)s',
{'mac_address': mac, 'network_id': network_id})
LOG.error(_LE("Unable to generate mac address after %s attempts"),
max_retries)
raise n_exc.MacAddressGenerationFailure(net_id=network_id)
def create_port(self, context, port):
p = port['port']
port_id = p.get('id') or uuidutils.generate_uuid()
network_id = p['network_id']
# NOTE(jkoelker) Get the tenant_id outside of the session to avoid
# unneeded db action if the operation raises
tenant_id = self._get_tenant_id_for_create(context, p)
if p.get('device_owner'):
self._enforce_device_owner_not_router_intf_or_device_id(
context, p.get('device_owner'), p.get('device_id'), tenant_id)
port_data = dict(tenant_id=tenant_id,
name=p['name'],
id=port_id,
network_id=network_id,
admin_state_up=p['admin_state_up'],
status=p.get('status', constants.PORT_STATUS_ACTIVE),
device_id=p['device_id'],
device_owner=p['device_owner'])
if 'dns_name' in p:
request_dns_name = self._get_request_dns_name(p)
port_data['dns_name'] = request_dns_name
with context.session.begin(subtransactions=True):
# Ensure that the network exists.
self._get_network(context, network_id)
# Create the port
if p['mac_address'] is attributes.ATTR_NOT_SPECIFIED:
db_port = self._create_port(context, network_id, port_data)
p['mac_address'] = db_port['mac_address']
else:
db_port = self._create_port_with_mac(
context, network_id, port_data, p['mac_address'])
ips = self.ipam.allocate_ips_for_port_and_store(context, port,
port_id)
            if 'dns_name' in p:
                dns_assignment = []
                if ips:
                    dns_assignment = self._get_dns_names_for_port(
                        context, ips, request_dns_name)
                db_port['dns_assignment'] = dns_assignment
return self._make_port_dict(db_port, process_extensions=False)
def _validate_port_for_update(self, context, db_port, new_port, new_mac):
changed_owner = 'device_owner' in new_port
current_owner = (new_port.get('device_owner') or
db_port['device_owner'])
changed_device_id = new_port.get('device_id') != db_port['device_id']
current_device_id = new_port.get('device_id') or db_port['device_id']
        if (current_owner and changed_device_id) or changed_owner:
self._enforce_device_owner_not_router_intf_or_device_id(
context, current_owner, current_device_id,
db_port['tenant_id'])
if new_mac and new_mac != db_port['mac_address']:
self._check_mac_addr_update(context, db_port,
new_mac, current_owner)
def _get_dns_names_for_updated_port(self, context, original_ips,
original_dns_name, request_dns_name,
changes):
if changes.original or changes.add or changes.remove:
return self._get_dns_names_for_port(
context, changes.original + changes.add,
request_dns_name or original_dns_name)
if original_ips:
return self._get_dns_names_for_port(
context, original_ips,
request_dns_name or original_dns_name)
return []
def update_port(self, context, id, port):
new_port = port['port']
with context.session.begin(subtransactions=True):
port = self._get_port(context, id)
if 'dns-integration' in self.supported_extension_aliases:
original_ips = self._make_fixed_ip_dict(port['fixed_ips'])
original_dns_name = port.get('dns_name', '')
request_dns_name = self._get_request_dns_name(new_port)
if not request_dns_name:
new_port['dns_name'] = ''
new_mac = new_port.get('mac_address')
self._validate_port_for_update(context, port, new_port, new_mac)
changes = self.ipam.update_port_with_ips(context, port,
new_port, new_mac)
if 'dns-integration' in self.supported_extension_aliases:
dns_assignment = self._get_dns_names_for_updated_port(
context, original_ips, original_dns_name,
request_dns_name, changes)
result = self._make_port_dict(port)
# Keep up with fields that changed
if changes.original or changes.add or changes.remove:
result['fixed_ips'] = self._make_fixed_ip_dict(
changes.original + changes.add)
if 'dns-integration' in self.supported_extension_aliases:
result['dns_assignment'] = dns_assignment
return result
def delete_port(self, context, id):
with context.session.begin(subtransactions=True):
self.ipam.delete_port(context, id)
def delete_ports_by_device_id(self, context, device_id, network_id=None):
query = (context.session.query(models_v2.Port.id)
.enable_eagerloads(False)
.filter(models_v2.Port.device_id == device_id))
if network_id:
query = query.filter(models_v2.Port.network_id == network_id)
port_ids = [p[0] for p in query]
for port_id in port_ids:
try:
self.delete_port(context, port_id)
except n_exc.PortNotFound:
# Don't raise if something else concurrently deleted the port
LOG.debug("Ignoring PortNotFound when deleting port '%s'. "
"The port has already been deleted.",
port_id)
def _get_dns_name_for_port_get(self, context, port):
if port['fixed_ips']:
return self._get_dns_names_for_port(
context, port['fixed_ips'],
port['dns_name'])
return []
def get_port(self, context, id, fields=None):
port = self._get_port(context, id)
if (('dns-integration' in self.supported_extension_aliases and
'dns_name' in port)):
port['dns_assignment'] = self._get_dns_name_for_port_get(context,
port)
return self._make_port_dict(port, fields)
def _get_ports_query(self, context, filters=None, sorts=None, limit=None,
marker_obj=None, page_reverse=False):
Port = models_v2.Port
IPAllocation = models_v2.IPAllocation
if not filters:
filters = {}
query = self._model_query(context, Port)
fixed_ips = filters.pop('fixed_ips', {})
ip_addresses = fixed_ips.get('ip_address')
subnet_ids = fixed_ips.get('subnet_id')
if ip_addresses or subnet_ids:
query = query.join(Port.fixed_ips)
if ip_addresses:
query = query.filter(IPAllocation.ip_address.in_(ip_addresses))
if subnet_ids:
query = query.filter(IPAllocation.subnet_id.in_(subnet_ids))
query = self._apply_filters_to_query(query, Port, filters, context)
if limit and page_reverse and sorts:
sorts = [(s[0], not s[1]) for s in sorts]
query = sqlalchemyutils.paginate_query(query, Port, limit,
sorts, marker_obj)
return query
def get_ports(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
marker_obj = self._get_marker_obj(context, 'port', limit, marker)
query = self._get_ports_query(context, filters=filters,
sorts=sorts, limit=limit,
marker_obj=marker_obj,
page_reverse=page_reverse)
items = []
for c in query:
if (('dns-integration' in self.supported_extension_aliases and
'dns_name' in c)):
c['dns_assignment'] = self._get_dns_name_for_port_get(context,
c)
items.append(self._make_port_dict(c, fields))
if limit and page_reverse:
items.reverse()
return items
def get_ports_count(self, context, filters=None):
return self._get_ports_query(context, filters).count()
def _enforce_device_owner_not_router_intf_or_device_id(self, context,
device_owner,
device_id,
tenant_id):
"""Prevent tenants from replacing the device id of router ports with
a router uuid belonging to another tenant.
"""
if device_owner not in constants.ROUTER_INTERFACE_OWNERS:
return
if not context.is_admin:
            # check to make sure device_id does not match another tenant's
            # router.
if device_id:
if hasattr(self, 'get_router'):
try:
ctx_admin = context.elevated()
router = self.get_router(ctx_admin, device_id)
except l3.RouterNotFound:
return
else:
l3plugin = (
manager.NeutronManager.get_service_plugins().get(
service_constants.L3_ROUTER_NAT))
if l3plugin:
try:
ctx_admin = context.elevated()
router = l3plugin.get_router(ctx_admin,
device_id)
except l3.RouterNotFound:
return
else:
                        # raise as the extension doesn't support L3 anyway.
raise n_exc.DeviceIDNotOwnedByTenant(
device_id=device_id)
if tenant_id != router['tenant_id']:
raise n_exc.DeviceIDNotOwnedByTenant(device_id=device_id)
| apache-2.0 | -2,228,443,469,237,160,700 | 46.201748 | 79 | 0.556737 | false |
tmtowtdi/django | mysite/polls/admin.py | 1 | 1401 |
from django.contrib import admin
from polls.models import Question, Choice
class ChoiceInLine( admin.TabularInline ):
model = Choice
extra = 3
class QuestionAdmin( admin.ModelAdmin ):
### By default, the Question object's str() is displayed on the "list of
### questions" page. Tell it to display a little more data.
list_display = ( 'question_text', 'pub_date', 'was_published_recently' )
### This adds a sidebar div to the right that lets the user filter the
### displayed questions - only show those published today, this week, this
### month, etc.
list_filter = [ 'pub_date' ]
### Adds a search box up top
search_fields = [ 'question_text' ]
### The questions displayed will be automatically paginated, by default
### 100 per page. We can change that number per page to whatever we want.
list_per_page = 50
### See polls/models.py for some settings on how we're controlling display
### and sorting of the was_published_recently column.
### Re-order the fields as they display on the admin page, adding a
### fieldset.
fieldsets = [
(None, { 'fields': ['question_text'] }),
('Date Information', { 'fields': ['pub_date' ], 'classes': ['collapse'] }),
]
inlines = [ ChoiceInLine ]
admin.site.register( Question, QuestionAdmin )
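### For reference, a sketch of what polls/models.py might define so that the
### was_published_recently column above sorts and displays nicely. This is
### hypothetical; the real models.py may differ:
###
###     def was_published_recently(self):
###         now = timezone.now()
###         return now - datetime.timedelta(days=1) <= self.pub_date <= now
###     was_published_recently.admin_order_field = 'pub_date'
###     was_published_recently.boolean = True
###     was_published_recently.short_description = 'Published recently?'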
| artistic-2.0 | -2,186,924,659,745,510,700 | 33.170732 | 91 | 0.630978 | false |
bsipocz/astropy | astropy/utils/misc.py | 1 | 40318 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
A "grab bag" of relatively small general-purpose utilities that don't have
a clear module/package to live in.
"""
import abc
import copy
import contextlib
import difflib
import inspect
import json
import os
import signal
import sys
import traceback
import unicodedata
import locale
import threading
import re
from itertools import zip_longest
from contextlib import contextmanager
from collections import defaultdict, OrderedDict
from astropy.utils.decorators import deprecated
__all__ = ['isiterable', 'silence', 'format_exception', 'NumpyRNGContext',
'find_api_page', 'is_path_hidden', 'walk_skip_hidden',
'JsonCustomEncoder', 'indent', 'InheritDocstrings',
'OrderedDescriptor', 'OrderedDescriptorContainer', 'set_locale',
'ShapedLikeNDArray', 'check_broadcast', 'IncompatibleShapeError',
'dtype_bytes_or_chars']
def isiterable(obj):
"""Returns `True` if the given object is iterable."""
try:
iter(obj)
return True
except TypeError:
return False
def indent(s, shift=1, width=4):
"""Indent a block of text. The indentation is applied to each line."""
indented = '\n'.join(' ' * (width * shift) + l if l else ''
for l in s.splitlines())
if s[-1] == '\n':
indented += '\n'
return indented
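# Example (a sketch):
#
#     >>> indent('Hello\nworld')
#     '    Hello\n    world'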
class _DummyFile:
"""A noop writeable object."""
def write(self, s):
pass
@contextlib.contextmanager
def silence():
"""A context manager that silences sys.stdout and sys.stderr."""
old_stdout = sys.stdout
old_stderr = sys.stderr
sys.stdout = _DummyFile()
sys.stderr = _DummyFile()
yield
sys.stdout = old_stdout
sys.stderr = old_stderr
def format_exception(msg, *args, **kwargs):
"""
Given an exception message string, uses new-style formatting arguments
``{filename}``, ``{lineno}``, ``{func}`` and/or ``{text}`` to fill in
information about the exception that occurred. For example:
try:
1/0
except:
raise ZeroDivisionError(
                format_exception('A divide by zero occurred in {filename} at '
                                 'line {lineno} of function {func}.'))
Any additional positional or keyword arguments passed to this function are
also used to format the message.
.. note::
This uses `sys.exc_info` to gather up the information needed to fill
in the formatting arguments. Since `sys.exc_info` is not carried
outside a handled exception, it's not wise to use this
outside of an ``except`` clause - if it is, this will substitute
        '<unknown>' for the 4 formatting arguments.
"""
tb = traceback.extract_tb(sys.exc_info()[2], limit=1)
if len(tb) > 0:
filename, lineno, func, text = tb[0]
else:
filename = lineno = func = text = '<unknown>'
return msg.format(*args, filename=filename, lineno=lineno, func=func,
text=text, **kwargs)
class NumpyRNGContext:
"""
A context manager (for use with the ``with`` statement) that will seed the
numpy random number generator (RNG) to a specific value, and then restore
the RNG state back to whatever it was before.
    This is primarily intended for use in the astropy testing suite, but it
may be useful in ensuring reproducibility of Monte Carlo simulations in a
science context.
Parameters
----------
seed : int
The value to use to seed the numpy RNG
Examples
--------
A typical use case might be::
with NumpyRNGContext(<some seed value you pick>):
from numpy import random
randarr = random.randn(100)
... run your test using `randarr` ...
#Any code using numpy.random at this indent level will act just as it
#would have if it had been before the with statement - e.g. whatever
#the default seed is.
"""
def __init__(self, seed):
self.seed = seed
def __enter__(self):
from numpy import random
self.startstate = random.get_state()
random.seed(self.seed)
def __exit__(self, exc_type, exc_value, traceback):
from numpy import random
random.set_state(self.startstate)
def find_api_page(obj, version=None, openinbrowser=True, timeout=None):
"""
Determines the URL of the API page for the specified object, and
optionally open that page in a web browser.
.. note::
You must be connected to the internet for this to function even if
``openinbrowser`` is `False`, unless you provide a local version of
the documentation to ``version`` (e.g., ``file:///path/to/docs``).
Parameters
----------
obj
The object to open the docs for or its fully-qualified name
(as a str).
version : str
The doc version - either a version number like '0.1', 'dev' for
the development/latest docs, or a URL to point to a specific
location that should be the *base* of the documentation. Defaults to
        latest if you aren't on a release; otherwise, the version you
are on.
openinbrowser : bool
If `True`, the `webbrowser` package will be used to open the doc
page in a new web browser window.
timeout : number, optional
The number of seconds to wait before timing-out the query to
the astropy documentation. If not given, the default python
stdlib timeout will be used.
Returns
-------
url : str
The loaded URL
Raises
------
ValueError
If the documentation can't be found
"""
import webbrowser
import urllib.request
from zlib import decompress
if (not isinstance(obj, str) and
hasattr(obj, '__module__') and
hasattr(obj, '__name__')):
obj = obj.__module__ + '.' + obj.__name__
elif inspect.ismodule(obj):
obj = obj.__name__
if version is None:
from astropy import version
if version.release:
version = 'v' + version.version
else:
version = 'dev'
if '://' in version:
if version.endswith('index.html'):
baseurl = version[:-10]
elif version.endswith('/'):
baseurl = version
else:
baseurl = version + '/'
elif version == 'dev' or version == 'latest':
baseurl = 'http://devdocs.astropy.org/'
else:
baseurl = f'https://docs.astropy.org/en/{version}/'
# Custom request headers; see
# https://github.com/astropy/astropy/issues/8990
req = urllib.request.Request(
baseurl + 'objects.inv', headers={'User-Agent': f'Astropy/{version}'})
if timeout is None:
uf = urllib.request.urlopen(req)
else:
uf = urllib.request.urlopen(req, timeout=timeout)
try:
oiread = uf.read()
# need to first read/remove the first four lines, which have info before
# the compressed section with the actual object inventory
idx = -1
headerlines = []
for _ in range(4):
oldidx = idx
idx = oiread.index(b'\n', oldidx + 1)
headerlines.append(oiread[(oldidx+1):idx].decode('utf-8'))
# intersphinx version line, project name, and project version
ivers, proj, vers, compr = headerlines
if 'The remainder of this file is compressed using zlib' not in compr:
            raise ValueError('The file downloaded from {} does not seem to be '
'the usual Sphinx objects.inv format. Maybe it '
'has changed?'.format(baseurl + 'objects.inv'))
compressed = oiread[(idx+1):]
finally:
uf.close()
decompressed = decompress(compressed).decode('utf-8')
resurl = None
for l in decompressed.strip().splitlines():
ls = l.split()
name = ls[0]
loc = ls[3]
if loc.endswith('$'):
loc = loc[:-1] + name
if name == obj:
resurl = baseurl + loc
break
if resurl is None:
raise ValueError(f'Could not find the docs for the object {obj}')
elif openinbrowser:
webbrowser.open(resurl)
return resurl
def signal_number_to_name(signum):
"""
Given an OS signal number, returns a signal name. If the signal
number is unknown, returns ``'UNKNOWN'``.
"""
# Since these numbers and names are platform specific, we use the
# builtin signal module and build a reverse mapping.
signal_to_name_map = dict((k, v) for v, k in signal.__dict__.items()
if v.startswith('SIG'))
return signal_to_name_map.get(signum, 'UNKNOWN')
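# For instance (platform-dependent; these values assume a typical POSIX
# system):
#
#     >>> signal_number_to_name(2)
#     'SIGINT'
#     >>> signal_number_to_name(99999)
#     'UNKNOWN'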
if sys.platform == 'win32':
import ctypes
def _has_hidden_attribute(filepath):
"""
Returns True if the given filepath has the hidden attribute on
MS-Windows. Based on a post here:
https://stackoverflow.com/questions/284115/cross-platform-hidden-file-detection
"""
if isinstance(filepath, bytes):
filepath = filepath.decode(sys.getfilesystemencoding())
try:
attrs = ctypes.windll.kernel32.GetFileAttributesW(filepath)
result = bool(attrs & 2) and attrs != -1
except AttributeError:
result = False
return result
else:
def _has_hidden_attribute(filepath):
return False
def is_path_hidden(filepath):
"""
Determines if a given file or directory is hidden.
Parameters
----------
filepath : str
The path to a file or directory
Returns
-------
hidden : bool
Returns `True` if the file is hidden
"""
name = os.path.basename(os.path.abspath(filepath))
if isinstance(name, bytes):
is_dotted = name.startswith(b'.')
else:
is_dotted = name.startswith('.')
return is_dotted or _has_hidden_attribute(filepath)
def walk_skip_hidden(top, onerror=None, followlinks=False):
"""
A wrapper for `os.walk` that skips hidden files and directories.
This function does not have the parameter ``topdown`` from
`os.walk`: the directories must always be recursed top-down when
using this function.
See also
--------
os.walk : For a description of the parameters
"""
for root, dirs, files in os.walk(
top, topdown=True, onerror=onerror,
followlinks=followlinks):
# These lists must be updated in-place so os.walk will skip
# hidden directories
dirs[:] = [d for d in dirs if not is_path_hidden(d)]
files[:] = [f for f in files if not is_path_hidden(f)]
yield root, dirs, files
class JsonCustomEncoder(json.JSONEncoder):
"""Support for data types that JSON default encoder
does not do.
This includes:
* Numpy array or number
* Complex number
* Set
* Bytes
* astropy.UnitBase
* astropy.Quantity
Examples
--------
>>> import json
>>> import numpy as np
>>> from astropy.utils.misc import JsonCustomEncoder
>>> json.dumps(np.arange(3), cls=JsonCustomEncoder)
'[0, 1, 2]'
"""
def default(self, obj):
from astropy import units as u
import numpy as np
if isinstance(obj, u.Quantity):
return dict(value=obj.value, unit=obj.unit.to_string())
if isinstance(obj, (np.number, np.ndarray)):
return obj.tolist()
elif isinstance(obj, complex):
return [obj.real, obj.imag]
elif isinstance(obj, set):
return list(obj)
elif isinstance(obj, bytes): # pragma: py3
return obj.decode()
elif isinstance(obj, (u.UnitBase, u.FunctionUnitBase)):
if obj == u.dimensionless_unscaled:
obj = 'dimensionless_unit'
else:
return obj.to_string()
return json.JSONEncoder.default(self, obj)
def strip_accents(s):
"""
Remove accents from a Unicode string.
This helps with matching "ångström" to "angstrom", for example.
"""
return ''.join(
c for c in unicodedata.normalize('NFD', s)
if unicodedata.category(c) != 'Mn')
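# Example (a sketch):
#
#     >>> strip_accents('ångström')
#     'angstrom'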
def did_you_mean(s, candidates, n=3, cutoff=0.8, fix=None):
"""
    When a string isn't found in a set of candidates, we can be nice
    and provide a list of alternatives in the exception. This
convenience function helps to format that part of the exception.
Parameters
----------
s : str
candidates : sequence of str or dict of str keys
n : int
The maximum number of results to include. See
`difflib.get_close_matches`.
cutoff : float
In the range [0, 1]. Possibilities that don't score at least
        that similar to ``s`` are ignored. See
`difflib.get_close_matches`.
fix : callable
A callable to modify the results after matching. It should
take a single string and return a sequence of strings
containing the fixed matches.
Returns
-------
message : str
Returns the string "Did you mean X, Y, or Z?", or the empty
string if no alternatives were found.
"""
if isinstance(s, str):
s = strip_accents(s)
s_lower = s.lower()
# Create a mapping from the lower case name to all capitalization
# variants of that name.
candidates_lower = {}
for candidate in candidates:
candidate_lower = candidate.lower()
candidates_lower.setdefault(candidate_lower, [])
candidates_lower[candidate_lower].append(candidate)
# The heuristic here is to first try "singularizing" the word. If
# that doesn't match anything use difflib to find close matches in
# original, lower and upper case.
if s_lower.endswith('s') and s_lower[:-1] in candidates_lower:
matches = [s_lower[:-1]]
else:
matches = difflib.get_close_matches(
s_lower, candidates_lower, n=n, cutoff=cutoff)
if len(matches):
capitalized_matches = set()
for match in matches:
capitalized_matches.update(candidates_lower[match])
matches = capitalized_matches
if fix is not None:
mapped_matches = []
for match in matches:
mapped_matches.extend(fix(match))
matches = mapped_matches
matches = list(set(matches))
matches = sorted(matches)
if len(matches) == 1:
matches = matches[0]
else:
matches = (', '.join(matches[:-1]) + ' or ' +
matches[-1])
return f'Did you mean {matches}?'
return ''
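# Example (a sketch):
#
#     >>> did_you_mean('quntity', ['quantity', 'quality'])
#     'Did you mean quantity?'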
@deprecated('4.0', alternative='Sphinx>=1.7 automatically inherits docstring')
class InheritDocstrings(type):
"""
This metaclass makes methods of a class automatically have their
docstrings filled in from the methods they override in the base
class.
If the class uses multiple inheritance, the docstring will be
chosen from the first class in the bases list, in the same way as
methods are normally resolved in Python. If this results in
selecting the wrong docstring, the docstring will need to be
explicitly included on the method.
For example::
>>> import warnings
>>> from astropy.utils.misc import InheritDocstrings
>>> with warnings.catch_warnings():
... # Ignore deprecation warning
... warnings.simplefilter('ignore')
... class A(metaclass=InheritDocstrings):
... def wiggle(self):
... "Wiggle the thingamajig"
... pass
... class B(A):
... def wiggle(self):
... pass
>>> B.wiggle.__doc__
    'Wiggle the thingamajig'
"""
def __init__(cls, name, bases, dct):
def is_public_member(key):
return (
(key.startswith('__') and key.endswith('__')
and len(key) > 4) or
not key.startswith('_'))
for key, val in dct.items():
if ((inspect.isfunction(val) or inspect.isdatadescriptor(val)) and
is_public_member(key) and
val.__doc__ is None):
for base in cls.__mro__[1:]:
super_method = getattr(base, key, None)
if super_method is not None:
val.__doc__ = super_method.__doc__
break
super().__init__(name, bases, dct)
class OrderedDescriptor(metaclass=abc.ABCMeta):
"""
Base class for descriptors whose order in the class body should be
preserved. Intended for use in concert with the
`OrderedDescriptorContainer` metaclass.
Subclasses of `OrderedDescriptor` must define a value for a class attribute
called ``_class_attribute_``. This is the name of a class attribute on the
*container* class for these descriptors, which will be set to an
`~collections.OrderedDict` at class creation time. This
`~collections.OrderedDict` will contain a mapping of all class attributes
that were assigned instances of the `OrderedDescriptor` subclass, to the
instances themselves. See the documentation for
`OrderedDescriptorContainer` for a concrete example.
Optionally, subclasses of `OrderedDescriptor` may define a value for a
class attribute called ``_name_attribute_``. This should be the name of
an attribute on instances of the subclass. When specified, during
creation of a class containing these descriptors, the name attribute on
each instance will be set to the name of the class attribute it was
assigned to on the class.
.. note::
Although this class is intended for use with *descriptors* (i.e.
classes that define any of the ``__get__``, ``__set__``, or
``__delete__`` magic methods), this base class is not itself a
descriptor, and technically this could be used for classes that are
not descriptors too. However, use with descriptors is the original
intended purpose.
"""
# This id increments for each OrderedDescriptor instance created, so they
# are always ordered in the order they were created. Class bodies are
# guaranteed to be executed from top to bottom. Not sure if this is
# thread-safe though.
_nextid = 1
@property
@abc.abstractmethod
def _class_attribute_(self):
"""
Subclasses should define this attribute to the name of an attribute on
classes containing this subclass. That attribute will contain the mapping
of all instances of that `OrderedDescriptor` subclass defined in the class
body. If the same descriptor needs to be used with different classes,
each with different names of this attribute, multiple subclasses will be
needed.
"""
_name_attribute_ = None
"""
Subclasses may optionally define this attribute to specify the name of an
attribute on instances of the class that should be filled with the
instance's attribute name at class creation time.
"""
def __init__(self, *args, **kwargs):
# The _nextid attribute is shared across all subclasses so that
# different subclasses of OrderedDescriptors can be sorted correctly
# between themselves
self.__order = OrderedDescriptor._nextid
OrderedDescriptor._nextid += 1
super().__init__()
def __lt__(self, other):
"""
Defined for convenient sorting of `OrderedDescriptor` instances, which
are defined to sort in their creation order.
"""
if (isinstance(self, OrderedDescriptor) and
isinstance(other, OrderedDescriptor)):
try:
return self.__order < other.__order
except AttributeError:
raise RuntimeError(
'Could not determine ordering for {} and {}; at least '
'one of them is not calling super().__init__ in its '
'__init__.'.format(self, other))
else:
return NotImplemented
class OrderedDescriptorContainer(type):
"""
Classes should use this metaclass if they wish to use `OrderedDescriptor`
attributes, which are class attributes that "remember" the order in which
they were defined in the class body.
Every subclass of `OrderedDescriptor` has an attribute called
``_class_attribute_``. For example, if we have
.. code:: python
class ExampleDecorator(OrderedDescriptor):
_class_attribute_ = '_examples_'
Then when a class with the `OrderedDescriptorContainer` metaclass is
created, it will automatically be assigned a class attribute ``_examples_``
referencing an `~collections.OrderedDict` containing all instances of
``ExampleDecorator`` defined in the class body, mapped to by the names of
the attributes they were assigned to.
When subclassing a class with this metaclass, the descriptor dict (i.e.
``_examples_`` in the above example) will *not* contain descriptors
inherited from the base class. That is, this only works by default with
decorators explicitly defined in the class body. However, the subclass
*may* define an attribute ``_inherit_decorators_`` which lists
`OrderedDescriptor` classes that *should* be added from base classes.
See the examples section below for an example of this.
Examples
--------
>>> from astropy.utils import OrderedDescriptor, OrderedDescriptorContainer
>>> class TypedAttribute(OrderedDescriptor):
... \"\"\"
... Attributes that may only be assigned objects of a specific type,
... or subclasses thereof. For some reason we care about their order.
... \"\"\"
...
... _class_attribute_ = 'typed_attributes'
... _name_attribute_ = 'name'
... # A default name so that instances not attached to a class can
... # still be repr'd; useful for debugging
... name = '<unbound>'
...
... def __init__(self, type):
... # Make sure not to forget to call the super __init__
... super().__init__()
... self.type = type
...
... def __get__(self, obj, objtype=None):
... if obj is None:
... return self
... if self.name in obj.__dict__:
... return obj.__dict__[self.name]
... else:
... raise AttributeError(self.name)
...
... def __set__(self, obj, value):
... if not isinstance(value, self.type):
... raise ValueError('{0}.{1} must be of type {2!r}'.format(
... obj.__class__.__name__, self.name, self.type))
... obj.__dict__[self.name] = value
...
... def __delete__(self, obj):
... if self.name in obj.__dict__:
... del obj.__dict__[self.name]
... else:
... raise AttributeError(self.name)
...
... def __repr__(self):
... if isinstance(self.type, tuple) and len(self.type) > 1:
... typestr = '({0})'.format(
... ', '.join(t.__name__ for t in self.type))
... else:
... typestr = self.type.__name__
... return '<{0}(name={1}, type={2})>'.format(
... self.__class__.__name__, self.name, typestr)
...
Now let's create an example class that uses this ``TypedAttribute``::
>>> class Point2D(metaclass=OrderedDescriptorContainer):
... x = TypedAttribute((float, int))
... y = TypedAttribute((float, int))
...
... def __init__(self, x, y):
... self.x, self.y = x, y
...
>>> p1 = Point2D(1.0, 2.0)
>>> p1.x
1.0
>>> p1.y
2.0
>>> p2 = Point2D('a', 'b') # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
    ValueError: Point2D.x must be of type (float, int)
We see that ``TypedAttribute`` works more or less as advertised, but
there's nothing special about that. Let's see what
`OrderedDescriptorContainer` did for us::
>>> Point2D.typed_attributes
OrderedDict([('x', <TypedAttribute(name=x, type=(float, int))>),
('y', <TypedAttribute(name=y, type=(float, int))>)])
If we create a subclass, it does *not* by default add inherited descriptors
to ``typed_attributes``::
>>> class Point3D(Point2D):
... z = TypedAttribute((float, int))
...
>>> Point3D.typed_attributes
OrderedDict([('z', <TypedAttribute(name=z, type=(float, int))>)])
However, if we specify ``_inherit_descriptors_`` from ``Point2D`` then
it will do so::
>>> class Point3D(Point2D):
... _inherit_descriptors_ = (TypedAttribute,)
... z = TypedAttribute((float, int))
...
>>> Point3D.typed_attributes
OrderedDict([('x', <TypedAttribute(name=x, type=(float, int))>),
('y', <TypedAttribute(name=y, type=(float, int))>),
('z', <TypedAttribute(name=z, type=(float, int))>)])
.. note::
Hopefully it is clear from these examples that this construction
also allows a class of type `OrderedDescriptorContainer` to use
multiple different `OrderedDescriptor` classes simultaneously.
"""
_inherit_descriptors_ = ()
def __init__(cls, cls_name, bases, members):
descriptors = defaultdict(list)
seen = set()
inherit_descriptors = ()
descr_bases = {}
for mro_cls in cls.__mro__:
for name, obj in mro_cls.__dict__.items():
if name in seen:
# Checks if we've already seen an attribute of the given
# name (if so it will override anything of the same name in
# any base class)
continue
seen.add(name)
if (not isinstance(obj, OrderedDescriptor) or
(inherit_descriptors and
not isinstance(obj, inherit_descriptors))):
# The second condition applies when checking any
# subclasses, to see if we can inherit any descriptors of
# the given type from subclasses (by default inheritance is
# disabled unless the class has _inherit_descriptors_
# defined)
continue
if obj._name_attribute_ is not None:
setattr(obj, obj._name_attribute_, name)
# Don't just use the descriptor's class directly; instead go
# through its MRO and find the class on which _class_attribute_
# is defined directly. This way subclasses of some
# OrderedDescriptor *may* override _class_attribute_ and have
# its own _class_attribute_, but by default all subclasses of
# some OrderedDescriptor are still grouped together
# TODO: It might be worth clarifying this in the docs
if obj.__class__ not in descr_bases:
for obj_cls_base in obj.__class__.__mro__:
if '_class_attribute_' in obj_cls_base.__dict__:
descr_bases[obj.__class__] = obj_cls_base
descriptors[obj_cls_base].append((obj, name))
break
else:
# Make sure to put obj first for sorting purposes
obj_cls_base = descr_bases[obj.__class__]
descriptors[obj_cls_base].append((obj, name))
if not getattr(mro_cls, '_inherit_descriptors_', False):
# If _inherit_descriptors_ is undefined then we don't inherit
# any OrderedDescriptors from any of the base classes, and
# there's no reason to continue through the MRO
break
else:
inherit_descriptors = mro_cls._inherit_descriptors_
for descriptor_cls, instances in descriptors.items():
instances.sort()
instances = OrderedDict((key, value) for value, key in instances)
setattr(cls, descriptor_cls._class_attribute_, instances)
super(OrderedDescriptorContainer, cls).__init__(cls_name, bases,
members)
def get_parameters(members):
"""
    Looks for ordered descriptors in a class definition and copies
    them into a new class attribute, ``_parameters_``: an ordered
    dictionary of these objects keyed by their attribute names.
"""
pdict = OrderedDict()
for name, obj in members.items():
if (not isinstance(obj, OrderedDescriptor)):
continue
if obj._name_attribute_ is not None:
setattr(obj, '_name', name)
pdict[name] = obj
# members['_parameter_vals_'] = pdict
members['_parameters_'] = pdict
LOCALE_LOCK = threading.Lock()
@contextmanager
def set_locale(name):
"""
Context manager to temporarily set the locale to ``name``.
An example is setting locale to "C" so that the C strtod()
function will use "." as the decimal point to enable consistent
numerical string parsing.
Note that one cannot nest multiple set_locale() context manager
statements as this causes a threading lock.
    This code was taken from https://stackoverflow.com/questions/18593661/how-do-i-strftime-a-date-object-in-a-different-locale.
Parameters
    ----------
name : str
Locale name, e.g. "C" or "fr_FR".
"""
name = str(name)
with LOCALE_LOCK:
saved = locale.setlocale(locale.LC_ALL)
if saved == name:
# Don't do anything if locale is already the requested locale
yield
else:
try:
locale.setlocale(locale.LC_ALL, name)
yield
finally:
locale.setlocale(locale.LC_ALL, saved)
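# Illustrative usage (added example, not part of the original module):
#
#     >>> import locale
#     >>> with set_locale('C'):
#     ...     locale.format_string('%g', 1.5)  # '.' as the decimal point
#     '1.5'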
class ShapedLikeNDArray(metaclass=abc.ABCMeta):
"""Mixin class to provide shape-changing methods.
The class proper is assumed to have some underlying data, which are arrays
or array-like structures. It must define a ``shape`` property, which gives
the shape of those data, as well as an ``_apply`` method that creates a new
instance in which a `~numpy.ndarray` method has been applied to those.
Furthermore, for consistency with `~numpy.ndarray`, it is recommended to
define a setter for the ``shape`` property, which, like the
`~numpy.ndarray.shape` property allows in-place reshaping the internal data
(and, unlike the ``reshape`` method raises an exception if this is not
possible).
This class also defines default implementations for ``ndim`` and ``size``
properties, calculating those from the ``shape``. These can be overridden
by subclasses if there are faster ways to obtain those numbers.
"""
# Note to developers: if new methods are added here, be sure to check that
# they work properly with the classes that use this, such as Time and
# BaseRepresentation, i.e., look at their ``_apply`` methods and add
# relevant tests. This is particularly important for methods that imply
# copies rather than views of data (see the special-case treatment of
# 'flatten' in Time).
@property
@abc.abstractmethod
def shape(self):
"""The shape of the instance and underlying arrays."""
@abc.abstractmethod
    def _apply(self, method, *args, **kwargs):
"""Create a new instance, with ``method`` applied to underlying data.
The method is any of the shape-changing methods for `~numpy.ndarray`
(``reshape``, ``swapaxes``, etc.), as well as those picking particular
elements (``__getitem__``, ``take``, etc.). It will be applied to the
underlying arrays (e.g., ``jd1`` and ``jd2`` in `~astropy.time.Time`),
with the results used to create a new instance.
Parameters
----------
method : str
Method to be applied to the instance's internal data arrays.
args : tuple
Any positional arguments for ``method``.
kwargs : dict
Any keyword arguments for ``method``.
"""
@property
def ndim(self):
"""The number of dimensions of the instance and underlying arrays."""
return len(self.shape)
@property
def size(self):
"""The size of the object, as calculated from its shape."""
size = 1
for sh in self.shape:
size *= sh
return size
@property
def isscalar(self):
return self.shape == ()
def __len__(self):
if self.isscalar:
raise TypeError("Scalar {!r} object has no len()"
.format(self.__class__.__name__))
return self.shape[0]
def __bool__(self):
"""Any instance should evaluate to True, except when it is empty."""
return self.size > 0
def __getitem__(self, item):
try:
return self._apply('__getitem__', item)
except IndexError:
if self.isscalar:
raise TypeError('scalar {!r} object is not subscriptable.'
.format(self.__class__.__name__))
else:
raise
def __iter__(self):
if self.isscalar:
raise TypeError('scalar {!r} object is not iterable.'
.format(self.__class__.__name__))
# We cannot just write a generator here, since then the above error
# would only be raised once we try to use the iterator, rather than
# upon its definition using iter(self).
def self_iter():
for idx in range(len(self)):
yield self[idx]
return self_iter()
def copy(self, *args, **kwargs):
"""Return an instance containing copies of the internal data.
Parameters are as for :meth:`~numpy.ndarray.copy`.
"""
return self._apply('copy', *args, **kwargs)
def reshape(self, *args, **kwargs):
"""Returns an instance containing the same data with a new shape.
Parameters are as for :meth:`~numpy.ndarray.reshape`. Note that it is
not always possible to change the shape of an array without copying the
data (see :func:`~numpy.reshape` documentation). If you want an error
to be raise if the data is copied, you should assign the new shape to
the shape attribute (note: this may not be implemented for all classes
using ``ShapedLikeNDArray``).
"""
return self._apply('reshape', *args, **kwargs)
def ravel(self, *args, **kwargs):
"""Return an instance with the array collapsed into one dimension.
Parameters are as for :meth:`~numpy.ndarray.ravel`. Note that it is
not always possible to unravel an array without copying the data.
        If you want an error to be raised if the data is copied, you
        should assign shape ``(-1,)`` to the shape attribute.
"""
return self._apply('ravel', *args, **kwargs)
def flatten(self, *args, **kwargs):
"""Return a copy with the array collapsed into one dimension.
Parameters are as for :meth:`~numpy.ndarray.flatten`.
"""
return self._apply('flatten', *args, **kwargs)
def transpose(self, *args, **kwargs):
"""Return an instance with the data transposed.
Parameters are as for :meth:`~numpy.ndarray.transpose`. All internal
data are views of the data of the original.
"""
return self._apply('transpose', *args, **kwargs)
@property
def T(self):
"""Return an instance with the data transposed.
Parameters are as for :attr:`~numpy.ndarray.T`. All internal
data are views of the data of the original.
"""
if self.ndim < 2:
return self
else:
return self.transpose()
def swapaxes(self, *args, **kwargs):
"""Return an instance with the given axes interchanged.
Parameters are as for :meth:`~numpy.ndarray.swapaxes`:
``axis1, axis2``. All internal data are views of the data of the
original.
"""
return self._apply('swapaxes', *args, **kwargs)
def diagonal(self, *args, **kwargs):
"""Return an instance with the specified diagonals.
Parameters are as for :meth:`~numpy.ndarray.diagonal`. All internal
data are views of the data of the original.
"""
return self._apply('diagonal', *args, **kwargs)
def squeeze(self, *args, **kwargs):
"""Return an instance with single-dimensional shape entries removed
Parameters are as for :meth:`~numpy.ndarray.squeeze`. All internal
data are views of the data of the original.
"""
return self._apply('squeeze', *args, **kwargs)
def take(self, indices, axis=None, mode='raise'):
"""Return a new instance formed from the elements at the given indices.
Parameters are as for :meth:`~numpy.ndarray.take`, except that,
obviously, no output array can be given.
"""
return self._apply('take', indices, axis=axis, mode=mode)
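# Minimal sketch of a ShapedLikeNDArray subclass (added example, not part of
# the original module; it assumes a single underlying ndarray in ``_data``):
#
#     import numpy as np
#
#     class ArrayHolder(ShapedLikeNDArray):
#         def __init__(self, data):
#             self._data = np.asarray(data)
#
#         @property
#         def shape(self):
#             return self._data.shape
#
#         def _apply(self, method, *args, **kwargs):
#             # Apply the ndarray method to the underlying data and wrap
#             # the result in a new instance.
#             return self.__class__(getattr(self._data, method)(*args, **kwargs))
#
# With this, ``ArrayHolder(np.arange(6)).reshape(2, 3)`` returns a new
# ArrayHolder of shape (2, 3).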
class IncompatibleShapeError(ValueError):
def __init__(self, shape_a, shape_a_idx, shape_b, shape_b_idx):
super().__init__(shape_a, shape_a_idx, shape_b, shape_b_idx)
def check_broadcast(*shapes):
"""
Determines whether two or more Numpy arrays can be broadcast with each
other based on their shape tuple alone.
Parameters
----------
*shapes : tuple
All shapes to include in the comparison. If only one shape is given it
is passed through unmodified. If no shapes are given returns an empty
`tuple`.
Returns
-------
broadcast : `tuple`
If all shapes are mutually broadcastable, returns a tuple of the full
broadcast shape.
"""
if len(shapes) == 0:
return ()
elif len(shapes) == 1:
return shapes[0]
reversed_shapes = (reversed(shape) for shape in shapes)
full_shape = []
for dims in zip_longest(*reversed_shapes, fillvalue=1):
max_dim = 1
max_dim_idx = None
for idx, dim in enumerate(dims):
if dim == 1:
continue
if max_dim == 1:
# The first dimension of size greater than 1
max_dim = dim
max_dim_idx = idx
elif dim != max_dim:
raise IncompatibleShapeError(
shapes[max_dim_idx], max_dim_idx, shapes[idx], idx)
full_shape.append(max_dim)
return tuple(full_shape[::-1])
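# Illustrative usage (added example, not part of the original module):
#
#     >>> check_broadcast((2, 3), (3,), (1, 3))
#     (2, 3)
#
# Mutually incompatible shapes such as (2, 3) and (4,) raise
# IncompatibleShapeError instead.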
def dtype_bytes_or_chars(dtype):
"""
Parse the number out of a dtype.str value like '<U5' or '<f8'.
See #5819 for discussion on the need for this function for getting
the number of characters corresponding to a string dtype.
Parameters
----------
dtype : numpy dtype object
Input dtype
Returns
-------
bytes_or_chars : int or None
        Bytes (for numeric types) or characters (for string types)
"""
match = re.search(r'(\d+)$', dtype.str)
out = int(match.group(1)) if match else None
return out
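# Illustrative usage (added example, not part of the original module):
#
#     >>> import numpy as np
#     >>> dtype_bytes_or_chars(np.dtype('<f8'))
#     8
#     >>> dtype_bytes_or_chars(np.dtype('<U5'))
#     5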
def pizza(): # pragma: no cover
"""
Open browser loaded with pizza options near you.
*Disclaimers: Payments not included. Astropy is not
responsible for any liability from using this function.*
.. note:: Accuracy depends on your browser settings.
"""
import webbrowser
webbrowser.open('https://www.google.com/search?q=pizza+near+me')
| bsd-3-clause | -8,344,829,424,143,748,000 | 33.369991 | 124 | 0.597331 | false |
CoreSecurity/pysap | pysap/utils/fields.py | 1 | 12226 | # ===========
# pysap - Python library for crafting SAP's network protocols packets
#
# SECUREAUTH LABS. Copyright (C) 2021 SecureAuth Corporation. All rights reserved.
#
# The library was designed and developed by Martin Gallo from
# the SecureAuth's Innovation Labs team.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# ==============
# Standard imports
import struct
from datetime import datetime
# External imports
from scapy.config import conf
from scapy.packet import Packet
from scapy.asn1fields import (ASN1F_CHOICE, ASN1F_field, ASN1_Error, ASN1F_badsequence, BER_Decoding_Error)
from scapy.volatile import (RandNum, RandTermString, RandBin)
from scapy.fields import (MultiEnumField, StrLenField, Field, StrFixedLenField, StrField, PacketListField, LongField)
def saptimestamp_to_datetime(timestamp):
"""Converts a timestamp in "SAP format" to a datetime object. Time zone
    appears to be fixed at GMT+1."""
return datetime.utcfromtimestamp((int(timestamp) & 0xFFFFFFFF) + 1000000000)
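# Illustrative usage (added example, not part of the original library):
# the low 32 bits are an offset from the fixed base of 1000000000 seconds.
#
#     >>> saptimestamp_to_datetime(0)
#     datetime.datetime(2001, 9, 9, 1, 46, 40)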
class PacketNoPadded(Packet):
"""Regular scapy packet with no padding.
"""
def extract_padding(self, s):
return '', s
class RandByteReduced(RandNum):
"""RandByte that only returns random values between 0 and x2a. Used while
performing some fuzz to reduce the test cases space.
"""
def __init__(self):
RandNum.__init__(self, 0, 0x2a)
class ByteMultiEnumKeysField(MultiEnumField):
"""MultiEnumField that picks a reduced number of values. Used for fuzzing
Byte fields with reduced number of values.
"""
def randval(self):
return RandByteReduced()
class MutablePacketField(StrLenField):
"""Packet field that mutates the class according to a list of evaluators.
The evaluators are run against the packet and given to a class getter.
If the class can't be found, the field is treated as a StrLenField.
"""
__slots__ = ["length_from", "evaluators", "_get_class"]
def __init__(self, name, default, length_from, get_class, evaluators=None):
"""
:param length_from: function to obtain the field length
:type length_from: C{callable}
:param get_class: function to obtain the class
:type get_class: C{callable}
:param evaluators: evaluators
:type evaluators: ``list`` of C{callable}
"""
StrLenField.__init__(self, name, default, length_from=length_from)
self.evaluators = evaluators or []
self._get_class = get_class
def get_class(self, pkt):
# Run the evaluators on the actual packet
values = [evaluator(pkt) for evaluator in self.evaluators]
# Return the class using the function provided
return self._get_class(pkt, *values)
def i2m(self, pkt, i):
cls = self.get_class(pkt)
if cls is not None:
return str(i)
else:
return StrLenField.i2m(self, pkt, i)
def m2i(self, pkt, m):
cls = self.get_class(pkt)
if cls is not None:
return cls(m)
else:
return StrLenField.m2i(self, pkt, m)
class StrNullFixedLenField(StrFixedLenField):
"""Packet field that has a fixed length and is conditionally null-terminated.
"""
__slots__ = ["length_from", "max_length", "null_terminated"]
def __init__(self, name, default, length=None, length_from=None, max_length=None, null_terminated=None):
if null_terminated:
self.null_terminated = null_terminated
else:
self.null_terminated = lambda pkt: True
self.max_length = max_length or 200
StrFixedLenField.__init__(self, name, default, length=length, length_from=length_from)
def i2repr(self, pkt, v):
if self.null_terminated(pkt):
if type(v) is str:
v = v.rstrip("\0")
return repr(v)
return StrFixedLenField.i2repr(self, pkt, v)
def getfield(self, pkt, s):
if self.null_terminated(pkt):
l = self.length_from(pkt) - 1
return s[l + 1:], self.m2i(pkt, s[:l])
return StrFixedLenField.getfield(self, pkt, s)
def addfield(self, pkt, s, val):
if self.null_terminated(pkt):
l = self.length_from(pkt) - 1
return s + struct.pack("%is" % l, self.i2m(pkt, val)) + "\x00"
return StrFixedLenField.addfield(self, pkt, s, val)
def randval(self):
if self.null_terminated:
try:
l = self.length_from(None) - 1
except:
l = RandTermString(RandNum(0, self.max_length), "\x00")
return RandBin(l)
return StrFixedLenField.randval(self)
class StrFixedLenPaddedField(StrFixedLenField):
"""Packet field that has a fixed length and is padded with a
given character.
"""
__slots__ = ["length_from", "padd"]
def __init__(self, name, default, length=None, length_from=None, padd=" "):
StrFixedLenField.__init__(self, name, default, length, length_from)
self.padd = padd
def getfield(self, pkt, s):
l = self.length_from(pkt)
return s[l:], self.m2i(pkt, s[:l])
def addfield(self, pkt, s, val):
l = self.length_from(pkt)
val += self.padd * l
return StrFixedLenField.addfield(self, pkt, s, val)
class StrNullFixedLenPaddedField(StrFixedLenField):
"""Packet field that has a fixed length and is padded with a
given character and null terminated.
"""
__slots__ = ["length_from", "padd"]
def __init__(self, name, default, length=None, length_from=None, padd=" "):
StrFixedLenField.__init__(self, name, default, length, length_from)
self.padd = padd
def getfield(self, pkt, s):
l = self.length_from(pkt)
lz = s.find("\x00")
        if 0 <= lz < l:
return s[l + 1:], self.m2i(pkt, s[:lz])
return s[l + 1:], self.m2i(pkt, s[:l])
def addfield(self, pkt, s, val):
l = self.length_from(pkt)
val += self.padd * l
return StrFixedLenField.addfield(self, pkt, s, val)
class IntToStrField(Field):
"""Custom field from int to str values, with a variable length
"""
__slots__ = ["length", "format"]
def __init__(self, name, default, length=11):
"""Initialize the field with a variable length. The 'machine'
representation is a string field and the 'internal' repr.
is a numeric value.
"""
Field.__init__(self, name, default, "%ds" % length)
# Stores the length of the field
self.length = length
# Stores the conversion format between representations
self.format = "%" + "%d" % length + "d"
def m2i(self, pkt, x):
return str(x)
def i2m(self, pkt, x):
return self.format % int(x)
def i2count(self, pkt, x):
return x
class StrEncodedPaddedField(StrField):
__slots__ = ["remain", "encoding", "padd"]
def __init__(self, name, default, encoding="utf-16", padd="\x0c",
fmt="H", remain=0):
StrField.__init__(self, name, default, fmt, remain)
self.encoding = encoding
self.padd = padd
def h2i(self, pkt, x):
if x:
x = x.encode(self.encoding)
return x
def i2h(self, pkt, x):
if x:
x = x.decode(self.encoding)
return x
def addfield(self, pkt, s, val):
return s + self.i2m(pkt, val) + self.padd
def getfield(self, pkt, s):
l = s.find(self.padd)
if l < 0:
return "", s
return s[l + 1:], self.m2i(pkt, s[:l])
class PacketListStopField(PacketListField):
"""Custom field that contains a list of packets until a 'stop' condition is met.
"""
__slots__ = ["count_from", "length_from", "stop"]
def __init__(self, name, default, cls, count_from=None, length_from=None, stop=None):
PacketListField.__init__(self, name, default, cls, count_from=count_from, length_from=length_from)
self.stop = stop
def getfield(self, pkt, s):
c = l = None
if self.length_from is not None:
l = self.length_from(pkt)
elif self.count_from is not None:
c = self.count_from(pkt)
lst = []
ret = ""
remain = s
if l is not None:
remain, ret = s[:l], s[l:]
while remain:
if c is not None:
if c <= 0:
break
c -= 1
try:
p = self.m2i(pkt, remain)
except Exception:
if conf.debug_dissector:
raise
p = conf.raw_layer(load=remain)
remain = ""
else:
if conf.padding_layer in p:
pad = p[conf.padding_layer]
remain = pad.load
del (pad.underlayer.payload)
else:
remain = ""
lst.append(p)
# Evaluate the stop condition
if self.stop and self.stop(p):
break
return remain + ret, lst
class AdjustableFieldLenField(Field):
__slots__ = ["length_of", "count_of", "adjust"]
def __init__(self, name, default, length_of=None):
Field.__init__(self, name, default, ">H")
self.length_of = length_of
def i2m(self, pkt, x):
if x is None:
fld, fval = pkt.getfield_and_val(self.length_of)
x = fld.i2len(pkt, fval)
return x
def addfield(self, pkt, s, val):
i2m = self.i2m(pkt, val)
fmt = "B"
padd = ""
if i2m > 0xf0:
fmt = ">H"
padd = struct.pack("B", 0xff)
return s + padd + struct.pack(fmt, i2m)
def getfield(self, pkt, s):
if struct.unpack("B", s[:1])[0] == 0xff:
return s[3:], self.m2i(pkt, struct.unpack(">H", s[1:3])[0])
else:
return s[1:], self.m2i(pkt, struct.unpack("B", s[:1])[0])
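# Wire-format sketch for AdjustableFieldLenField (added comment, not part of
# the original library): values of 0xf0 or less are encoded as a single byte;
# larger values use the escape byte 0xff followed by a big-endian 16-bit
# integer. For example, 0x42 -> "\x42" and 0x1234 -> "\xff\x12\x34".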
class ASN1F_CHOICE_SAFE(ASN1F_CHOICE):
def __init__(self, name, default, *args, **kwargs):
if "implicit_tag" in kwargs:
err_msg = "ASN1F_CHOICE has been called with an implicit_tag"
raise ASN1_Error(err_msg)
self.implicit_tag = None
for kwarg in ["context", "explicit_tag"]:
if kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
else:
setattr(self, kwarg, None)
ASN1F_field.__init__(self, name, None, context=self.context,
explicit_tag=self.explicit_tag)
self.default = default
self.current_choice = None
self.choices = args
def m2i(self, pkt, s):
"""Try to safely extract an ASN1_Packet from the choices list.
:raise ASN1_Error: if unable to parse the packet using any of the given choices
"""
if len(s) == 0:
raise ASN1_Error("ASN1F_CHOICE: got empty string")
for choice in self.choices:
try:
return self.extract_packet(choice, s)
except (ASN1_Error, ASN1F_badsequence, BER_Decoding_Error):
pass
raise ASN1_Error
class TimestampField(LongField):
"""Timestamp field"""
def i2h(self, pkt, x):
dt = datetime.utcfromtimestamp(x)
return dt.strftime("%Y-%m-%d %H:%M:%S UTC")
class LESignedByteField(Field):
def __init__(self, name, default):
Field.__init__(self, name, default, "<b")
class LESignedShortField(Field):
def __init__(self, name, default):
Field.__init__(self, name, default, "<h")
class LESignedLongField(Field):
def __init__(self, name, default):
Field.__init__(self, name, default, "<q")
| gpl-2.0 | 3,712,130,716,518,683,000 | 31.515957 | 117 | 0.586128 | false |
mbiokyle29/pipelines | EBseq/ebseq_extras.py | 1 | 3684 | import os.path
import re
# don't use slots since we only have a few of these guys
class _sampleRec():
def __init__(self, name, mean, std, condition):
self.name = name
self.mean = int(mean)
self.std = int(std)
self.condition = int(condition)
class EbseqExtras():
def __init__(self, log):
self.log = log
self.samples = []
self.conditions = {}
def read_configuration(self, conf):
if os.path.isfile(conf):
try:
with open(conf, "r") as fh:
for line in fh:
self._build_rec(line)
except IOError as e:
self.log.error("IOError thrown trying to read %s conf file, perhap permissions?", conf)
raise SystemExit
else:
self.log.error("It appears %s does not exist", conf)
raise SystemExit
def _build_rec(self, line):
# <sample><frag-mean><frag-sd><cond>
rec = _sampleRec(*line.split("\t"))
self.samples.append(rec)
if rec.condition in self.conditions:
self.conditions[rec.condition].append(rec)
else:
self.conditions[rec.condition] = [rec]
def gen_fastq_list(self):
results = []
for sample in self.samples:
results.append(sample.name)
return results
def gen_sample_list(self):
sample_str = ""
for cond in sorted(self.conditions.keys()):
for rec in self.conditions[cond]:
name = re.sub(r"\.fastq", ".genes.results", rec.name)
sample_str += name+" "
return sample_str.rstrip()
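    # Illustrative result (added comment, not part of the original module):
    # for samples a.fastq (condition 1) and b.fastq (condition 2), this
    # returns "a.genes.results b.genes.results".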
def get_mean_length(self, file):
base = os.path.splitext(file)[0]
for sample in self.samples:
            sample_base = os.path.splitext(sample.name)[0]
if base == sample_base:
return sample.mean
        # if it wasn't found
raise SystemError
def gen_cond_string(self):
# if conditions has {1}:[2], {2}:[2], {3}:[2]
# we want 2,2,2
cond_str = ""
for condition in sorted(self.conditions.keys()):
cond_str += str(len(self.conditions[condition]))+","
return cond_str.rstrip(",")
def report_error(self, message):
# Create a text/plain message
email_body = []
email_body.append("Hello, Kyle\n")
email_body.append("Pipeline failed with the following error: ")
email_body.append(message)
# grab the log file name from the log
# we add the file handler first
# so its here
log_file = self.log.handlers[0].baseFilename
email_body.append("\n#######################################################")
email_body.append("# PIPELINE LOG #")
email_body.append("#######################################################")
with open(log_file, "r") as log:
for line in log:
email_body.append(line.rstrip())
msg = MIMEText("\n".join(email_body))
# header stuff
# no one else cares but me!
root = "[email protected]"
me = "[email protected]"
subject = "RSEM/EBseq pipeline failure report: {}".format(time.strftime("%d/%m/%Y"))
msg['Subject'] = subject
msg['From'] = root
msg['To'] = me
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(root, [me], msg.as_string())
s.quit() | mit | -1,907,952,357,245,468,400 | 29.708333 | 103 | 0.51683 | false |
whummer/moto | tests/test_sns/test_topics_boto3.py | 1 | 7274 | from __future__ import unicode_literals
import boto3
import six
import json
import sure # noqa
from botocore.exceptions import ClientError
from moto import mock_sns
from moto.sns.models import DEFAULT_TOPIC_POLICY, DEFAULT_EFFECTIVE_DELIVERY_POLICY, DEFAULT_PAGE_SIZE
@mock_sns
def test_create_and_delete_topic():
conn = boto3.client("sns", region_name="us-east-1")
for topic_name in ('some-topic', '-some-topic-', '_some-topic_', 'a' * 256):
conn.create_topic(Name=topic_name)
topics_json = conn.list_topics()
topics = topics_json["Topics"]
topics.should.have.length_of(1)
topics[0]['TopicArn'].should.equal(
"arn:aws:sns:{0}:123456789012:{1}"
.format(conn._client_config.region_name, topic_name)
)
# Delete the topic
conn.delete_topic(TopicArn=topics[0]['TopicArn'])
# And there should now be 0 topics
topics_json = conn.list_topics()
topics = topics_json["Topics"]
topics.should.have.length_of(0)
@mock_sns
def test_create_topic_with_attributes():
conn = boto3.client("sns", region_name="us-east-1")
conn.create_topic(Name='some-topic-with-attribute', Attributes={'DisplayName': 'test-topic'})
topics_json = conn.list_topics()
topic_arn = topics_json["Topics"][0]['TopicArn']
attributes = conn.get_topic_attributes(TopicArn=topic_arn)['Attributes']
attributes['DisplayName'].should.equal('test-topic')
@mock_sns
def test_create_topic_should_be_idempotent():
conn = boto3.client("sns", region_name="us-east-1")
topic_arn = conn.create_topic(Name="some-topic")['TopicArn']
conn.set_topic_attributes(
TopicArn=topic_arn,
AttributeName="DisplayName",
AttributeValue="should_be_set"
)
topic_display_name = conn.get_topic_attributes(
TopicArn=topic_arn
)['Attributes']['DisplayName']
topic_display_name.should.be.equal("should_be_set")
    # recreate topic to prove idempotency
topic_arn = conn.create_topic(Name="some-topic")['TopicArn']
topic_display_name = conn.get_topic_attributes(
TopicArn=topic_arn
)['Attributes']['DisplayName']
topic_display_name.should.be.equal("should_be_set")
@mock_sns
def test_get_missing_topic():
conn = boto3.client("sns", region_name="us-east-1")
conn.get_topic_attributes.when.called_with(
TopicArn="a-fake-arn").should.throw(ClientError)
@mock_sns
def test_create_topic_must_meet_constraints():
conn = boto3.client("sns", region_name="us-east-1")
common_random_chars = [':', ";", "!", "@", "|", "^", "%"]
for char in common_random_chars:
conn.create_topic.when.called_with(
Name="no%s_invalidchar" % char).should.throw(ClientError)
conn.create_topic.when.called_with(
Name="no spaces allowed").should.throw(ClientError)
@mock_sns
def test_create_topic_should_be_of_certain_length():
conn = boto3.client("sns", region_name="us-east-1")
too_short = ""
conn.create_topic.when.called_with(
Name=too_short).should.throw(ClientError)
too_long = "x" * 257
conn.create_topic.when.called_with(
Name=too_long).should.throw(ClientError)
@mock_sns
def test_create_topic_in_multiple_regions():
for region in ['us-west-1', 'us-west-2']:
conn = boto3.client("sns", region_name=region)
conn.create_topic(Name="some-topic")
list(conn.list_topics()["Topics"]).should.have.length_of(1)
@mock_sns
def test_topic_corresponds_to_region():
for region in ['us-east-1', 'us-west-2']:
conn = boto3.client("sns", region_name=region)
conn.create_topic(Name="some-topic")
topics_json = conn.list_topics()
topic_arn = topics_json["Topics"][0]['TopicArn']
topic_arn.should.equal(
"arn:aws:sns:{0}:123456789012:some-topic".format(region))
@mock_sns
def test_topic_attributes():
conn = boto3.client("sns", region_name="us-east-1")
conn.create_topic(Name="some-topic")
topics_json = conn.list_topics()
topic_arn = topics_json["Topics"][0]['TopicArn']
attributes = conn.get_topic_attributes(TopicArn=topic_arn)['Attributes']
attributes["TopicArn"].should.equal(
"arn:aws:sns:{0}:123456789012:some-topic"
.format(conn._client_config.region_name)
)
attributes["Owner"].should.equal('123456789012')
json.loads(attributes["Policy"]).should.equal(DEFAULT_TOPIC_POLICY)
attributes["DisplayName"].should.equal("")
attributes["SubscriptionsPending"].should.equal('0')
attributes["SubscriptionsConfirmed"].should.equal('0')
attributes["SubscriptionsDeleted"].should.equal('0')
attributes["DeliveryPolicy"].should.equal("")
json.loads(attributes["EffectiveDeliveryPolicy"]).should.equal(
DEFAULT_EFFECTIVE_DELIVERY_POLICY)
# boto can't handle prefix-mandatory strings:
# i.e. unicode on Python 2 -- u"foobar"
# and bytes on Python 3 -- b"foobar"
if six.PY2:
policy = json.dumps({b"foo": b"bar"})
displayname = b"My display name"
delivery = json.dumps(
{b"http": {b"defaultHealthyRetryPolicy": {b"numRetries": 5}}})
else:
policy = json.dumps({u"foo": u"bar"})
displayname = u"My display name"
delivery = json.dumps(
{u"http": {u"defaultHealthyRetryPolicy": {u"numRetries": 5}}})
conn.set_topic_attributes(TopicArn=topic_arn,
AttributeName="Policy",
AttributeValue=policy)
conn.set_topic_attributes(TopicArn=topic_arn,
AttributeName="DisplayName",
AttributeValue=displayname)
conn.set_topic_attributes(TopicArn=topic_arn,
AttributeName="DeliveryPolicy",
AttributeValue=delivery)
attributes = conn.get_topic_attributes(TopicArn=topic_arn)['Attributes']
attributes["Policy"].should.equal('{"foo": "bar"}')
attributes["DisplayName"].should.equal("My display name")
attributes["DeliveryPolicy"].should.equal(
'{"http": {"defaultHealthyRetryPolicy": {"numRetries": 5}}}')
@mock_sns
def test_topic_paging():
conn = boto3.client("sns", region_name="us-east-1")
for index in range(DEFAULT_PAGE_SIZE + int(DEFAULT_PAGE_SIZE / 2)):
conn.create_topic(Name="some-topic_" + str(index))
response = conn.list_topics()
topics_list = response["Topics"]
next_token = response["NextToken"]
len(topics_list).should.equal(DEFAULT_PAGE_SIZE)
int(next_token).should.equal(DEFAULT_PAGE_SIZE)
response = conn.list_topics(NextToken=next_token)
topics_list = response["Topics"]
response.shouldnt.have("NextToken")
topics_list.should.have.length_of(int(DEFAULT_PAGE_SIZE / 2))
@mock_sns
def test_add_remove_permissions():
conn = boto3.client('sns', region_name='us-east-1')
response = conn.create_topic(Name='testpermissions')
conn.add_permission(
TopicArn=response['TopicArn'],
Label='Test1234',
AWSAccountId=['999999999999'],
ActionName=['AddPermission']
)
conn.remove_permission(
TopicArn=response['TopicArn'],
Label='Test1234'
)
| apache-2.0 | 8,110,577,855,049,402,000 | 35.009901 | 102 | 0.643937 | false |
taigaio/taiga-back | taiga/export_import/management/commands/dump_project.py | 1 | 2981 | # -*- coding: utf-8 -*-
# Copyright (C) 2014-present Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.management.base import BaseCommand, CommandError
from taiga.projects.models import Project
from taiga.export_import.services import render_project
import os
import gzip
class Command(BaseCommand):
help = "Export projects to a json file"
def add_arguments(self, parser):
parser.add_argument("project_slugs",
nargs="+",
help="<project_slug project_slug ...>")
parser.add_argument("-d", "--dst_dir",
action="store",
dest="dst_dir",
default="./",
metavar="DIR",
help="Directory to save the json files. ('./' by default)")
parser.add_argument("-f", "--format",
action="store",
dest="format",
default="plain",
metavar="[plain|gzip]",
help="Format to the output file plain json or gzipped json. ('plain' by default)")
def handle(self, *args, **options):
dst_dir = options["dst_dir"]
if not os.path.exists(dst_dir):
raise CommandError("Directory {} does not exist.".format(dst_dir))
if not os.path.isdir(dst_dir):
raise CommandError("'{}' must be a directory, not a file.".format(dst_dir))
project_slugs = options["project_slugs"]
for project_slug in project_slugs:
try:
project = Project.objects.get(slug=project_slug)
except Project.DoesNotExist:
raise CommandError("Project '{}' does not exist".format(project_slug))
if options["format"] == "gzip":
dst_file = os.path.join(dst_dir, "{}.json.gz".format(project_slug))
with gzip.GzipFile(dst_file, "wb") as f:
render_project(project, f)
else:
dst_file = os.path.join(dst_dir, "{}.json".format(project_slug))
with open(dst_file, "wb") as f:
render_project(project, f)
print("-> Generate dump of project '{}' in '{}'".format(project.name, dst_file))
| agpl-3.0 | 5,321,887,911,599,760,000 | 39.283784 | 110 | 0.571956 | false |
GoogleCloudPlatform/PerfKitBenchmarker | perfkitbenchmarker/linux_packages/maven.py | 1 | 4623 | # Copyright 2020 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
"""Module containing maven installation functions."""
import os
import posixpath
from absl import flags
from perfkitbenchmarker import data
from perfkitbenchmarker import linux_packages
from six.moves.urllib.parse import urlparse
flags.DEFINE_string('maven_version', '3.6.3',
'The version of maven')
flags.DEFINE_string('maven_mirror_url', None,
'If specified, this URL will be used as a Maven mirror')
FLAGS = flags.FLAGS
MVN_URL = 'https://archive.apache.org/dist/maven/maven-{0}/{1}/binaries/apache-maven-{1}-bin.tar.gz'
MVN_DIR = posixpath.join(linux_packages.INSTALL_DIR, 'maven')
MVN_ENV_PATH = '/etc/profile.d/maven.sh'
MVN_ENV = '''
export JAVA_HOME={java_home}
export M2_HOME={maven_home}
export MAVEN_HOME={maven_home}
export PATH={maven_home}/bin:$PATH
'''
PACKAGE_NAME = 'maven'
PREPROVISIONED_DATA = {
'apache-maven-{0}-bin.tar.gz'.format('3.6.1'):
'2528c35a99c30f8940cc599ba15d34359d58bec57af58c1075519b8cd33b69e7',
'apache-maven-{0}-bin.tar.gz'.format('3.6.3'):
'26ad91d751b3a9a53087aefa743f4e16a17741d3915b219cf74112bf87a438c5'
}
PACKAGE_DATA_URL = {
'apache-maven-{0}-bin.tar.gz'.format('3.6.1'): MVN_URL.format('3', '3.6.1'),
'apache-maven-{0}-bin.tar.gz'.format('3.6.3'): MVN_URL.format('3', '3.6.3')
}
def GetRunCommand(arguments):
"""Return Maven run command including proxy settings."""
command = 'source {} && mvn {}'.format(MVN_ENV_PATH, arguments)
if FLAGS['http_proxy'].present:
parsed_url = urlparse(FLAGS.http_proxy)
http_proxy_params = ' -Dhttp.proxyHost={host} -Dhttp.proxyPort={port}'
command += http_proxy_params.format(
host=parsed_url.hostname, port=parsed_url.port)
if FLAGS['https_proxy'].present:
parsed_url = urlparse(FLAGS.https_proxy)
https_proxy_params = ' -Dhttps.proxyHost={host} -Dhttps.proxyPort={port}'
command += https_proxy_params.format(
host=parsed_url.hostname, port=parsed_url.port)
return command
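# Illustrative result (added example, not part of the original module): with
# --http_proxy=http://proxy.example.com:3128 (a hypothetical proxy), calling
# GetRunCommand('test') yields roughly:
#
#   source /etc/profile.d/maven.sh && mvn test \
#       -Dhttp.proxyHost=proxy.example.com -Dhttp.proxyPort=3128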
def _GetJavaHome(vm):
out, _ = vm.RemoteCommand("java -XshowSettings:properties 2>&1 > /dev/null "
"| awk '/java.home/{print $3}'")
out = out.strip()
if '/jre' in out:
return out[:out.index('/jre')]
else:
return out
def AptInstall(vm):
_Install(vm)
def YumInstall(vm):
vm.InstallPackages('which')
_Install(vm)
def _Install(vm):
"""Install maven package."""
vm.Install('openjdk')
vm.Install('curl')
# Download and extract maven
maven_full_ver = FLAGS.maven_version
maven_major_ver = maven_full_ver[:maven_full_ver.index('.')]
maven_url = MVN_URL.format(maven_major_ver, maven_full_ver)
maven_tar = maven_url.split('/')[-1]
# will only work with preprovision_ignore_checksum
if maven_tar not in PREPROVISIONED_DATA:
PREPROVISIONED_DATA[maven_tar] = ''
PACKAGE_DATA_URL[maven_tar] = maven_url
maven_remote_path = posixpath.join(linux_packages.INSTALL_DIR, maven_tar)
vm.InstallPreprovisionedPackageData(PACKAGE_NAME, [maven_tar],
linux_packages.INSTALL_DIR)
vm.RemoteCommand(('mkdir -p {0} && '
'tar -C {0} --strip-components=1 -xzf {1}').format(
MVN_DIR, maven_remote_path))
java_home = _GetJavaHome(vm)
# Set env variables for maven
maven_env = MVN_ENV.format(java_home=java_home, maven_home=MVN_DIR)
cmd = 'echo "{0}" | sudo tee -a {1}'.format(maven_env, MVN_ENV_PATH)
vm.RemoteCommand(cmd)
if FLAGS.maven_mirror_url:
settings_local_path = data.ResourcePath(os.path.join(
'maven', 'settings.xml.j2'))
settings_remote_path = '~/.m2/settings.xml'
context = {
'maven_mirror_url': FLAGS.maven_mirror_url
}
vm.RemoteCommand('mkdir -p ~/.m2')
vm.RenderTemplate(settings_local_path, settings_remote_path, context)
def Uninstall(vm):
vm.Uninstall('openjdk')
vm.RemoteCommand('rm -rf {0}'.format(MVN_DIR), ignore_failure=True)
vm.RemoteCommand('sudo rm -f {0}'.format(MVN_ENV_PATH), ignore_failure=True)
| apache-2.0 | -5,603,522,190,831,945,000 | 34.022727 | 100 | 0.679645 | false |
mindbody/API-Examples | SDKs/Python/swagger_client/models/time_clock_report.py | 1 | 7634 | # coding: utf-8
"""
MINDBODY Public API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.time_card_event import TimeCardEvent # noqa: F401,E501
class TimeClockReport(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'staff_id': 'int',
'task': 'str',
'hourly_rate': 'float',
'total_hours': 'float',
'total_pay': 'float',
'time_cards': 'list[TimeCardEvent]'
}
attribute_map = {
'staff_id': 'StaffId',
'task': 'Task',
'hourly_rate': 'HourlyRate',
'total_hours': 'TotalHours',
'total_pay': 'TotalPay',
'time_cards': 'TimeCards'
}
def __init__(self, staff_id=None, task=None, hourly_rate=None, total_hours=None, total_pay=None, time_cards=None): # noqa: E501
"""TimeClockReport - a model defined in Swagger""" # noqa: E501
self._staff_id = None
self._task = None
self._hourly_rate = None
self._total_hours = None
self._total_pay = None
self._time_cards = None
self.discriminator = None
if staff_id is not None:
self.staff_id = staff_id
if task is not None:
self.task = task
if hourly_rate is not None:
self.hourly_rate = hourly_rate
if total_hours is not None:
self.total_hours = total_hours
if total_pay is not None:
self.total_pay = total_pay
if time_cards is not None:
self.time_cards = time_cards
@property
def staff_id(self):
"""Gets the staff_id of this TimeClockReport. # noqa: E501
The ID of the requested staff member. # noqa: E501
:return: The staff_id of this TimeClockReport. # noqa: E501
:rtype: int
"""
return self._staff_id
@staff_id.setter
def staff_id(self, staff_id):
"""Sets the staff_id of this TimeClockReport.
The ID of the requested staff member. # noqa: E501
:param staff_id: The staff_id of this TimeClockReport. # noqa: E501
:type: int
"""
self._staff_id = staff_id
@property
def task(self):
"""Gets the task of this TimeClockReport. # noqa: E501
The staff member’s job title. # noqa: E501
:return: The task of this TimeClockReport. # noqa: E501
:rtype: str
"""
return self._task
@task.setter
def task(self, task):
"""Sets the task of this TimeClockReport.
The staff member’s job title. # noqa: E501
:param task: The task of this TimeClockReport. # noqa: E501
:type: str
"""
self._task = task
@property
def hourly_rate(self):
"""Gets the hourly_rate of this TimeClockReport. # noqa: E501
The hourly rate the business pays for this job. # noqa: E501
:return: The hourly_rate of this TimeClockReport. # noqa: E501
:rtype: float
"""
return self._hourly_rate
@hourly_rate.setter
def hourly_rate(self, hourly_rate):
"""Sets the hourly_rate of this TimeClockReport.
The hourly rate the business pays for this job. # noqa: E501
:param hourly_rate: The hourly_rate of this TimeClockReport. # noqa: E501
:type: float
"""
self._hourly_rate = hourly_rate
@property
def total_hours(self):
"""Gets the total_hours of this TimeClockReport. # noqa: E501
The sum of the hours worked by the staff member in this time card report. # noqa: E501
:return: The total_hours of this TimeClockReport. # noqa: E501
:rtype: float
"""
return self._total_hours
@total_hours.setter
def total_hours(self, total_hours):
"""Sets the total_hours of this TimeClockReport.
The sum of the hours worked by the staff member in this time card report. # noqa: E501
:param total_hours: The total_hours of this TimeClockReport. # noqa: E501
:type: float
"""
self._total_hours = total_hours
@property
def total_pay(self):
"""Gets the total_pay of this TimeClockReport. # noqa: E501
The total amount earned by the staff member for this time card report. # noqa: E501
:return: The total_pay of this TimeClockReport. # noqa: E501
:rtype: float
"""
return self._total_pay
@total_pay.setter
def total_pay(self, total_pay):
"""Sets the total_pay of this TimeClockReport.
The total amount earned by the staff member for this time card report. # noqa: E501
:param total_pay: The total_pay of this TimeClockReport. # noqa: E501
:type: float
"""
self._total_pay = total_pay
@property
def time_cards(self):
"""Gets the time_cards of this TimeClockReport. # noqa: E501
Information about when a staff member began and ended a task. # noqa: E501
:return: The time_cards of this TimeClockReport. # noqa: E501
:rtype: list[TimeCardEvent]
"""
return self._time_cards
@time_cards.setter
def time_cards(self, time_cards):
"""Sets the time_cards of this TimeClockReport.
Information about when a staff member began and ended a task. # noqa: E501
:param time_cards: The time_cards of this TimeClockReport. # noqa: E501
:type: list[TimeCardEvent]
"""
self._time_cards = time_cards
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(TimeClockReport, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TimeClockReport):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| bsd-2-clause | -8,113,522,800,906,684,000 | 28.459459 | 132 | 0.573788 | false |
SymbiFlow/sv-tests | tools/runners/Yosys.py | 1 | 1388 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 The SymbiFlow Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
import os
from BaseRunner import BaseRunner
class Yosys(BaseRunner):
def __init__(self):
super().__init__("yosys", "yosys")
self.url = "http://www.clifford.at/yosys/"
def prepare_run_cb(self, tmp_dir, params):
run = os.path.join(tmp_dir, "run.sh")
scr = os.path.join(tmp_dir, 'scr.ys')
inc = ""
for incdir in params['incdirs']:
inc += f' -I {incdir}'
defs = ""
for define in params['defines']:
defs += f' -D {define}'
# prepare yosys script
with open(scr, 'w') as f:
for svf in params['files']:
f.write(f'read_verilog -sv {inc} {defs} {svf}\n')
# prepare wrapper script
with open(run, 'w') as f:
f.write('set -x\n')
f.write(f'cat {scr}\n')
f.write(f'{self.executable} -Q -T {scr}\n')
self.cmd = ['sh', run]
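    # Illustrative result (added comment, not part of the original runner):
    # for files a.sv and b.sv, scr.ys contains one ``read_verilog -sv`` line
    # per file, and run.sh echoes the script before invoking
    # ``yosys -Q -T scr.ys``.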
def get_version_cmd(self):
return [self.executable, "-V"]
def get_version(self):
version = super().get_version()
return " ".join([self.name, version.split()[1]])
| isc | -7,568,493,084,167,110,000 | 24.703704 | 65 | 0.54755 | false |
fedora-conary/conary | conary/trovetup.py | 1 | 7716 | #
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from conary.deps import deps
from conary import errors
from conary import versions
from conary.lib.compat import namedtuple as _namedtuple
class TroveSpec(_namedtuple('TroveSpec', 'name version flavor')):
"""
    A trove spec is a partial trove specification. It contains a name (which
    may be empty when ``allowEmptyName`` is set), an optional version
    specification, and an optional flavor.
The version specification may be a full version, a branch, a label,
a revision or partial revision, or a label plus a revision or partial
revision.
"""
__slots__ = ()
def __new__(cls, name, version=None, flavor=None,
allowEmptyName=True, withFrozenFlavor=False):
"""
@param name: the input string or tuple
@type name: string or tuple
@param version: optional version, if version not included in name
@type version: string
@param flavor: optional version, if version not included in name
@type flavor: string, or frozen flavor if C{withFrozenFlavor} is True.
@param allowEmptyName: if set, will accept an empty string and some
other variations.
@type allowEmptyName: bool
@param withFrozenFlavor: if set, will accept a frozen flavor
@type withFrozenFlavor: bool
@raise errors.TroveSpecError: Raised if the input string is not
a valid TroveSpec
"""
if isinstance(name, (tuple, list)):
# TroveSpec(sometuple)
name, version, flavor = name
elif version is None and flavor is None:
# TroveSpec('a=b[c]')
return cls.fromString(name, allowEmptyName=allowEmptyName,
withFrozenFlavor=withFrozenFlavor)
# TroveSpec(name, version, flavor)
if isinstance(flavor, basestring):
flavor = cls._thawFlavor(flavor, withFrozenFlavor)
return tuple.__new__(cls, (name, version, flavor))
def __repr__(self):
return 'TroveSpec(%r)' % (self.asString(True),)
def asString(self, withTimestamp=False):
if self.version is not None:
version = '=' + self.version
else:
version = ''
if self.flavor is not None:
flavor = '[' + str(self.flavor) + ']'
else:
flavor = ''
return ''.join((self.name, version, flavor))
__str__ = asString
@staticmethod
def _thawFlavor(flavor, withFrozenFlavor):
if withFrozenFlavor:
return deps.ThawFlavor(flavor)
return deps.parseFlavor(flavor)
@classmethod
def fromString(cls, specStr, allowEmptyName=True, withFrozenFlavor=False):
origSpecStr = specStr
# CNY-3219: strip leading and trailing whitespaces around job
# specification
specStr = specStr.strip()
if specStr.find('[') > 0 and specStr[-1] == ']':
specStr = specStr[:-1]
l = specStr.split('[')
if len(l) != 2:
raise errors.TroveSpecError(origSpecStr, "bad flavor spec")
specStr, flavorSpec = l
flavor = cls._thawFlavor(flavorSpec, withFrozenFlavor)
if flavor is None:
raise errors.TroveSpecError(origSpecStr, "bad flavor spec")
else:
flavor = None
if specStr.find("=") >= 0:
l = specStr.split("=")
if len(l) != 2:
raise errors.TroveSpecError(origSpecStr, "Too many ='s")
name, versionSpec = l
else:
name = specStr
versionSpec = None
if not name and not allowEmptyName:
raise errors.TroveSpecError(origSpecStr, 'Trove name is required')
return tuple.__new__(cls, (name, versionSpec, flavor))
class TroveTuple(_namedtuple('TroveTuple', 'name version flavor')):
"""
A trove tuple is a (name, version, flavor) tuple that uniquely identifies a
single trove. It is always an exact reference.
For a partial specification, see L{TroveSpec}.
"""
# NOTE to future developers: if a version of TroveTuple with timestampless
# versions becomes useful, subclass it instead of kludging this one to
# support both. You should really never be in a situation where you don't
# know whether your version has timestamps!
__slots__ = ()
hasTimestamp = True
_thawVerFunc = staticmethod(versions.ThawVersion)
_thawFlavFunc = staticmethod(deps.parseFlavor)
def __new__(cls, name, version=None, flavor=None):
if isinstance(name, (tuple, list)):
# TroveTuple(sometuple)
name, version, flavor = name
elif version is None and flavor is None:
# TroveTuple('a=b[c]')
return cls.fromString(name)
# TroveTuple(name, version, flavor)
if isinstance(version, basestring):
version = cls._thawVerFunc(version)
if isinstance(flavor, basestring):
flavor = cls._thawFlavFunc(flavor)
return tuple.__new__(cls, (name, version, flavor))
def __repr__(self):
return 'TroveTuple(%r)' % (self.asString(True),)
def asString(self, withTimestamp=False):
if withTimestamp:
ver = self.version.freeze()
else:
ver = self.version.asString()
return '%s=%s[%s]' % (self.name, ver, self.flavor)
__str__ = asString
@classmethod
def fromString(cls, ttstr, withFrozenFlavor=False):
try:
ttstr = _cast(ttstr)
except UnicodeEncodeError:
raise errors.ParseError("Trove tuple must be ASCII safe")
equals = ttstr.count('=')
left = ttstr.count('[')
right = ttstr.count(']')
if equals != 1 or left not in (0, 1) or right != left:
raise errors.ParseError("Not a valid trove tuple")
equals = ttstr.find('=')
left = ttstr.find('[')
right = ttstr.find(']')
name = ttstr[:equals]
if left < 0:
# No flavor.
assert right < 0
left = right = len(ttstr)
elif right != len(ttstr) - 1:
raise errors.ParseError("Not a valid trove tuple")
version = ttstr[equals + 1 : left]
flavor = ttstr[left + 1 : right]
if not version:
raise errors.ParseError("Not a valid trove tuple")
return cls(name, version, flavor)
class JobSpec(_namedtuple('JobSpec', 'name old new')):
"""
A job spec holds a single update request, including a name, optional old
version and flavor, and optional new version and flavor.
"""
__slots__ = ()
# TODO: Parsers, stringifiers, etc.
class JobTuple(_namedtuple('JobTuple', 'name old new absolute')):
"""
A job tuple represents a single trove job, consisting of a name, old
version and flavor, new version and flavor, and a flag indicating whether
the job is absolute.
"""
__slots__ = ()
# TODO: Parsers, stringifiers, etc.
def _cast(val):
"Return C{val.encode('ascii')} if it is a unicode, or C{val} otherwise."
if isinstance(val, unicode):
val = val.encode('ascii')
return val
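# Minimal usage sketch: parse a partial trove specification. The spec string
# below is a hypothetical example, not a real repository label.
if __name__ == '__main__':
    spec = TroveSpec('conary=conary.example.com@rpl:devel[is: x86]')
    print 'name:   ', spec.name
    print 'version:', spec.version
    print 'flavor: ', str(spec.flavor)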
| apache-2.0 | -8,419,735,200,402,609,000 | 34.232877 | 79 | 0.616252 | false |
allancaffee/scaly-mongo | scalymongo/structure_walker.py | 1 | 4037 | """
Structure Walker
================
A utility used to aid in structure validation.
"""
from inspect import isclass
from scalymongo.errors import ValidationError
class StructureWalker(object):
"""A helper class to recurse a :class:`dict`-like object in accordance with
a structure.
:param field_translator: should be function mapping the ``value`` and
``type_`` to the new value for a key.
"""
def __init__(self, field_validator):
self.field_validator = field_validator
def walk_dict(self, body, structure, path=None):
"""Validate a dictionary in accordance with `structure`.
A :class:`ValidationError` is raised if any fields in `body` are
not present in `structure`.
"""
_check_for_unknown_fields(body, structure, path)
for field, sub_structure in structure.iteritems():
if isclass(field):
field_type = field
# For structures like {<TYPE>: {<STRUCT>}} iterate values
# in the body with keys of <TYPE> and verify each against
# <STRUCT>.
for key, value in body.iteritems():
if isinstance(key, field_type):
self._recurse_or_validate_field(
value, sub_structure, _join(path, key))
if field in body:
self._recurse_or_validate_field(
body[field], sub_structure, _join(path, field))
def _recurse_or_validate_field(self, value, sub_structure, path):
if isinstance(sub_structure, list):
assert len(sub_structure) == 1
if isinstance(value, dict):
# If the structure is a dict this is fine so long as all of the
# keys are integers or the positional operator (`$`). This
# happens with the $set update modifier since we expand
# {'foo.0.bar': 1} to {'foo': {'0': {'bar': 1}}}
for key, value in value.iteritems():
assert key.isdigit() or key == '$'
self._recurse_or_validate_field(
value, sub_structure[0], _join(path, key))
else:
# Validate each value in the list against the specified content
# type.
for i, value in enumerate(value):
self._recurse_or_validate_field(
value, sub_structure[0], _join(path, i))
return
if isinstance(sub_structure, dict):
self.walk_dict(value, sub_structure, path)
return
self.field_validator(path, value, sub_structure)
def _check_for_unknown_fields(body, structure, path):
"""Check `body` for any keys not present in `structure`.
This only checks the first level of keys. Any keys from :class:`dict`s in
the `body`\ 's values will not be checked.
"""
type_keys = tuple([key for key in structure if isclass(key)])
existing_fields = set([key for key in body if not isclass(key)])
unknown_fields = existing_fields.difference(structure.keys())
# If there are valid types for a key filter out unknown fields that match a
# type.
if type_keys:
unknown_fields = [key for key in unknown_fields
if not isinstance(key, type_keys)]
if unknown_fields:
unknown_fields = ', '.join([repr(field) for field in unknown_fields])
if path:
err = ('Encountered field(s), in subdocument at {0},'
' not present in structure: {1}'.format(
path, unknown_fields))
else:
err = 'Encountered field(s) not present in structure: {0}'.format(
unknown_fields)
raise ValidationError(err)
def _join(head, tail):
"""Join `head` and `tail` with a dot.
If head is ``None`` only `tail` is returned.
"""
if head is None:
return tail
return '{0}.{1}'.format(head, tail)
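# Minimal usage sketch: validate a hypothetical document against a structure
# with a simple type-checking validator.
if __name__ == '__main__':
    def _check_type(path, value, type_):
        if not isinstance(value, type_):
            raise ValidationError(
                '{0}: expected {1}, got {2!r}'.format(path, type_, value))
    walker = StructureWalker(_check_type)
    walker.walk_dict({'name': 'example', 'tags': ['a', 'b']},
                     {'name': str, 'tags': [str]})
    print 'document validated'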
| bsd-3-clause | 7,673,683,350,043,057,000 | 34.725664 | 79 | 0.567005 | false |
FedoraScientific/salome-smesh | src/Tools/blocFissure/gmu/rotTrans.py | 1 | 2187 | # -*- coding: utf-8 -*-
import logging
from geomsmesh import geompy
import math
from triedreBase import triedreBase
O, OX, OY, OZ = triedreBase()
# -----------------------------------------------------------------------------
# --- rotation-translation operator for an object centered at the origin
def rotTrans(objet, orientation, point, normal, trace = False):
"""
  Move an object centered at the origin, by rotation and translation, to a point
  on the surface of the healthy part into which the defect is inserted.
  @param objet : original object centered at the origin (geomObject)
  @param orientation : rotation of the original object around OX (degrees)
  @param point : the point that will become the center of the moved object (geomObject), usually on the surface of the healthy part
  @param normal : normal to the surface of the healthy part at the central point (geomObject)
  @return trans : transformed object (geomObject)
"""
logging.info("start")
planXY = geompy.MakePlaneLCS(None, 2000, 1)
projXY = geompy.MakeProjection(normal, planXY)
[v1,v2] = geompy.ExtractShapes(projXY, geompy.ShapeType["VERTEX"], False)
xyz1 = geompy.PointCoordinates(v1)
xyz2 = geompy.PointCoordinates(v2)
x = xyz2[0] - xyz1[0]
y = xyz2[1] - xyz1[1]
sinalpha = y / math.sqrt(x*x + y*y)
cosalpha = x / math.sqrt(x*x + y*y)
alpha = math.asin(sinalpha)
if cosalpha < 0:
alpha = math.pi -alpha
beta = geompy.GetAngleRadians(OZ, normal)
[v1,v2] = geompy.ExtractShapes(normal, geompy.ShapeType["VERTEX"], False)
xyz1 = geompy.PointCoordinates(v1)
xyz2 = geompy.PointCoordinates(v2)
z = xyz2[2] - xyz1[2]
if z < 0:
beta = math.pi -beta
rot0 = geompy.MakeRotation(objet, OX, orientation*math.pi/180.0)
rot1 = geompy.MakeRotation(rot0, OZ, alpha)
axe2 = geompy.MakeRotation(OY, OZ, alpha)
rot2 = geompy.MakeRotation(rot1, axe2, beta -math.pi/2.)
logging.debug("alpha",alpha)
logging.debug("beta",beta)
if trace:
geompy.addToStudy( rot1, 'rot1' )
geompy.addToStudy( axe2, 'axe2' )
geompy.addToStudy( rot2, 'rot2' )
xyz = geompy.PointCoordinates(point)
trans = geompy.MakeTranslation(rot2, xyz[0], xyz[1], xyz[2])
return trans
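# -----------------------------------------------------------------------------
# --- minimal usage sketch (assumes a running SALOME session where geompy is
#     importable; the box face id below is a hypothetical example)
if __name__ == '__main__':
  box = geompy.MakeBoxDXDYDZ(100, 100, 100)
  [face] = geompy.SubShapes(box, [33])
  point = geompy.MakeVertexOnSurface(face, 0.5, 0.5)
  normal = geompy.GetNormal(face, point)
  sphere = geompy.MakeSphereR(10)
  moved = rotTrans(sphere, 30.0, point, normal, trace=True)
  geompy.addToStudy(moved, 'moved')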
| lgpl-2.1 | 1,145,417,926,614,904,200 | 37.035088 | 121 | 0.677122 | false |
Twi/amaya | amaya/base.py | 1 | 8526 | from exceptions import ConnectionError
from ircmess import IRCLine
from select import select
import socket
import ssl
class IRCBot:
"""
An IRCBot is a class that maintains a connection with a remote IRC server
and keeps track of channel members, information about the remote server,
and other things that the protocol gives that users might find useful.
"""
    def __init__(self, host, port, use_ssl=False, nick="AmayaTest1", user="amaya",
                 gecos="Amaya 0.1", netname="ExampleNet", nickservpass=None,
                 encoding="UTF-8", sasl=False, debug=False, autojoin=None):
"""
Args: remote host to connect to, port number to connect to
Keyword args:
         - use_ssl: Whether or not to use SSL for the connection
- nick: nickname of bot
- user: ident the bot uses
- gecos: real name of the bot
- netname: Name of the network you're connecting to
- nickservpass: Password to use for authentication
- encoding: Character encoding to use
- sasl: Whether or not to attempt SASL authentication
"""
# Lots of variables, no way around this.
self.link = socket.socket()
self.link.connect((host, port))
self.__buf = ""
self.host = host
        self.ssl = use_ssl
self.nick = nick
self.user = user
self.gecos = gecos
self.netname = netname
self.nickservpass = nickservpass
self.encoding = encoding
self.sasl = sasl
self.debug = debug
        self.autojoin = autojoin if autojoin is not None else []
        self.expecting_nickchange = False
self.servername = ""
self.ircdver = ""
self.snomask = ""
self.loggedinas = ""
self.ircdumodes = []
self.umodes = []
self.channels = {}
self.clients = {} # XXX: Is this a good idea?
self.isupport = {}
        if self.ssl:
            self.link = ssl.wrap_socket(self.link)
# Get a list of IRCv3 CAPs
self.send_line("CAP LS")
# Register with the remote server
self.send_line("NICK %s" % self.nick)
self.send_line("USER {0} {0} {0} :{1}".format(user, gecos))
def send_line(self, line):
"""
Takes in a raw line and sends it to the server. Don't use this without
good reason.
"""
        if self.debug:
print(">>>", line)
self.link.send(bytes("%s\r\n" % line, "UTF-8"))
# The following functions are high level binds to common IRC client commands
def join(self, channel):
"""
Join a channel and set up the appropriate data structures.
"""
self.channels[channel.upper()] = {}
self.send_line("JOIN %s" % channel)
def part(self, channel, reason="Leaving"):
"""
Leave a channel and forget about it.
"""
del self.channels[channel.upper()]
self.send_line("PART %s :%s" % (channel, reason))
def message_like(self, kind, target, message):
"""
NOTICE and PRIVMSG are pretty similar commands. Handle both of them
the same.
"""
if message == "":
message = " "
self.send_line("%s %s :%s" % (kind, target, message))
def notice(self, target, message):
"""
Sends a NOTICE to someone. Please use this over PRIVMSG. Other bots
will not loop.
"""
self.message_like("NOTICE", target, message)
def privmsg(self, target, message):
"""
Sends a PRIVMSG to someone.
"""
self.message_like("PRIVMSG", target, message)
def ping(self, message="Amaya"):
"""
Send a PING to the remote server.
"""
self.send_line("PING :%" % message)
def change_nick(self, nickname):
"""
Request to change nickname
"""
self.expecting_nickchange = True
self.send_line("NICK %s" % nickname)
# Now is select() baggage and the line scraper
def process(self):
"""
Call this function when you have data on the socket.
"""
tbuf = self.link.recv(2048)
tbuf = self.__buf + tbuf.decode('UTF-8')
lines = tbuf.split("\r\n")
self.__buf = lines[-1]
lines = lines[:-1]
for line in lines:
self.process_line(line)
def process_line(self, line):
"""
Take a single line of traffic and process it.
"""
        if self.debug:
print("<<<", line)
line = IRCLine(line)
if line.verb == "PING":
self.send_line("PONG :%s" % line.args[-1])
if hasattr(self, "on_%s" % line.verb):
func = getattr(self, "on_%s" % line.verb)
func(line)
# Base implementation of protocol verbs
# Numerics should be first and in numerical order
def on_001(self, line):
"""
RPL_WELCOME: This numeric is shown on registration. It shows the network
name.
"""
self.netname = line.args[-1].split()[3]
self.ping()
def on_004(self, line):
"""
RPL_MYINFO: This numeric shows the server name, ircd type and version,
as well as user and modes it supports.
"""
self.servername = line.args[0]
self.ircdver = line.args[1]
# Not scraping CMODES out here, 005 gives me a better place to find
# what has what syntax
self.ircdumodes = line.args[3]
# Apparently people care about +B that it's worth just setting it if
# available and not worrying about accidentally breaking some weird
# bot rule.
if "B" in self.ircdumodes:
self.send_line("MODE %s +B" % self.nick)
def on_005(self, line):
"""
RPL_ISUPPORT: Shows things that the server you are connected to supports.
This includes the list of prefixes and in some cases their meaning.
RPL_ISUPPORT strings vary from server to server, so best effort will be
made to support the most common ones, as well as the ones that the testnet
supports.
"""
isupport = line.args[1:]
for supp in isupport:
supp = supp.split("=")
if len(supp) == 1:
self.isupport[supp[0]] = None
else:
self.isupport[supp[0]] = supp[1]
def on_376(self, line):
"""
RPL_ENDMOTD: Sent once the server finishes its motd. Usually, this is
when channel joining is safe. But we are smarter than that, sometimes
servers don't have an MOTD.
"""
pass
def on_433(self, line):
"""
ERR_NICKINUSE: Sent from the server when a client tries to use a
nickname that another client is using. We should append an underscore
to our nick and request nickchange to that.
"""
self.nick += "_"
self.change_nick(self.nick)
def on_900(self, line):
"""
RPL_LOGGEDIN: Sent when the ircd logs you in via services sucessfully.
Some IRC daemons send this twice when you authenticate with sasl, but
other irc daemons only send this once.
"""
pass
# Put named verbs past here
def on_CAP(self, line):
if line.args[1] == "LS":
for cap in line.args[-1].split():
if cap == "sasl":
if self.sasl:
self.send_line("AUTHENTICATE PLAIN")
elif cap == "account-notify":
self.send_line("CAP REQ account-notify")
elif cap == "multi-prefix":
self.send_line("CAP REQ multi-prefix")
if not self.sasl:
self.send_line("CAP END")
def on_ERROR(self, line):
"""
ERROR is sent when the ircd kills off the connection forcibly.
This should error out with something spectacular.
"""
raise ConnectionError(line.args[-1])
def on_NICK(self, line):
"""
The server changed our nickname. If we are not expecting this, change
nickname back.
"""
if not self.expecting_nickchange:
self.change_nick(self.nick)
else:
            self.nick = line.args[-1]
            self.expecting_nickchange = False
def on_PONG(self, line):
"""
The server replied to our PING message.
"""
if line.source == self.servername:
if len(self.channels) == 0:
for channel in self.autojoin:
self.join(channel)
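# Minimal usage sketch: pump the bot's socket with select(). The server, nick,
# and channel below are hypothetical examples.
if __name__ == "__main__":
    bot = IRCBot("irc.example.net", 6667, nick="AmayaDemo",
                 autojoin=["#amaya-test"])
    while True:
        readable, _, _ = select([bot.link], [], [], 30)
        if readable:
            bot.process()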
| mit | 6,726,665,231,286,973,000 | 27.51505 | 82 | 0.554891 | false |
deanmalmgren/flo | tests/run.py | 1 | 1071 | #!/usr/bin/env python
"""Run the test suite that is specified in the .travis.yml file
"""
import os
import subprocess
import yaml
from flo.colors import green, red
root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def run_test(command):
wrapped_command = "cd %s && %s" % (root_dir, command)
pipe = subprocess.Popen(
wrapped_command, shell=True,
)
pipe.wait()
if pipe.returncode == 0:
print(green("TEST PASSED"))
else:
print(red("TEST FAILED"))
return pipe.returncode
# load the script tests from the .travis.yml file
with open(os.path.join(root_dir, '.travis.yml')) as stream:
travis_yml = yaml.load_all(stream.read())
config = travis_yml.next()
tests = config['script']
# run the tests
if isinstance(tests, (str, unicode)):
returncode = run_test(tests)
elif isinstance(tests, (list, tuple)):
returncode = 0
for test in tests:
returncode += run_test(test)
if returncode == 0:
print(green("ALL TESTS PASSED"))
else:
print(red("SOME TESTS FAILED, SEE ABOVE"))
| mit | -1,789,616,958,793,071,000 | 23.906977 | 70 | 0.65733 | false |
Deledrius/korman | korman/properties/modifiers/region.py | 1 | 15953 | # This file is part of Korman.
#
# Korman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Korman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Korman. If not, see <http://www.gnu.org/licenses/>.
import bpy
from bpy.props import *
from PyHSPlasma import *
from ...exporter import ExportError, ExportAssertionError
from ...helpers import TemporaryObject
from ... import idprops
from .base import PlasmaModifierProperties, PlasmaModifierLogicWiz
from ..prop_camera import PlasmaCameraProperties
from .physics import bounds_types
footstep_surface_ids = {
"dirt": 0,
# 1 = NULL
"puddle": 2,
# 3 = tile (NULL in MOUL)
"metal": 4,
"woodbridge": 5,
"rope": 6,
"grass": 7,
# 8 = NULL
"woodfloor": 9,
"rug": 10,
"stone": 11,
# 12 = NULL
# 13 = metal ladder (dupe of metal)
"woodladder": 14,
"water": 15,
# 16 = maintainer's glass (NULL in PotS)
# 17 = maintainer's metal grating (NULL in PotS)
# 18 = swimming (why would you want this?)
}
footstep_surfaces = [("dirt", "Dirt", "Dirt"),
("grass", "Grass", "Grass"),
("metal", "Metal", "Metal Catwalk"),
("puddle", "Puddle", "Shallow Water"),
("rope", "Rope", "Rope Ladder"),
("rug", "Rug", "Carpet Rug"),
("stone", "Stone", "Stone Tile"),
("water", "Water", "Deep Water"),
("woodbridge", "Wood Bridge", "Wood Bridge"),
("woodfloor", "Wood Floor", "Wood Floor"),
("woodladder", "Wood Ladder", "Wood Ladder")]
class PlasmaCameraRegion(PlasmaModifierProperties):
pl_id = "camera_rgn"
bl_category = "Region"
bl_label = "Camera Region"
bl_description = "Camera Region"
bl_icon = "CAMERA_DATA"
camera_type = EnumProperty(name="Camera Type",
description="What kind of camera should be used?",
items=[("auto_follow", "Auto Follow Camera", "Automatically generated follow camera"),
("manual", "Manual Camera", "User specified camera object")],
default="manual",
options=set())
camera_object = PointerProperty(name="Camera",
description="Switches to this camera",
type=bpy.types.Object,
poll=idprops.poll_camera_objects,
options=set())
auto_camera = PointerProperty(type=PlasmaCameraProperties, options=set())
def export(self, exporter, bo, so):
if self.camera_type == "manual":
if self.camera_object is None:
raise ExportError("Camera Modifier '{}' does not specify a valid camera object".format(self.id_data.name))
camera_so_key = exporter.mgr.find_create_key(plSceneObject, bl=self.camera_object)
camera_props = self.camera_object.data.plasma_camera.settings
else:
assert self.camera_type[:4] == "auto"
# Wheedoggy! We get to export the doggone camera now.
camera_props = self.auto_camera
camera_type = self.camera_type[5:]
exporter.camera.export_camera(so, bo, camera_type, camera_props)
camera_so_key = so.key
# Setup physical stuff
phys_mod = bo.plasma_modifiers.collision
simIface, physical = exporter.physics.generate_physical(bo, so, phys_mod.bounds, self.key_name)
physical.memberGroup = plSimDefs.kGroupDetector
physical.reportGroup = 1 << plSimDefs.kGroupAvatar
simIface.setProperty(plSimulationInterface.kPinned, True)
physical.setProperty(plSimulationInterface.kPinned, True)
# I don't feel evil enough to make this generate a logic tree...
msg = plCameraMsg()
msg.BCastFlags |= plMessage.kLocalPropagate | plMessage.kBCastByType
msg.setCmd(plCameraMsg.kRegionPushCamera)
msg.setCmd(plCameraMsg.kSetAsPrimary, camera_props.primary_camera)
msg.newCam = camera_so_key
region = exporter.mgr.find_create_object(plCameraRegionDetector, so=so)
region.addMessage(msg)
def harvest_actors(self):
if self.camera_type == "manual":
if self.camera_object is None:
raise ExportError("Camera Modifier '{}' does not specify a valid camera object".format(self.id_data.name))
camera = self.camera_object.data.plasma_camera.settings
else:
camera = self.auto_camera
return camera.harvest_actors()
class PlasmaFootstepRegion(PlasmaModifierProperties, PlasmaModifierLogicWiz):
pl_id = "footstep"
bl_category = "Region"
bl_label = "Footstep"
bl_description = "Footstep Region"
surface = EnumProperty(name="Surface",
description="What kind of surface are we walking on?",
items=footstep_surfaces,
default="stone")
bounds = EnumProperty(name="Region Bounds",
description="Physical object's bounds",
items=bounds_types,
default="hull")
def export(self, exporter, bo, so):
# Generate the logic nodes now
self.logicwiz(bo)
# Now, export the node tree
self.node_tree.export(exporter, bo, so)
def logicwiz(self, bo):
tree = self.node_tree
nodes = tree.nodes
nodes.clear()
# Region Sensor
volsens = nodes.new("PlasmaVolumeSensorNode")
volsens.name = "RegionSensor"
volsens.region_object = bo
volsens.bounds = self.bounds
volsens.find_input_socket("enter").allow = True
volsens.find_input_socket("exit").allow = True
# Responder
respmod = nodes.new("PlasmaResponderNode")
respmod.name = "Resp"
respmod.link_input(volsens, "satisfies", "condition")
respstate = nodes.new("PlasmaResponderStateNode")
respstate.link_input(respmod, "state_refs", "resp")
# ArmatureEffectStateMsg
msg = nodes.new("PlasmaFootstepSoundMsgNode")
msg.link_input(respstate, "msgs", "sender")
msg.surface = self.surface
@property
def key_name(self):
return "{}_FootRgn".format(self.id_data.name)
class PlasmaPanicLinkRegion(PlasmaModifierProperties):
pl_id = "paniclink"
bl_category = "Region"
bl_label = "Panic Link"
bl_description = "Panic Link Region"
play_anim = BoolProperty(name="Play Animation",
description="Play the link-out animation when panic linking",
default=True)
def export(self, exporter, bo, so):
phys_mod = bo.plasma_modifiers.collision
simIface, physical = exporter.physics.generate_physical(bo, so, phys_mod.bounds, self.key_name)
# Now setup the region detector properties
physical.memberGroup = plSimDefs.kGroupDetector
physical.reportGroup = 1 << plSimDefs.kGroupAvatar
# Finally, the panic link region proper
reg = exporter.mgr.add_object(plPanicLinkRegion, name=self.key_name, so=so)
reg.playLinkOutAnim = self.play_anim
@property
def key_name(self):
return "{}_PanicLinkRgn".format(self.id_data.name)
@property
def requires_actor(self):
return True
class PlasmaSoftVolume(idprops.IDPropMixin, PlasmaModifierProperties):
pl_id = "softvolume"
bl_category = "Region"
bl_label = "Soft Volume"
bl_description = "Soft-Boundary Region"
# Advanced
use_nodes = BoolProperty(name="Use Nodes",
description="Make this a node-based Soft Volume",
default=False)
node_tree = PointerProperty(name="Node Tree",
description="Node Tree detailing soft volume logic",
type=bpy.types.NodeTree)
# Basic
invert = BoolProperty(name="Invert",
description="Invert the soft region")
inside_strength = IntProperty(name="Inside", description="Strength inside the region",
subtype="PERCENTAGE", default=100, min=0, max=100)
outside_strength = IntProperty(name="Outside", description="Strength outside the region",
subtype="PERCENTAGE", default=0, min=0, max=100)
soft_distance = FloatProperty(name="Distance", description="Soft Distance",
default=0.0, min=0.0, max=500.0)
def _apply_settings(self, sv):
sv.insideStrength = self.inside_strength / 100.0
sv.outsideStrength = self.outside_strength / 100.0
def get_key(self, exporter, so=None):
"""Fetches the key appropriate for this Soft Volume"""
if so is None:
so = exporter.mgr.find_create_object(plSceneObject, bl=self.id_data)
if self.use_nodes:
tree = self.get_node_tree()
output = tree.find_output("PlasmaSoftVolumeOutputNode")
if output is None:
raise ExportError("SoftVolume '{}' Node Tree '{}' has no output node!".format(self.key_name, tree.name))
return output.get_key(exporter, so)
else:
pClass = plSoftVolumeInvert if self.invert else plSoftVolumeSimple
return exporter.mgr.find_create_key(pClass, bl=self.id_data, so=so)
def export(self, exporter, bo, so):
if self.use_nodes:
self._export_sv_nodes(exporter, bo, so)
else:
self._export_convex_region(exporter, bo, so)
def _export_convex_region(self, exporter, bo, so):
if bo.type != "MESH":
raise ExportError("SoftVolume '{}': Simple SoftVolumes can only be meshes!".format(bo.name))
# Grab the SoftVolume KO
sv = self.get_key(exporter, so).object
self._apply_settings(sv)
# If "invert" was checked, we got a SoftVolumeInvert, but we need to make a Simple for the
# region data to be exported into..
if isinstance(sv, plSoftVolumeInvert):
svSimple = exporter.mgr.find_create_object(plSoftVolumeSimple, bl=bo, so=so)
self._apply_settings(svSimple)
sv.addSubVolume(svSimple.key)
sv = svSimple
sv.softDist = self.soft_distance
# Initialize the plVolumeIsect. Currently, we only support convex isects. If you want parallel
# isects from empties, be my guest...
with TemporaryObject(bo.to_mesh(bpy.context.scene, True, "RENDER", calc_tessface=False), bpy.data.meshes.remove) as mesh:
mesh.transform(bo.matrix_world)
isect = plConvexIsect()
for i in mesh.vertices:
isect.addPlane(hsVector3(*i.normal), hsVector3(*i.co))
sv.volume = isect
def _export_sv_nodes(self, exporter, bo, so):
tree = self.get_node_tree()
if tree.name not in exporter.node_trees_exported:
exporter.node_trees_exported.add(tree.name)
tree.export(exporter, bo, so)
def get_node_tree(self):
if self.node_tree is None:
raise ExportError("SoftVolume '{}' does not specify a valid Node Tree!".format(self.key_name))
return self.node_tree
@classmethod
def _idprop_mapping(cls):
return {"node_tree": "node_tree_name"}
def _idprop_sources(self):
return {"node_tree_name": bpy.data.node_groups}
class PlasmaSubworldRegion(PlasmaModifierProperties):
pl_id = "subworld_rgn"
bl_category = "Region"
bl_label = "Subworld Region"
bl_description = "Subworld transition region"
subworld = PointerProperty(name="Subworld",
description="Subworld to transition into",
type=bpy.types.Object,
poll=idprops.poll_subworld_objects)
transition = EnumProperty(name="Transition",
description="When to transition to the new subworld",
items=[("enter", "On Enter", "Transition when the avatar enters the region"),
("exit", "On Exit", "Transition when the avatar exits the region")],
default="enter",
options=set())
def export(self, exporter, bo, so):
# Due to the fact that our subworld modifier can produce both RidingAnimatedPhysical
# and [HK|PX]Subworlds depending on the situation, this could get hairy, fast.
# Start by surveying the lay of the land.
from_sub, to_sub = bo.plasma_object.subworld, self.subworld
from_isded = exporter.physics.is_dedicated_subworld(from_sub)
to_isded = exporter.physics.is_dedicated_subworld(to_sub)
if 1:
def get_log_text(bo, isded):
main = "[Main World]" if bo is None else bo.name
sub = "Subworld" if isded or bo is None else "RidingAnimatedPhysical"
return main, sub
from_name, from_type = get_log_text(from_sub, from_isded)
to_name, to_type = get_log_text(to_sub, to_isded)
exporter.report.msg("Transition from '{}' ({}) to '{}' ({})",
from_name, from_type, to_name, to_type,
indent=2)
# I think the best solution here is to not worry about the excitement mentioned above.
# If we encounter anything truly interesting, we can fix it in CWE more easily IMO because
# the game actually knows more about the avatar's state than we do here in the exporter.
if to_isded or (from_isded and to_sub is None):
region = exporter.mgr.find_create_object(plSubworldRegionDetector, so=so)
if to_sub is not None:
region.subworld = exporter.mgr.find_create_key(plSceneObject, bl=to_sub)
region.onExit = self.transition == "exit"
else:
msg = plRideAnimatedPhysMsg()
msg.BCastFlags |= plMessage.kLocalPropagate | plMessage.kPropagateToModifiers
msg.sender = so.key
msg.entering = to_sub is not None
# In Cyan's PlasmaMAX RAP detector, it acts as more of a traditional region
# that changes us over to a dynamic character controller on region enter and
# reverts on region exit. We're going for an approach that is backwards compatible
# with subworlds, so our enter/exit regions are separate. Here, enter/exit message
# corresponds with when we should trigger the transition.
region = exporter.mgr.find_create_object(plRidingAnimatedPhysicalDetector, so=so)
if self.transition == "enter":
region.enterMsg = msg
elif self.transition == "exit":
region.exitMsg = msg
else:
raise ExportAssertionError()
# Fancy pants region collider type shit
simIface, physical = exporter.physics.generate_physical(bo, so, self.id_data.plasma_modifiers.collision.bounds, self.key_name)
physical.memberGroup = plSimDefs.kGroupDetector
physical.reportGroup |= 1 << plSimDefs.kGroupAvatar
| gpl-3.0 | -1,248,840,267,233,175,300 | 41.769437 | 134 | 0.603021 | false |
AuxinJeron/ACS-VRP | src/vrpmain.py | 1 | 1857 | from TsplibParser import parser as tspparser
from ArgParser import parser as argparser
from VRPCenter import VRPCenter
from TspPainter import tspPainter
import logging
# construct the logger
logger = logging.getLogger("logger")
logger.setLevel(logging.INFO)
logFormatter = logging.Formatter("%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s")
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
logger.addHandler(consoleHandler)
def run(tspparser):
center = VRPCenter(tspparser)
logger.info("Nodes: ")
for i in range(1, len(tspparser.cities_coord)):
logger.info("Node " + str(i) + " coordinate is " + str(tspparser.cities_coord[i][0]) + ", " + str(tspparser.cities_coord[i][1]))
tspPainter.coord_mat = tspparser.cities_coord
tspPainter.drawMap()
logger.info("Lockers: ")
for i in range(0, len(tspparser.lockers)):
logger.info(tspparser.lockers[i])
tspPainter.drawLockers(tspparser.lockers)
logger.info("Delivers: ")
for i in range(0, len(tspparser.delivers)):
logger.info(tspparser.delivers[i])
logger.info("Demands: ")
demands = 0
for i in range(0, len(tspparser.demands)):
demands += tspparser.demands[i]
logger.info("Node {} {}".format(i, tspparser.demands[i]))
logger.info("Total demands is: {}".format(demands))
center.start()
def main():
args = argparser.parse_args()
tspparser.read_file(args.tsp_file[0])
logger.info("-------------------------------------------")
logger.info("Problem formulation information")
logger.info("-------------------------------------------")
logger.info("Name: " + tspparser.name)
logger.info("Comment: " + tspparser.comment)
logger.info("Type: " + tspparser.type)
# run vrp center
run(tspparser)
if __name__ == "__main__":
main() | apache-2.0 | 7,920,056,264,688,182,000 | 33.407407 | 136 | 0.649435 | false |
joshu/loan-eval-rabbit | async_logger.py | 1 | 2525 | from pika.adapters.twisted_connection import TwistedProtocolConnection
from pika.connection import ConnectionParameters
from twisted.internet import protocol, reactor, task
from twisted.python import log
import uuid
import json
import os
class Consumer(object):
def on_connected(self, connection):
d = connection.channel()
d.addCallback(self.got_channel)
d.addCallback(self.queue_declared)
d.addCallback(self.queue_bound)
d.addCallback(self.handle_deliveries)
d.addErrback(log.err)
def got_channel(self, channel):
self.channel = channel
return self.channel.queue_declare(exclusive=True)
def queue_declared(self, queue):
self._queue_name = queue.method.queue
self.channel.queue_bind(queue=self._queue_name,
exchange="topic_loan_eval",
routing_key="*.*.*")
def queue_bound(self, ignored):
return self.channel.basic_consume(queue=self._queue_name)
def handle_deliveries(self, queue_and_consumer_tag):
queue, consumer_tag = queue_and_consumer_tag
self.looping_call = task.LoopingCall(self.consume_from_queue, queue)
return self.looping_call.start(0)
def consume_from_queue(self, queue):
d = queue.get()
return d.addCallback(lambda result: self.handle_payload(*result))
def handle_payload(self, channel, method, properties, body):
        self.logReceive(method.routing_key, body)
def serviceName(self):
file_name = (os.path.basename(__file__))
return file_name.split('.')[0]
def logReceive(self, routing_key, message):
eval_request = json.loads(message)
# print(message)
uid = uuid.UUID(eval_request["request_id"])
print " [%s] Received |%r|%r|%r" % (self.serviceName(),
            str(uid), eval_request["timestamp"], routing_key)
if __name__ == "__main__":
consumer1 = Consumer()
# consumer2 = Consumer()
parameters = ConnectionParameters()
cc = protocol.ClientCreator(reactor,
TwistedProtocolConnection,
parameters)
d1 = cc.connectTCP("localhost", 5672)
d1.addCallback(lambda protocol: protocol.ready)
d1.addCallback(consumer1.on_connected)
d1.addErrback(log.err)
# d2 = cc.connectTCP("localhost", 5672)
# d2.addCallback(lambda protocol: protocol.ready)
# d2.addCallback(consumer2.on_connected)
# d2.addErrback(log.err)
reactor.run()
| mit | -1,238,929,204,795,590,700 | 31.371795 | 76 | 0.645545 | false |
vindimy/altcointip | src/main.py | 1 | 1232 | import cointipbot, traceback, time
class Main():
cb = None
def __init__(self):
'''
Unfortunately, we cannot instantiate CointipBot only once due to the way pifkoin works.
Pifkoin will open an HTTP connection to the RPC client which will eventually close when the bot completes its checks. Therefore, if we try to loop
        CointipBot.main() without reinstantiating the object, httplib will throw BadStatusLine because the connection is no longer valid and cannot be used.
May make a pull request to pifkoin to resolve this. If this does get resolved in pifkoin or you locally modify your pifkoin copy to resolve this, you can
uncomment the following line and comment the self.cb assignment in main(). This will ensure we do not need to reinstantiate CoinbotTip in every iteration.
'''
# self.cb = cointipbot.CointipBot()
def main(self):
self.cb = cointipbot.CointipBot()
self.cb.main()
def secondary(main):
try:
while True:
            main.main()
except:
traceback.print_exc()
print('Resuming in 7 seconds')
time.sleep(7)
print('Resumed')
while True:
main = Main()
secondary(main) | gpl-2.0 | 630,895,149,168,113,300 | 33.25 | 162 | 0.668831 | false |
KeepSafe/translation-real-time-validaton | notifier/sync.py | 1 | 2907 | import asyncio
import logging
from . import const, compare
from .model import *
logger = logging.getLogger(__name__)
def _to_dc_items(wti_items, zendesk_items):
return [DynamicContentItem(key, wti_items.get(key), zendesk_item) for key, zendesk_item in zendesk_items.items()]
async def _get_all_translations(zendesk_dc, wti_client, dc_item, zendesk_locales):
tasks = [
wti_client.string(dc_item.wti_id, locale) for locale in zendesk_locales if locale != zendesk_dc.default_locale
]
data = await asyncio.gather(*tasks)
return [d for d in data if d]
def _default_translation(translations, default_locale):
for translation in translations:
if translation.locale == default_locale:
return translation.text
return ''
async def _update_item(zendesk_dc, wti_client, zendesk_locales, dc_item):
res = False
translations = await _get_all_translations(zendesk_dc, wti_client, dc_item, zendesk_locales)
text = dc_item.zendesk_item.text
if compare.is_different(_default_translation(translations, zendesk_dc.default_locale), text):
logger.info('updating wti item with key:%s', dc_item.key)
await wti_client.update_translation(dc_item.wti_id, text, zendesk_dc.default_locale, translations)
res = True
else:
logger.debug('item with key %s did not change', dc_item.key)
logger.info('updating dynamic content key:%s for locales:%s', dc_item.key,
list(map(lambda i: i.locale, translations)))
await zendesk_dc.update(dc_item, translations, zendesk_locales)
return res
async def _create_item(zendesk_dc, wti_client, zendesk_locales, dc_item):
logger.info('creating new wti item with key:%s', dc_item.key)
await wti_client.create_string(dc_item, zendesk_dc.default_locale)
return True
async def sync_zendesk(app):
zendesk_dc = app[const.ZENDESK_DC]
wti_client = app[const.WTI_DYNAMIC_CONTENT]
stats = app[const.STATS]
wti_items = await wti_client.strings_ids()
if not wti_items:
logger.error('no wti strings found')
return
zendesk_locales = await zendesk_dc.locales()
zendesk_items = await zendesk_dc.items(zendesk_locales)
dc_items = _to_dc_items(wti_items, zendesk_items)
logger.info('get %s items to process', len(dc_items))
stats.increment('sync.tick')
updated_keys = []
for dc_item in dc_items:
if dc_item.wti_id:
res = await _update_item(zendesk_dc, wti_client, zendesk_locales, dc_item)
if res:
updated_keys.append(dc_item.key)
else:
await _create_item(zendesk_dc, wti_client, zendesk_locales, dc_item)
updated_keys.append(dc_item.key)
if updated_keys:
await app[const.SLACK_NOTIFIER].notify(updated_keys)
stats.increment('sync.items', len(updated_keys))
logger.info('done updating content')
| apache-2.0 | 4,294,154,491,269,031,000 | 36.753247 | 118 | 0.675611 | false |
AnyChart/GraphicsJS | build.py | 1 | 14952 | #!/usr/bin/env python
# coding=utf-8
import os
import sys
import subprocess
import urllib
import zipfile
import platform
import shlex
import time
import json
import datetime
# =======================================================================================================================
# Project paths
# =======================================================================================================================
# COMPILER_VERSION = '20161024'
COMPILER_VERSION = '20180204'
PROJECT_PATH = os.path.abspath(os.path.dirname(__file__))
CONTRIB_PATH = os.path.join(PROJECT_PATH, 'contrib')
COMPILER_PATH = os.path.join(CONTRIB_PATH, 'compiler', 'closure-compiler-v%s.jar' % COMPILER_VERSION)
SRC_PATH = os.path.join(PROJECT_PATH, 'src')
OUT_PATH = os.path.join(PROJECT_PATH, 'out')
CLOSURE_LIBRARY_PATH = os.path.join(CONTRIB_PATH, 'closure-library')
CLOSURE_SOURCE_PATH = os.path.join(CLOSURE_LIBRARY_PATH, 'closure', 'goog')
CLOSURE_LINTER_WRAPPER_PATH = os.path.join(CONTRIB_PATH, 'closure-linter-wrapper')
CLOSURE_BIN_PATH = os.path.join(CLOSURE_LIBRARY_PATH, 'closure', 'bin')
DEPS_WRITER_PATH = os.path.join(CLOSURE_BIN_PATH, 'build', 'depswriter.py')
PYTHON = 'python'
# =======================================================================================================================
# Synchronize contributions.
# =======================================================================================================================
def __has_closure_library():
return os.path.exists(CLOSURE_LIBRARY_PATH)
def __has_closure_compiler():
return os.path.exists(COMPILER_PATH)
def __has_closure_linter_wrapper():
return os.path.exists(CLOSURE_LINTER_WRAPPER_PATH)
def __has_closure_linter():
has_lint = True
try:
subprocess.Popen(['gjslint'], shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
except StandardError:
has_lint = False
return has_lint
def __ensure_dir_exists(path):
if not os.path.exists(path):
os.mkdir(path)
def __need_sync_contrib():
return not __has_closure_library() \
or not __has_closure_compiler() \
or not __has_closure_linter_wrapper() \
or not __has_closure_linter()
def __sync_contrib():
t = time.time()
__ensure_dir_exists(CONTRIB_PATH)
subprocess.call(['git', 'submodule', 'init'])
subprocess.call(['git', 'submodule', 'update'])
# Download closure compiler
if not os.path.exists(COMPILER_PATH):
print 'Downloading Google Closure Compiler v.' + COMPILER_VERSION
try:
__download_and_unzip_from_http(
"http://dl.google.com/closure-compiler/compiler-%s.zip" % COMPILER_VERSION,
'compiler'
)
except StandardError as e:
print e
print 'Failed'
return False
# Install closure linter
if not __has_closure_linter():
if not __install_closure_linter():
return False
print 'Environment ready. Time spent: {:.3f}s\n'.format(time.time() - t)
return True
def __download_and_unzip_from_http(from_url, dir_name):
z_obj_path = os.path.join(CONTRIB_PATH, dir_name + '.zip')
# download zip archive from url
if not os.path.exists(z_obj_path):
urllib.urlretrieve(
from_url,
z_obj_path
)
# extract zip archive
target_path = os.path.join(CONTRIB_PATH, dir_name)
__ensure_dir_exists(target_path)
z_obj = zipfile.ZipFile(z_obj_path)
z_obj.extractall(path=target_path)
z_obj.close()
# remove archive file
os.remove(z_obj_path)
return True
def __install_closure_linter():
print 'Installing Google Closure Linter v.2.3.9'
commands = [] if platform.system() == 'Windows' else ['sudo']
commands.append('easy_install')
commands.append('https://closure-linter.googlecode.com/files/closure_linter-2.3.9.tar.gz')
try:
subprocess.call(commands)
except StandardError:
print 'Failed: you should install easy_install module for python first'
return False
print 'Success'
return True
def sync_required(func):
def wrapper():
if __need_sync_contrib():
__sync_contrib()
return func()
return wrapper
# =======================================================================================================================
# Build project
# =======================================================================================================================
def __get_version():
    f = open(os.path.join(PROJECT_PATH, 'package.json'))
    package_json = json.loads(f.read())
f.close()
return package_json['version']
def __get_file_overview():
return "/**\n * GraphicsJS is a lightweight JavaScript graphics library with an intuitive API, based on SVG/VML technology.\n * Version: %s (%s)\n * License: BSD 3-clause\n * Copyright: AnyChart.com %s. All rights reserved.\n */\n" % (__get_version(), datetime.datetime.now().strftime("%Y-%m-%d"), str(datetime.datetime.now().year))
def __getNotOptimizedCompilerArgs():
compilerArgs = [
'--compilation_level WHITESPACE_ONLY',
'--formatting PRETTY_PRINT'
]
return compilerArgs
def __getOptimizedCompilerArgs():
compilerArgs = [
'--charset UTF-8',
'--compilation_level ADVANCED_OPTIMIZATIONS',
'--process_closure_primitives',
'--language_in ECMASCRIPT3',
'--language_out ECMASCRIPT3',
'--hide_warnings_for "contrib/closure-library"',
'--assume_function_wrapper',
'--use_types_for_optimization true',
'--output_wrapper "' + __get_file_overview() + '(function(){%output%})();"',
'--env BROWSER',
'--extra_annotation_name "includeDoc"',
'--extra_annotation_name "illustration"',
'--extra_annotation_name "illustrationDesc"',
'--extra_annotation_name "ignoreDoc"',
'--extra_annotation_name "propertyDoc"',
'--extra_annotation_name "shortDescription"',
'--warning_level VERBOSE',
'--jscomp_warning accessControls',
'--jscomp_warning ambiguousFunctionDecl',
'--jscomp_warning checkDebuggerStatement',
'--jscomp_warning checkEventfulObjectDisposal',
'--jscomp_warning checkRegExp',
'--jscomp_warning checkTypes',
'--jscomp_warning checkVars',
'--jscomp_warning closureDepMethodUsageChecks',
'--jscomp_warning conformanceViolations',
'--jscomp_warning const',
'--jscomp_warning constantProperty',
'--jscomp_warning deprecated',
'--jscomp_warning deprecatedAnnotations',
'--jscomp_warning duplicate',
'--jscomp_warning duplicateMessage',
'--jscomp_warning es3',
'--jscomp_warning es5Strict',
'--jscomp_warning externsValidation',
'--jscomp_off extraRequire',
'--jscomp_warning fileoverviewTags',
'--jscomp_warning functionParams',
'--jscomp_warning globalThis',
'--jscomp_warning internetExplorerChecks',
'--jscomp_warning invalidCasts',
'--jscomp_warning misplacedTypeAnnotation',
'--jscomp_warning missingGetCssName',
'--jscomp_off missingOverride',
'--jscomp_warning missingPolyfill',
'--jscomp_warning missingProperties',
'--jscomp_warning missingProvide',
'--jscomp_warning missingRequire',
'--jscomp_warning missingReturn',
'--jscomp_warning msgDescriptions',
'--jscomp_off newCheckTypes',
'--jscomp_off newCheckTypesExtraChecks',
'--jscomp_off nonStandardJsDocs',
'--jscomp_off reportUnknownTypes',
'--jscomp_warning suspiciousCode',
'--jscomp_warning strictModuleDepCheck',
'--jscomp_warning typeInvalidation',
'--jscomp_warning undefinedNames',
'--jscomp_warning undefinedVars',
'--jscomp_warning unknownDefines',
'--jscomp_off unusedLocalVariables',
'--jscomp_off unusedPrivateMembers',
'--jscomp_warning uselessCode',
'--jscomp_off useOfGoogBase',
'--jscomp_warning underscore',
'--jscomp_warning visibility',
'--jscomp_warning lintChecks',
]
return compilerArgs
def __getDefaultCompilerArgs(outputFile):
result = [
'java -jar',
COMPILER_PATH,
'--js="%s"' % os.path.join(SRC_PATH, '**.js'),
'--js="%s"' % os.path.join(CLOSURE_SOURCE_PATH, '**.js'),
'--define "goog.DEBUG=false"',
'--js_output_file ' + outputFile,
'--dependency_mode=STRICT',
'--entry_point acgraphentry',
'--hide_warnings_for="goog"'
]
return result
@sync_required
def __compileBinary():
__ensure_dir_exists(OUT_PATH)
t = time.time()
outputFileName = os.path.join(OUT_PATH, 'graphics.min.js')
print 'Building optimized Graphics library js to ' + outputFileName
commands = __getDefaultCompilerArgs(outputFileName) + \
__getOptimizedCompilerArgs()
success = (__call_compiler(commands) == 0)
res = 'Success' if success else 'Failed'
print res + ". Time spent: {:.3f}s\n".format(time.time() - t)
return success
@sync_required
def __compilePlain():
__ensure_dir_exists(OUT_PATH)
t = time.time()
outputFileName = os.path.join(OUT_PATH, 'graphics.js')
print 'Building plain Graphics library js to ' + outputFileName
commands = __getDefaultCompilerArgs(outputFileName) + \
__getNotOptimizedCompilerArgs()
success = (__call_compiler(commands) == 0)
res = 'Success' if success else 'Failed'
print res + ". Time spent: {:.3f}s\n".format(time.time() - t)
return success
def __call_compiler(commands):
commands = " ".join(commands).replace('\\', '\\\\')
commands = shlex.split(commands)
# print commands
p = subprocess.Popen(commands, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
(output, err) = p.communicate()
retcode = p.poll()
if len(output) > 0:
print output
return retcode
# =======================================================================================================================
# Build deps
# =======================================================================================================================
@sync_required
def __buildDepsFromCommandLine():
t = time.time()
output_file = os.path.join(SRC_PATH, 'deps.js')
success = (__callDepsWriter(SRC_PATH, output_file, 'whole project') == 0)
res = 'Success' if success else 'Failed'
print res + ". Time spent: {:.3f}s\n".format(time.time() - t)
return success
def __callDepsWriter(root, output_file, bundle_name):
print 'Writing deps file to ' + output_file
return subprocess.call([
PYTHON,
DEPS_WRITER_PATH,
'--root_with_prefix=' + root + ' ' + os.path.relpath(root, CLOSURE_SOURCE_PATH),
'--output_file=' + output_file
])
# =======================================================================================================================
# Linter.
# =======================================================================================================================
@sync_required
def __lintFromCommandLine():
t = time.time()
success = (__callLinter(SRC_PATH) == 0)
res = 'Success' if success else 'Failed'
print res + ". Time spent: {:.3f}s\n".format(time.time() - t)
return success
def __callLinter(root):
print 'Linting ' + root + ' directory'
return subprocess.call([
PYTHON,
os.path.join(CLOSURE_LINTER_WRAPPER_PATH, 'gjslint.py'),
'--flagfile',
'gjslint.cfg',
'-r',
root
])
# =======================================================================================================================
# JSDoc auto fix.
# =======================================================================================================================
@sync_required
def __autofixFromCommandLine():
t = time.time()
success = (__callAutoFix(SRC_PATH) == 0)
res = 'Success' if success else 'Failed'
print res + ". Time spent: {:.3f}s\n".format(time.time() - t)
return res
def __callAutoFix(root):
print 'Trying to fix ' + root + ' directory'
return subprocess.call([
PYTHON,
os.path.join(CLOSURE_LINTER_WRAPPER_PATH, 'fixjsstyle.py'),
'--flagfile',
'gjslint.cfg',
'-r',
root
])
# =======================================================================================================================
# Help
# =======================================================================================================================
def __printHelp():
print "Build script commands:\n" \
"\n" \
"without params Prepares the environment, than lints and builds everything.\n" \
"\n" \
"contrib Prepares buildin environment.\n" \
"\n" \
"deps Build ./src/deps.js file, needed to run the library in uncompiled mode.\n" \
"\n" \
"compile Builds the library minified js to ./out/ directory.\n" \
"\n" \
"plain Builds the library as one file pretty-printed js to ./out/ directory.\n" \
"\n" \
"lint Lints library sources.\n" \
"\n" \
"autofix Tries to fix lint errors in library sources.\n"
# =======================================================================================================================
# Main
# =======================================================================================================================
def __execMainScript():
print ''
args = sys.argv
if len(args) == 1:
success = __sync_contrib() and \
__lintFromCommandLine() and \
__buildDepsFromCommandLine() and \
__compilePlain() and \
__compileBinary()
elif args[1] == 'contrib':
success = __sync_contrib()
elif args[1] == 'compile':
success = __compileBinary()
elif args[1] == 'plain':
success = __compilePlain()
elif args[1] == 'deps':
success = __buildDepsFromCommandLine()
elif args[1] == 'lint':
success = __lintFromCommandLine()
elif args[1] == 'autofix':
success = __autofixFromCommandLine()
else:
__printHelp()
success = True
return success
if __name__ == '__main__':
try:
success = __execMainScript()
except StandardError as e:
print e
success = False
sys.exit(0 if success else 1)
| bsd-3-clause | -1,710,062,974,786,245,600 | 33.451613 | 336 | 0.524144 | false |
evanhenri/memfog | src/database.py | 1 | 1615 | from sqlalchemy import Column, Integer, String, Text, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class Database:
def __init__(self, db_fp):
# Create an engine that stores data in db found at db_path
engine = create_engine('sqlite:///{}'.format(db_fp))
# Create all tables in the engine
Base.metadata.create_all(engine)
DBSession = sessionmaker(bind=engine)
self.session = DBSession()
def bulk_insert(self, context):
self.session.bulk_save_objects(context.record)
self.session.commit()
def insert(self, context):
self.session.add(context.record)
self.session.commit()
def delete(self, context):
self.session.query(RecordMap).filter_by(row_id=context.record.row_id).delete()
self.session.commit()
def update(self, context):
fields = { k:v for k,v in vars(context.record).items() if k in context.altered_fields }
if len(fields) > 0:
self.session.query(RecordMap).filter_by(row_id=context.record.row_id).update(fields)
self.session.commit()
class RecordMap(Base):
__tablename__ = 'record'
row_id = Column('row_id', Integer, primary_key=True)
title = Column('title', String, nullable=False)
keywords = Column('keywords', String)
body = Column('body', Text)
def __init__(self, row_id=None, title='', keywords='', body=''):
self.row_id = row_id
self.title = title
self.keywords = keywords
self.body = body
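# Minimal usage sketch against an in-memory SQLite database; _Ctx is a
# hypothetical stand-in for memfog's real context object.
if __name__ == '__main__':
    class _Ctx(object):
        record = RecordMap(title='example', keywords='demo', body='hello')
    db = Database(':memory:')
    db.insert(_Ctx())
    print(db.session.query(RecordMap).count())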
| mit | -738,783,469,334,043,900 | 32.645833 | 96 | 0.643344 | false |
jeremydw/google-apputils-python | tests/datelib_unittest.py | 1 | 8310 | #!/usr/bin/env python
# Copyright 2002 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest for datelib.py module."""
import datetime
import random
import time
import pytz
from google_apputils import basetest
from google_apputils import datelib
class TimestampUnitTest(basetest.TestCase):
seed = 1979
def testTzAwareSuccession(self):
a = datelib.Timestamp.now()
b = datelib.Timestamp.utcnow()
self.assertLessEqual(a, b)
def testTzRandomConversion(self):
random.seed(self.seed)
for unused_i in xrange(100):
stz = pytz.timezone(random.choice(pytz.all_timezones))
a = datelib.Timestamp.FromString('2008-04-12T10:00:00', stz)
b = a
for unused_j in xrange(100):
b = b.astimezone(pytz.timezone(random.choice(pytz.all_timezones)))
self.assertEqual(a, b)
random.seed()
def testMicroTimestampConversion(self):
"""Test that f1(f2(a)) == a."""
def IsEq(x):
self.assertEqual(
x, datelib.Timestamp.FromMicroTimestamp(x).AsMicroTimestamp())
IsEq(0)
IsEq(datelib.MAXIMUM_MICROSECOND_TIMESTAMP)
random.seed(self.seed)
for _ in xrange(100):
IsEq(random.randint(0, datelib.MAXIMUM_MICROSECOND_TIMESTAMP))
def testMicroTimestampKnown(self):
self.assertEqual(0, datelib.Timestamp.FromString(
'1970-01-01T00:00:00', pytz.UTC).AsMicroTimestamp())
self.assertEqual(
datelib.MAXIMUM_MICROSECOND_TIMESTAMP,
datelib.MAXIMUM_MICROSECOND_TIMESTAMP_AS_TS.AsMicroTimestamp())
def testMicroTimestampOrdering(self):
"""Test that cmp(a, b) == cmp(f1(a), f1(b))."""
def IsEq(a, b):
self.assertEqual(
cmp(a, b),
cmp(datelib.Timestamp.FromMicroTimestamp(a),
datelib.Timestamp.FromMicroTimestamp(b)))
random.seed(self.seed)
for unused_i in xrange(100):
IsEq(
random.randint(0, datelib.MAXIMUM_MICROSECOND_TIMESTAMP),
random.randint(0, datelib.MAXIMUM_MICROSECOND_TIMESTAMP))
def testCombine(self):
for tz in (datelib.UTC, datelib.US_PACIFIC):
self.assertEqual(
datelib.Timestamp(1970, 1, 1, 0, 0, 0, 0, tz),
datelib.Timestamp.combine(
datelib.datetime.date(1970, 1, 1),
datelib.datetime.time(0, 0, 0),
tz))
self.assertEqual(
datelib.Timestamp(9998, 12, 31, 23, 59, 59, 999999, tz),
datelib.Timestamp.combine(
datelib.datetime.date(9998, 12, 31),
datelib.datetime.time(23, 59, 59, 999999),
tz))
def testFromString1(self):
for string_zero in (
'1970-01-01 00:00:00',
'19700101T000000',
'1970-01-01T00:00:00'
):
for testtz in (datelib.UTC, datelib.US_PACIFIC):
self.assertEqual(
datelib.Timestamp.FromString(string_zero, testtz),
datelib.Timestamp(1970, 1, 1, 0, 0, 0, 0, testtz))
self.assertEqual(
datelib.Timestamp.FromString(
'1970-01-01T00:00:00+0000', datelib.US_PACIFIC),
datelib.Timestamp(1970, 1, 1, 0, 0, 0, 0, datelib.UTC))
startdate = datelib.Timestamp(2009, 1, 1, 3, 0, 0, 0, datelib.US_PACIFIC)
for day in xrange(1, 366):
self.assertEqual(
datelib.Timestamp.FromString(startdate.isoformat()),
startdate,
'FromString works for day %d since 2009-01-01' % day)
startdate += datelib.datetime.timedelta(days=1)
def testFromString2(self):
"""Test correctness of parsing the local time in a given timezone.
The result shall always be the same as tz.localize(naive_time).
"""
baseday = datelib.datetime.date(2009, 1, 1).toordinal()
for day_offset in xrange(0, 365):
day = datelib.datetime.date.fromordinal(baseday + day_offset)
naive_day = datelib.datetime.datetime.combine(
day, datelib.datetime.time(0, 45, 9))
naive_day_str = naive_day.strftime('%Y-%m-%dT%H:%M:%S')
self.assertEqual(
datelib.US_PACIFIC.localize(naive_day),
datelib.Timestamp.FromString(naive_day_str, tz=datelib.US_PACIFIC),
'FromString localizes time incorrectly')
def testFromStringInterval(self):
expected_date = datetime.datetime.utcnow() - datetime.timedelta(days=1)
expected_s = time.mktime(expected_date.utctimetuple())
actual_date = datelib.Timestamp.FromString('1d')
actual_s = time.mktime(actual_date.timetuple())
diff_seconds = actual_s - expected_s
self.assertBetween(diff_seconds, 0, 1)
self.assertRaises(
datelib.TimeParseError, datelib.Timestamp.FromString, 'wat')
def _EpochToDatetime(t, tz=None):
if tz is not None:
return datelib.datetime.datetime.fromtimestamp(t, tz)
else:
return datelib.datetime.datetime.utcfromtimestamp(t)
class DatetimeConversionUnitTest(basetest.TestCase):
def setUp(self):
self.pst = pytz.timezone('US/Pacific')
self.utc = pytz.utc
self.now = time.time()
def testDatetimeToUTCMicros(self):
self.assertEqual(
0, datelib.DatetimeToUTCMicros(_EpochToDatetime(0)))
self.assertEqual(
1001 * long(datelib._MICROSECONDS_PER_SECOND),
datelib.DatetimeToUTCMicros(_EpochToDatetime(1001)))
self.assertEqual(long(self.now * datelib._MICROSECONDS_PER_SECOND),
datelib.DatetimeToUTCMicros(_EpochToDatetime(self.now)))
# tzinfo shouldn't change the result
self.assertEqual(
0, datelib.DatetimeToUTCMicros(_EpochToDatetime(0, tz=self.pst)))
def testDatetimeToUTCMillis(self):
self.assertEqual(
0, datelib.DatetimeToUTCMillis(_EpochToDatetime(0)))
self.assertEqual(
1001 * 1000L, datelib.DatetimeToUTCMillis(_EpochToDatetime(1001)))
self.assertEqual(long(self.now * 1000),
datelib.DatetimeToUTCMillis(_EpochToDatetime(self.now)))
# tzinfo shouldn't change the result
self.assertEqual(
0, datelib.DatetimeToUTCMillis(_EpochToDatetime(0, tz=self.pst)))
def testUTCMicrosToDatetime(self):
self.assertEqual(_EpochToDatetime(0), datelib.UTCMicrosToDatetime(0))
self.assertEqual(_EpochToDatetime(1.000001),
datelib.UTCMicrosToDatetime(1000001))
self.assertEqual(_EpochToDatetime(self.now), datelib.UTCMicrosToDatetime(
long(self.now * datelib._MICROSECONDS_PER_SECOND)))
# Check timezone-aware comparisons
self.assertEqual(_EpochToDatetime(0, self.pst),
datelib.UTCMicrosToDatetime(0, tz=self.pst))
self.assertEqual(_EpochToDatetime(0, self.pst),
datelib.UTCMicrosToDatetime(0, tz=self.utc))
def testUTCMillisToDatetime(self):
self.assertEqual(_EpochToDatetime(0), datelib.UTCMillisToDatetime(0))
self.assertEqual(_EpochToDatetime(1.001), datelib.UTCMillisToDatetime(1001))
t = time.time()
dt = _EpochToDatetime(t)
# truncate sub-milli time
dt -= datelib.datetime.timedelta(microseconds=dt.microsecond % 1000)
self.assertEqual(dt, datelib.UTCMillisToDatetime(long(t * 1000)))
# Check timezone-aware comparisons
self.assertEqual(_EpochToDatetime(0, self.pst),
datelib.UTCMillisToDatetime(0, tz=self.pst))
self.assertEqual(_EpochToDatetime(0, self.pst),
datelib.UTCMillisToDatetime(0, tz=self.utc))
class MicrosecondsToSecondsUnitTest(basetest.TestCase):
def testConversionFromMicrosecondsToSeconds(self):
self.assertEqual(0.0, datelib.MicrosecondsToSeconds(0))
self.assertEqual(7.0, datelib.MicrosecondsToSeconds(7000000))
self.assertEqual(1.234567, datelib.MicrosecondsToSeconds(1234567))
self.assertEqual(12345654321.123456,
datelib.MicrosecondsToSeconds(12345654321123456))
if __name__ == '__main__':
basetest.main()
| apache-2.0 | 4,097,536,020,298,167,300 | 34.512821 | 80 | 0.676775 | false |
qbeenslee/Nepenthes-Server | data/db.py | 1 | 8333 | # coding:utf-8
'''
Database models.
Author : qbeenslee
Created : 2014/10/10
'''
import time
import datetime
import sqlalchemy
from sqlalchemy import *
from sqlalchemy.orm import sessionmaker, relationship
from config import setting
from data.base_clazz import Base
def get_db():
'''
    Get the collection of operation objects (engine and session).
    :return: dict with 'engine' and 'session' keys
'''
engine = sqlalchemy.create_engine(setting.DB_CONNECT_STRING, echo=setting.DEBUG)
Session = sessionmaker(bind=engine)
session = Session()
db = {'engine': engine, 'session': session}
return db
def get_session():
engine = sqlalchemy.create_engine(setting.DB_CONNECT_STRING, echo=setting.DEBUG)
Session = sessionmaker(bind=engine)
session = Session()
return session
class User(Base):
'''
    User information
'''
__tablename__ = 'NEP_USER'
    uid = Column(String(10), primary_key=True) # User ID, never changes (machine-generated)
    imei = Column(String(17), nullable=False) # Phone IMEI (one phone may register only one account)
    nickname = Column(String(20), nullable=True, unique=True) # Username
    email = Column(String(50), nullable=False, unique=True) # Email address
    motto = Column(String(50), nullable=True) # Personal signature
    wallpaper = Column(String(100), nullable=True) # User wallpaper
    avatar = Column(String(100), nullable=True) # User avatar
    verify_status = Column(Integer, nullable=True, default=0) # Verification status
    level = Column(Integer, nullable=True, default=0) # Level
    is_active = Column(Boolean, nullable=False, default=True) # Whether the account is usable
    last_loggin = Column(REAL, nullable=True) # Last login time
    join_datetime = Column(DateTime, nullable=False) # Registration time (datetime)
    join_time = Column(REAL, nullable=False) # Registration time (epoch seconds)
    password = Column(String(60), nullable=False) # Hashed password
class Log(Base):
'''
    Records of user operations
'''
__tablename__ = 'NEP_LOG'
    oid = Column(Integer, primary_key=True, autoincrement=True) # Operation ID, auto-increment
    master = Column(ForeignKey(User.uid), nullable=False) # ID of the user who performed the operation
    otp = Column(Integer, nullable=True) # Operation type
    remark = Column(String(200), nullable=False) # Remark
log_time = Column(REAL, nullable=False)
class EmailVerify(Base):
'''
    Storage for email verification codes
'''
    __tablename__ = 'NEP_EMAIL_VERIFY'
    vcode = Column(String(20), primary_key=True) # Verification code
    invalid_time = Column(REAL, nullable=False) # Expiration time
    master = Column(ForeignKey(User.uid), nullable=False) # User the code applies to
class LoginVerify(Base):
'''
    Storage for login-state verification tokens
'''
__tablename__ = 'NEP_LOGIN_VERIFY'
    token = Column(String(20), primary_key=True) # Verification token
    master = Column(ForeignKey(User.uid), nullable=False) # User the token applies to
    invalid_time = Column(REAL, nullable=False) # Expiration time; default = now() + 604800.0 (7 days)
    client = Column(String(20), nullable=True, unique=False) # Client identifier
    imei = Column(String(10), nullable=True, unique=True) # One encrypted token may only be used to log in on one phone
    phone_model = Column(String(30), nullable=True) # Phone model
class Share(Base):
'''
    Image shares
'''
__tablename__ = 'NEP_SHARE'
    sid = Column(String(20), primary_key=True) # Share link (ID)
    master = Column(ForeignKey(User.uid), nullable=False) # poster ID
    submit_time = Column(REAL, nullable=False) # Submission time
    change_time = Column(REAL, nullable=True) # Time of the last change
    status_code = Column(Integer, nullable=True, default=0) # Share status (banned or not: 0 normal; 1+ abnormal)
    see_count = Column(Integer, nullable=True, default=0) # View count (popularity)
    cmt_count = Column(Integer, nullable=True, default=0) # Comment count
    img_hash = Column(String(20), nullable=False) # Image hash address
    img_type = Column(Integer, nullable=True, default=0) # Image post type (camera original, camera-beautified, album image)
    description = Column(String(140), nullable=True) # Description
    dsp_type = Column(Integer, nullable=True) # Description type (from the share or from the system)
    subject = Column(String(20), nullable=True, default=u"分享") # Subject (the default u"分享" means "share")
    hide_location = Column(Boolean, nullable=True, default=True) # Hide the location
    place_name = Column(String(120), nullable=True) # Place name
    img_width = Column(Integer, nullable=True, default=0) # Image width
    img_height = Column(Integer, nullable=True, default=0) # Image height
    praise_count = Column(Integer, nullable=True, default=0) # Praise (like) count
    is_deleted = Column(Boolean, nullable=True, default=False) # Deletion flag
# children_1 = relationship("NEP_COMMENT", cascade="all, delete-orphan", passive_deletes=True)
# children_2 = relationship("NEP_FAVORITE", cascade="all, delete-orphan", passive_deletes=True)
# children_3 = relationship("NEP_PRAISE", cascade="all, delete-orphan", passive_deletes=True)
class Constact(Base):
'''
    Contact backup (kept for future use)
'''
__tablename__ = 'NEP_CONSTACT'
cid = Column(String(20), primary_key=True)
    master = Column(ForeignKey(User.uid), nullable=False) # Owner ID
    phone = Column(String(15), primary_key=True) # Phone number
    name = Column(String(30), nullable=False) # Contact name / remark
class Location(Base):
'''
    Geographic location information
    (sorted by time first, then by distance; distance is weighted)
'''
__tablename__ = 'NEP_LOCATION'
    sid = Column(String(20), primary_key=True) # Share link (ID)
    master = Column(ForeignKey(User.uid), nullable=False) # Poster ID
    geohash = Column(String(100), nullable=True) # geo hash
    ltime = Column(REAL, nullable=False) # Post time
    latitude = Column(String(10)) # north latitude
    longitude = Column(String(10)) # east longitude
    radius = Column(REAL, nullable=True, default=0.0) # Accuracy radius
    level = Column(Integer, nullable=True) # Accuracy level
class Comment(Base):
'''
    Comments
'''
__tablename__ = 'NEP_COMMENT'
    cid = Column(String(20), primary_key=True) # Comment ID
    sid = Column(ForeignKey(Share.sid, ondelete="CASCADE"), nullable=False) # Share ID
    master = Column(ForeignKey(User.uid), nullable=False) # Poster ID
    status_code = Column(Integer, nullable=True) # Comment status (banned or not)
    index_code = Column(Integer, nullable=False) # Order of the comment
    content = Column(String(140)) # Comment content
    submit_time = Column(REAL, nullable=False) # Submission time
class Favorite(Base):
'''
    Share favorites
'''
__tablename__ = 'NEP_FAVORITE'
    fid = Column(String(20), primary_key=True) # Favorite ID
    sid = Column(ForeignKey(Share.sid, ondelete="CASCADE"), nullable=False) # Share link
    master = Column(ForeignKey(User.uid), nullable=False) # ID of the user who favorited
    note = Column(String(140)) # User's note at favoriting time, or the share description
    submit_time = Column(REAL, nullable=False) # Favorite time
class Praise(Base):
'''
    Praise (likes)
'''
__tablename__ = 'NEP_PRAISE'
pid = Column(String(20), primary_key=True)
master = Column(ForeignKey(User.uid), nullable=False)
sid = Column(ForeignKey(Share.sid, ondelete="CASCADE"), nullable=False) # Share ID
    submit_time = Column(REAL, nullable=False) # Submission time
class Feedback(Base):
'''
    Feedback information
'''
__tablename__ = 'NEP_FEEDBACK'
fid = Column(String(20), primary_key=True)
    master = Column(ForeignKey(User.uid), nullable=False) # Poster ID
    ftime = Column(REAL, nullable=False) # Post time
    msg = Column(String(2000), nullable=False) # Feedback content, at most 1000 characters
    Warning_level = Column(String(20), nullable=True) # Warning type
def create_db():
'''
    Create the db file on first run
'''
# os.remove(settings.DB_PATH)
# Base.metadata.create_all(engine)
engine = get_db()['engine']
Base.metadata.create_all(engine)
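
def example_usage():
    '''
    Illustrative sketch only: every identifier value below is made up for
    this example and is not part of the original module.
    '''
    create_db()
    session = get_session()
    user = User(uid='u000000001', imei='356938035643809',
                email='demo@example.com',
                join_datetime=datetime.datetime.now(), join_time=time.time(),
                password='<hashed password>')
    session.add(user)
    session.commit()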
def test():
    session = get_db()['session']
    session.add(LoginVerify(master='dd', token='dd',
                            invalid_time=time.time() + 604800.0))
    session.commit()
if __name__ == '__main__':
create_db()
| gpl-3.0 | 5,364,762,305,518,218,000 | 32.458333 | 99 | 0.634422 | false |
akarol/cfme_tests | cfme/physical/physical_server.py | 1 | 11597 | # -*- coding: utf-8 -*-
"""A model of an Infrastructure PhysicalServer in CFME."""
import attr
from navmazing import NavigateToSibling, NavigateToAttribute
from cached_property import cached_property
from wrapanapi.lenovo import LenovoSystem
from cfme.common import PolicyProfileAssignable, WidgetasticTaggable
from cfme.common.physical_server_views import (
PhysicalServerDetailsView,
PhysicalServerManagePoliciesView,
PhysicalServersView,
PhysicalServerTimelinesView
)
from cfme.exceptions import (
ItemNotFound,
StatsDoNotMatch,
HostStatsNotContains,
ProviderHasNoProperty
)
from cfme.modeling.base import BaseEntity, BaseCollection
from cfme.utils.appliance.implementations.ui import CFMENavigateStep, navigate_to, navigator
from cfme.utils.log import logger
from cfme.utils.pretty import Pretty
from cfme.utils.providers import get_crud_by_name
from cfme.utils.update import Updateable
from cfme.utils.varmeth import variable
from cfme.utils.wait import wait_for
@attr.s
class PhysicalServer(BaseEntity, Updateable, Pretty, PolicyProfileAssignable, WidgetasticTaggable):
"""Model of an Physical Server in cfme.
Args:
name: Name of the physical server.
hostname: hostname of the physical server.
ip_address: The IP address as a string.
custom_ident: The custom identifiter.
Usage:
myhost = PhysicalServer(name='vmware')
myhost.create()
"""
pretty_attrs = ['name', 'hostname', 'ip_address', 'custom_ident']
name = attr.ib()
provider = attr.ib(default=None)
hostname = attr.ib(default=None)
ip_address = attr.ib(default=None)
custom_ident = attr.ib(default=None)
db_id = None
mgmt_class = LenovoSystem
INVENTORY_TO_MATCH = ['power_state']
STATS_TO_MATCH = ['cores_capacity', 'memory_capacity']
def load_details(self, refresh=False):
"""To be compatible with the Taggable and PolicyProfileAssignable mixins.
Args:
refresh (bool): Whether to perform the page refresh, defaults to False
"""
view = navigate_to(self, "Details")
if refresh:
view.browser.refresh()
view.flush_widget_cache()
def execute_button(self, button_group, button, handle_alert=False):
view = navigate_to(self, "Details")
view.toolbar.custom_button(button_group).item_select(button, handle_alert=handle_alert)
def power_on(self):
view = navigate_to(self, "Details")
view.toolbar.power.item_select("Power On", handle_alert=True)
def power_off(self):
view = navigate_to(self, "Details")
view.toolbar.power.item_select("Power Off", handle_alert=True)
@variable(alias='ui')
def power_state(self):
view = navigate_to(self, "Details")
return view.entities.summary("Power Management").get_text_of("Power State")
@variable(alias='ui')
def cores_capacity(self):
view = navigate_to(self, "Details")
return view.entities.summary("Properties").get_text_of("CPU total cores")
@variable(alias='ui')
def memory_capacity(self):
view = navigate_to(self, "Details")
return view.entities.summary("Properties").get_text_of("Total memory (mb)")
def refresh(self, cancel=False):
"""Perform 'Refresh Relationships and Power States' for the server.
Args:
cancel (bool): Whether the action should be cancelled, default to False
"""
view = navigate_to(self, "Details")
view.toolbar.configuration.item_select("Refresh Relationships and Power States",
handle_alert=cancel)
def wait_for_physical_server_state_change(self, desired_state, timeout=300):
"""Wait for PhysicalServer to come to desired state. This function waits just the needed amount of
time thanks to wait_for.
Args:
desired_state (str): 'on' or 'off'
timeout (int): Specify amount of time (in seconds) to wait until TimedOutError is raised
"""
view = navigate_to(self.parent, "All")
def _looking_for_state_change():
entity = view.entities.get_entity(name=self.name)
return "currentstate-{}".format(desired_state) in entity.data['state']
wait_for(_looking_for_state_change, fail_func=view.browser.refresh, num_sec=timeout)
@property
def exists(self):
"""Checks if the physical_server exists in the UI.
Returns: :py:class:`bool`
"""
view = navigate_to(self.parent, "All")
try:
view.entities.get_entity(name=self.name, surf_pages=True)
except ItemNotFound:
return False
else:
return True
    @cached_property
    def get_db_id(self):
        if self.db_id is None:
            self.db_id = self.appliance.physical_server_id(self.name)
        return self.db_id
def wait_to_appear(self):
"""Waits for the server to appear in the UI."""
view = navigate_to(self.parent, "All")
logger.info("Waiting for the server to appear...")
wait_for(
lambda: self.exists,
message="Wait for the server to appear",
num_sec=1000,
fail_func=view.browser.refresh
)
def wait_for_delete(self):
"""Waits for the server to remove from the UI."""
view = navigate_to(self.parent, "All")
logger.info("Waiting for the server to delete...")
wait_for(
lambda: not self.exists,
message="Wait for the server to disappear",
num_sec=500,
fail_func=view.browser.refresh
)
def validate_stats(self, ui=False):
""" Validates that the detail page matches the physical server's information.
This method logs into the provider using the mgmt_system interface and collects
a set of statistics to be matched against the UI. An exception will be raised
if the stats retrieved from the UI do not match those retrieved from wrapanapi.
"""
# Make sure we are on the physical server detail page
if ui:
self.load_details()
# Retrieve the client and the stats and inventory to match
client = self.provider.mgmt
stats_to_match = self.STATS_TO_MATCH
inventory_to_match = self.INVENTORY_TO_MATCH
# Retrieve the stats and inventory from wrapanapi
server_stats = client.stats(*stats_to_match, requester=self)
server_inventory = client.inventory(*inventory_to_match, requester=self)
# Refresh the browser
if ui:
self.browser.selenium.refresh()
# Verify that the stats retrieved from wrapanapi match those retrieved
# from the UI
for stat in stats_to_match:
try:
cfme_stat = int(getattr(self, stat)(method='ui' if ui else None))
server_stat = int(server_stats[stat])
if server_stat != cfme_stat:
msg = "The {} stat does not match. (server: {}, server stat: {}, cfme stat: {})"
raise StatsDoNotMatch(msg.format(stat, self.name, server_stat, cfme_stat))
except KeyError:
raise HostStatsNotContains(
"Server stats information does not contain '{}'".format(stat))
except AttributeError:
raise ProviderHasNoProperty("Provider does not know how to get '{}'".format(stat))
        # Verify that the inventory retrieved from wrapanapi matches the
        # inventory displayed in the UI
        for inventory in inventory_to_match:
            try:
                cfme_inventory = getattr(self, inventory)(method='ui' if ui else None)
                server_value = server_inventory[inventory]
                if server_value != cfme_inventory:
                    msg = "The {} inventory does not match. (server: {}, server inventory: {}, " \
                          "cfme inventory: {})"
                    raise StatsDoNotMatch(msg.format(inventory, self.name, server_value,
                                                     cfme_inventory))
            except KeyError:
                raise HostStatsNotContains(
                    "Server inventory information does not contain '{}'".format(inventory))
            except AttributeError:
                msg = "Provider does not know how to get '{}'"
                raise ProviderHasNoProperty(msg.format(inventory))
@attr.s
class PhysicalServerCollection(BaseCollection):
"""Collection object for the :py:class:`cfme.infrastructure.host.PhysicalServer`."""
ENTITY = PhysicalServer
def select_entity_rows(self, physical_servers):
""" Select all physical server objects """
physical_servers = list(physical_servers)
checked_physical_servers = list()
view = navigate_to(self, 'All')
for physical_server in physical_servers:
view.entities.get_entity(name=physical_server.name, surf_pages=True).check()
checked_physical_servers.append(physical_server)
return view
def all(self, provider):
"""returning all physical_servers objects"""
physical_server_table = self.appliance.db.client['physical_servers']
ems_table = self.appliance.db.client['ext_management_systems']
physical_server_query = (
self.appliance.db.client.session
.query(physical_server_table.name, ems_table.name)
.join(ems_table, physical_server_table.ems_id == ems_table.id))
provider = None
if self.filters.get('provider'):
provider = self.filters.get('provider')
physical_server_query = physical_server_query.filter(ems_table.name == provider.name)
physical_servers = []
for name, ems_name in physical_server_query.all():
physical_servers.append(self.instantiate(name=name,
provider=provider or get_crud_by_name(ems_name)))
return physical_servers
def power_on(self, *physical_servers):
view = self.select_entity_rows(physical_servers)
view.toolbar.power.item_select("Power On", handle_alert=True)
def power_off(self, *physical_servers):
view = self.select_entity_rows(physical_servers)
view.toolbar.power.item_select("Power Off", handle_alert=True)
@navigator.register(PhysicalServerCollection)
class All(CFMENavigateStep):
VIEW = PhysicalServersView
prerequisite = NavigateToAttribute("appliance.server", "LoggedIn")
def step(self):
self.prerequisite_view.navigation.select("Compute", "Physical Infrastructure", "Servers")
@navigator.register(PhysicalServer)
class Details(CFMENavigateStep):
VIEW = PhysicalServerDetailsView
prerequisite = NavigateToAttribute("parent", "All")
def step(self):
self.prerequisite_view.entities.get_entity(name=self.obj.name, surf_pages=True).click()
@navigator.register(PhysicalServer)
class ManagePolicies(CFMENavigateStep):
VIEW = PhysicalServerManagePoliciesView
prerequisite = NavigateToSibling("Details")
def step(self):
self.prerequisite_view.toolbar.policy.item_select("Manage Policies")
@navigator.register(PhysicalServer)
class Timelines(CFMENavigateStep):
VIEW = PhysicalServerTimelinesView
prerequisite = NavigateToSibling("Details")
def step(self):
self.prerequisite_view.toolbar.monitoring.item_select("Timelines")
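

# Illustrative usage sketch (assumes a configured appliance and provider from
# the cfme test framework; the collection attribute name is hypothetical):
#
#     servers = appliance.collections.physical_servers.all(provider)
#     servers[0].power_on()
#     servers[0].wait_for_physical_server_state_change("on")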
| gpl-2.0 | -3,648,648,376,430,438,400 | 36.898693 | 106 | 0.644218 | false |
tadams42/sokoenginepy | tests/board/hexoban_board_spec.py | 1 | 16599 | import os
import pytest
from sokoenginepy import BoardConversionError, HexobanBoard, PuzzlesCollection
from ..test_helpers import TEST_RESOURCES_ROOT
from .autogenerated_board import HexobanBoardAutogeneratedSpecMixin
def load_parser_test_data():
input_file = os.path.join(
TEST_RESOURCES_ROOT, 'test_data', 'hexoban_parser_tests.sok'
)
collection = PuzzlesCollection()
collection.load(input_file)
retv = {}
for puzzle in collection.puzzles:
if puzzle.title == "row_added_type1_top" or puzzle.title == "row_added_type2_top":
puzzle.board = '-' + puzzle.board[1:]
if puzzle.title == "row_added_type1_bottom" or puzzle.title == "row_added_type2_bottom":
puzzle.board = puzzle.board[:-2] + '-' + puzzle.board[-1]
retv[puzzle.title] = puzzle.board.rstrip()
return retv
TEST_BOARDS = load_parser_test_data()
class DescribeHexobanBoard(HexobanBoardAutogeneratedSpecMixin):
def it_raises_on_illegal_scheme(self):
input = TEST_BOARDS['illegal_scheme1']
with pytest.raises(BoardConversionError):
HexobanBoard(board_str=input)
input = TEST_BOARDS['illegal_scheme2']
with pytest.raises(BoardConversionError):
HexobanBoard(board_str=input)
class describe_parser_tests:
def perform_parser_test(self, input, result, out_width, out_height):
board = HexobanBoard(board_str=input)
assert board.to_str(use_visible_floor=True) == result
assert board == HexobanBoard(board_str=result)
assert str(board) == str(HexobanBoard(board_str=result))
assert board.width == out_width
assert board.height == out_height
def it_parses_scheme1_type1(self):
self.perform_parser_test(
input=TEST_BOARDS['input_scheme1_type1'],
result=TEST_BOARDS['output_scheme1_type1'],
out_width=10,
out_height=7
)
def it_parses_scheme2_type1(self):
self.perform_parser_test(
input=TEST_BOARDS['input_scheme2_type1'],
result=TEST_BOARDS['output_scheme2_type1'],
out_width=10,
out_height=7
)
def it_parses_scheme3_type1(self):
self.perform_parser_test(
input=TEST_BOARDS['input_scheme3_type1'],
result=TEST_BOARDS['output_scheme3_type1'],
out_width=10,
out_height=7
)
def it_parses_scheme4_type1(self):
self.perform_parser_test(
input=TEST_BOARDS['input_scheme4_type1'],
result=TEST_BOARDS['output_scheme4_type1'],
out_width=10,
out_height=7
)
def it_parses_scheme1_type2(self):
self.perform_parser_test(
input=TEST_BOARDS['input_scheme1_type2'],
result=TEST_BOARDS['output_scheme1_type2'],
out_width=10,
out_height=7
)
def it_parses_scheme2_type2(self):
self.perform_parser_test(
input=TEST_BOARDS['input_scheme2_type2'],
result=TEST_BOARDS['output_scheme2_type2'],
out_width=11,
out_height=7
)
def it_parses_scheme3_type2(self):
self.perform_parser_test(
input=TEST_BOARDS['input_scheme3_type2'],
result=TEST_BOARDS['output_scheme3_type2'],
out_width=10,
out_height=7
)
def it_parses_scheme4_type2(self):
self.perform_parser_test(
input=TEST_BOARDS['input_scheme4_type2'],
result=TEST_BOARDS['output_scheme4_type2'],
out_width=11,
out_height=7
)
def it_parses_hexocet_A(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_A'],
result=TEST_BOARDS['output_hexocet_A'],
out_width=9,
out_height=10
)
def it_parses_hexocet_Perfume(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_Perfume'],
result=TEST_BOARDS['output_hexocet_Perfume'],
out_width=8,
out_height=8
)
def it_parses_hexocet_Mud(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_Mud'],
result=TEST_BOARDS['output_hexocet_Mud'],
out_width=7,
out_height=10
)
def it_parses_hexocet_X(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_X'],
result=TEST_BOARDS['output_hexocet_X'],
out_width=7,
out_height=8
)
def it_parses_hexocet_Wildmil(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_Wildmil'],
result=TEST_BOARDS['output_hexocet_Wildmil'],
out_width=8,
out_height=8
)
def it_parses_hexocet_Four(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_Four'],
result=TEST_BOARDS['output_hexocet_Four'],
out_width=8,
out_height=9
)
def it_parses_hexocet_Bird(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_Bird'],
result=TEST_BOARDS['output_hexocet_Bird'],
out_width=7,
out_height=9
)
def it_parses_hexocet_V(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_V'],
result=TEST_BOARDS['output_hexocet_V'],
out_width=7,
out_height=7
)
def it_parses_hexocet_Bridge(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_Bridge'],
result=TEST_BOARDS['output_hexocet_Bridge'],
out_width=8,
out_height=9
)
def it_parses_hexocet_Gun(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_Gun'],
result=TEST_BOARDS['output_hexocet_Gun'],
out_width=8,
out_height=8
)
def it_parses_hexocet_Kite(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_Kite'],
result=TEST_BOARDS['output_hexocet_Kite'],
out_width=8,
out_height=9
)
def it_parses_hexocet_Fed(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_Fed'],
result=TEST_BOARDS['output_hexocet_Fed'],
out_width=8,
out_height=10
)
def it_parses_hexocet_Beetle(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_Beetle'],
result=TEST_BOARDS['output_hexocet_Beetle'],
out_width=8,
out_height=9
)
def it_parses_hexocet_LittleRabbit(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_LittleRabbit'],
result=TEST_BOARDS['output_hexocet_LittleRabbit'],
out_width=7,
out_height=7
)
def it_parses_hexocet_Losange(self):
self.perform_parser_test(
input=TEST_BOARDS['input_hexocet_Losange'],
result=TEST_BOARDS['output_hexocet_Losange'],
out_width=9,
out_height=8
)
class describe_row_and_column_reordering:
def test_reverses_columns_for_boards_type1(self):
input = TEST_BOARDS['input_scheme1_type1']
result = TEST_BOARDS['type1_columns_reversed']
board = HexobanBoard(board_str=input)
board.reverse_columns()
assert board.to_str(use_visible_floor=True) == result
assert board.width == 10
assert board.height == 7
def test_reverses_columns_for_boards_type2(self):
input = TEST_BOARDS['input_scheme1_type2']
result = TEST_BOARDS['type2_columns_reversed']
board = HexobanBoard(board_str=input)
board.reverse_columns()
assert board.to_str(use_visible_floor=True) == result
assert board.width == 11
assert board.height == 7
def test_reverses_rows_for_boards_type1(self):
input = TEST_BOARDS['input_scheme1_type1']
result = TEST_BOARDS['type1_rows_reversed']
board = HexobanBoard(board_str=input)
board.reverse_rows()
assert board.to_str(use_visible_floor=True) == result
assert board.width == 10
assert board.height == 7
def test_reverses_rows_for_boards_type2(self):
input = TEST_BOARDS['input_scheme1_type2']
result = TEST_BOARDS['type2_rows_reversed']
board = HexobanBoard(board_str=input)
board.reverse_rows()
assert board.to_str(use_visible_floor=True) == result
assert board.width == 10
assert board.height == 7
class describe_board_resizing:
def test_adds_row_top_type1(self):
input = TEST_BOARDS["input_scheme1_type1"]
result = TEST_BOARDS["row_added_type1_top"]
board = HexobanBoard(board_str=input)
board.add_row_top()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 8
assert board.width == 10
def test_adds_row_top_type2(self):
input = TEST_BOARDS["input_scheme1_type2"]
result = TEST_BOARDS["row_added_type2_top"]
board = HexobanBoard(board_str=input)
board.add_row_top()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 8
assert board.width == 11
def test_adds_row_bottom_type1(self):
input = TEST_BOARDS["input_scheme1_type1"]
result = TEST_BOARDS["row_added_type1_bottom"]
board = HexobanBoard(board_str=input)
board.add_row_bottom()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 8
assert board.width == 10
def test_adds_row_bottom_type2(self):
input = TEST_BOARDS["input_scheme1_type2"]
result = TEST_BOARDS["row_added_type2_bottom"]
board = HexobanBoard(board_str=input)
board.add_row_bottom()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 8
assert board.width == 10
def test_adds_column_left_type1(self):
input = TEST_BOARDS["input_scheme1_type1"]
result = TEST_BOARDS["column_added_type1_left"]
board = HexobanBoard(board_str=input)
board.add_column_left()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 7
assert board.width == 11
def test_adds_column_left_type2(self):
input = TEST_BOARDS["input_scheme1_type2"]
result = TEST_BOARDS["column_added_type2_left"]
board = HexobanBoard(board_str=input)
board.add_column_left()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 7
assert board.width == 11
def test_adds_column_right_type1(self):
input = TEST_BOARDS["input_scheme1_type1"]
result = TEST_BOARDS["column_added_type1_right"]
board = HexobanBoard(board_str=input)
board.add_column_right()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 7
assert board.width == 11
def test_adds_column_right_type2(self):
input = TEST_BOARDS["input_scheme1_type2"]
result = TEST_BOARDS["column_added_type2_right"]
board = HexobanBoard(board_str=input)
board.add_column_right()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 7
assert board.width == 11
def test_removes_row_top_type1(self):
input = TEST_BOARDS["input_scheme1_type1"]
result = TEST_BOARDS["row_removed_type1_top"]
board = HexobanBoard(board_str=input)
board.remove_row_top()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 6
assert board.width == 10
def test_removes_row_top_type2(self):
input = TEST_BOARDS["input_scheme1_type2"]
result = TEST_BOARDS["row_removed_type2_top"]
board = HexobanBoard(board_str=input)
board.remove_row_top()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 6
assert board.width == 11
def test_removes_row_bottom_type1(self):
input = TEST_BOARDS["input_scheme1_type1"]
result = TEST_BOARDS["row_removed_type1_bottom"]
board = HexobanBoard(board_str=input)
board.remove_row_bottom()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 6
assert board.width == 10
def test_removes_row_bottom_type2(self):
input = TEST_BOARDS["input_scheme1_type2"]
result = TEST_BOARDS["row_removed_type2_bottom"]
board = HexobanBoard(board_str=input)
board.remove_row_bottom()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 6
assert board.width == 10
def test_removes_column_left_type1(self):
input = TEST_BOARDS["input_scheme1_type1"]
result = TEST_BOARDS["column_removed_type1_left"]
board = HexobanBoard(board_str=input)
board.remove_column_left()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 7
assert board.width == 9
def test_removes_column_left_type2(self):
input = TEST_BOARDS["input_scheme1_type2"]
result = TEST_BOARDS["column_removed_type2_left"]
board = HexobanBoard(board_str=input)
board.remove_column_left()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 7
assert board.width == 9
def test_removes_column_right_type1(self):
input = TEST_BOARDS["input_scheme1_type1"]
result = TEST_BOARDS["column_removed_type1_right"]
board = HexobanBoard(board_str=input)
board.remove_column_right()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 7
assert board.width == 9
def test_removes_column_right_type2(self):
input = TEST_BOARDS["input_scheme1_type2"]
result = TEST_BOARDS["column_removed_type2_right"]
board = HexobanBoard(board_str=input)
board.remove_column_right()
assert board.to_str(use_visible_floor=True) == result
assert board.height == 7
assert board.width == 9
def test_resizes_board_to_bigger(self):
input = TEST_BOARDS["input_scheme1_type1"]
board = HexobanBoard(board_str=input)
old_width = board.width
old_height = board.height
board.resize(board.width + 5, board.height + 5)
assert board.width == old_width + 5
assert board.height == old_height + 5
def test_resizes_board_to_smaller(self):
input = TEST_BOARDS["input_scheme1_type1"]
board = HexobanBoard(board_str=input)
old_height = board.height
board.resize(board.width - 3, board.height - 3)
assert board.width == 7
assert board.height == old_height - 3
| gpl-3.0 | 1,282,884,382,846,484,700 | 37.334873 | 96 | 0.552744 | false |
symac/wikidata | rugby/01_getListeJoueurs.py | 1 | 1035 | # -*- coding: utf-8 -*-
#!/usr/bin/python
import MySQLdb
import pywikibot
import sys
import re
import mwparserfromhell
from pywikibot import pagegenerators
'''
This script retrieves all the pages that use the "Infobox Rugbyman" template.
'''
site = pywikibot.Site("fr", "wikipedia")
def parse(title):
page = pywikibot.Page(site, title)
text = page.get()
return mwparserfromhell.parse(text)
liste = pagegenerators.ReferringPageGenerator(pywikibot.Page(site, u"Modèle:Infobox Rugbyman"), onlyTemplateInclusion=True)
for page in liste:
    print page.title().encode("utf-8")
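
# Example: inspect one player's infobox ("saison" and "club" fields).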
parsedText = parse("Mathieu Bourret")
templates = parsedText.filter_templates()
for tpl in templates:
if tpl.name.upper().strip() == "INFOBOX RUGBYMAN":
print ">>%s<<" % tpl.name.strip().encode("utf-8")
saisons = re.split("<br ?\/>", str(tpl.get("saison").value))
clubs = re.split("<br ?\/>", str(tpl.get("club").value))
print clubs
print "%s - %s" % (len(clubs), len(saisons))
# pywikibot.extract_templates_and_params | gpl-2.0 | -8,651,722,581,256,401,000 | 26.184211 | 123 | 0.710271 | false |
altaurog/django-caspy | tests/unit/test_adaptor.py | 1 | 9434 | from datetime import datetime, date
from caspy.domain import models as domain
from caspy import models as db
from caspy import django_orm as orm
import pytest
class TestGetField:
def test_foreign(self):
class X:
pass
o = X()
assert orm.get_field(o) is o
assert orm.get_field(o, 'something') is o
def test_domain(self):
o = domain.Book(book_id=1, name='1996')
assert orm.get_field(o) == 1
def test_domain_field_name(self):
o = domain.Book(book_id=1, name='1996')
assert orm.get_field(o, 'name') == '1996'
def test_domain_bad_field(self):
o = domain.Book(book_id=1, name='1996')
with pytest.raises(AttributeError):
assert orm.get_field(o, 'bad')
class TestCurrency:
def test_domain_to_orm(self):
obj = domain.Currency(
cur_code='USD',
shortcut='$',
symbol='$',
long_name='US Dollar',
)
instance = orm.domain_to_orm(obj)
assert isinstance(instance, db.Currency)
assert instance.cur_code == 'USD'
assert instance.shortcut == '$'
assert instance.symbol == '$'
assert instance.long_name == 'US Dollar'
def test_orm_to_domain(self):
instance = db.Currency(
cur_code='USD',
shortcut='$',
symbol='$',
long_name='US Dollar',
)
obj = orm.orm_to_domain(instance)
assert isinstance(obj, domain.Currency)
assert obj.cur_code == 'USD'
assert obj.shortcut == '$'
assert obj.symbol == '$'
assert obj.long_name == 'US Dollar'
class TestBook:
the_time = datetime(2015, 6, 7, 13, 30)
def test_domain_to_orm(self):
obj = domain.Book(
name='Test Book',
created_at=self.the_time,
)
instance = orm.domain_to_orm(obj)
assert isinstance(instance, db.Book)
assert instance.book_id is None
assert instance.name == 'Test Book'
assert instance.created_at == self.the_time
obj.book_id = 1
instance = orm.domain_to_orm(obj)
assert instance.book_id == 1
def test_orm_to_domain(self):
instance = db.Book(
book_id=2,
name='Test Book',
created_at=self.the_time,
)
obj = orm.orm_to_domain(instance)
assert isinstance(obj, domain.Book)
assert obj.book_id == 2
assert obj.name == 'Test Book'
assert obj.created_at == self.the_time
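
# Round-trip illustration of the adaptor under test (hypothetical REPL
# session, distilled from the cases above):
#
#     >>> inst = orm.domain_to_orm(domain.Book(book_id=1, name='1996'))
#     >>> orm.orm_to_domain(inst).name
#     '1996'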
class TestAccountType:
def test_domain_to_orm(self):
obj = domain.AccountType(
account_type='Income',
sign=True,
credit_term='income',
debit_term='expense',
)
instance = orm.domain_to_orm(obj)
assert isinstance(instance, db.AccountType)
assert instance.account_type == 'Income'
assert instance.sign is True
assert instance.credit_term == 'income'
assert instance.debit_term == 'expense'
def test_orm_to_domain(self):
instance = db.AccountType(
account_type='Income',
sign=True,
credit_term='income',
debit_term='expense',
)
obj = orm.orm_to_domain(instance)
assert isinstance(obj, domain.AccountType)
assert obj.account_type == 'Income'
assert obj.sign is True
assert obj.credit_term == 'income'
assert obj.debit_term == 'expense'
class TestAccount:
book = domain.Book(book_id=3, name='Test Book')
currency = domain.Currency(cur_code='USD', symbol='$')
account_type = domain.AccountType(account_type='Expense')
def test_deep_domain_to_orm(self):
obj = domain.Account(
account_id=10,
parent_id=2,
path='Expense::Education',
name='Education',
book=self.book,
account_type=self.account_type,
currency=self.currency,
description='Education Expense',
)
instance = orm.domain_to_orm(obj)
assert isinstance(instance, db.Account)
assert instance.book_id == 3
assert instance.parent_id == 2
assert instance.currency_id == 'USD'
assert instance.account_type_id == 'Expense'
assert instance.account_id == 10
assert instance.name == 'Education'
assert instance.path == 'Expense::Education'
assert instance.description == 'Education Expense'
def test_shallow_domain_to_orm(self):
obj = domain.Account(
account_id=10,
name='Education',
book=3,
account_type='Expense',
currency='USD',
description='Education Expense',
)
instance = orm.domain_to_orm(obj)
assert isinstance(instance, db.Account)
assert instance.book_id == 3
assert instance.currency_id == 'USD'
assert instance.account_type_id == 'Expense'
assert instance.account_id == 10
assert instance.name == 'Education'
assert instance.description == 'Education Expense'
def test_shallow_orm_to_domain(self):
instance = db.Account(
account_id=11,
name='Utilities',
book_id=3,
account_type_id='Expense',
currency_id='USD',
description='Utilities Expense',
)
obj = orm.orm_to_domain(instance)
assert isinstance(obj, domain.Account)
assert obj.account_id == 11
assert obj.name == 'Utilities'
assert obj.book == 3
assert obj.account_type == 'Expense'
assert obj.currency == 'USD'
assert obj.description == 'Utilities Expense'
def test_deep_orm_to_domain(self):
book = db.Book(book_id=3, name='Test Book')
account_type = db.AccountType(account_type='Expense')
currency = db.Currency(cur_code='NIS')
instance = db.Account(
account_id=11,
name='Utilities',
book=book,
account_type=account_type,
currency=currency,
description='Utilities Expense',
)
instance.parent_id = 2
instance.path = 'Expense::Utilities'
obj = orm.orm_to_domain(instance)
assert isinstance(obj, domain.Account)
assert obj.account_id == 11
assert obj.name == 'Utilities'
assert obj.book == 3
assert obj.account_type == 'Expense'
assert obj.currency == 'NIS'
assert obj.description == 'Utilities Expense'
assert obj.parent_id == 2
assert obj.path == 'Expense::Utilities'
class TestTransaction:
def test_domain_to_orm(self):
test_date = date(2015, 7, 21)
test_desc = 'Test Transaction Description'
obj = domain.Transaction(
transaction_id=2,
date=test_date,
description=test_desc,
)
instance = orm.domain_to_orm(obj)
assert isinstance(instance, db.Transaction)
assert instance.transaction_id == 2
assert instance.date == test_date
assert instance.description == test_desc
def test_orm_to_domain(self):
test_date = date(2015, 7, 20)
test_desc = 'Test Transaction ORM to Domain'
instance = db.Transaction(
transaction_id=4,
date=test_date,
description=test_desc,
)
obj = orm.orm_to_domain(instance)
assert isinstance(obj, domain.Transaction)
assert obj.transaction_id == 4
assert obj.date == test_date
assert obj.description == test_desc
assert obj.splits == []
class TestSplit:
def test_domain_to_orm(self):
obj = domain.Split(
split_id=613,
number='365',
description='Split Desc',
account_id=12,
status='n',
amount='18',
)
instance = orm.domain_to_orm(obj)
assert isinstance(instance, db.Split)
assert instance.split_id == 613
assert instance.number == '365'
assert instance.description == 'Split Desc'
assert instance.account_id == 12
assert instance.status == 'n'
assert instance.amount == '18'
def test_orm_to_domain(self):
instance = db.Split(
split_id=248,
transaction_id=50,
number='120',
description='Test Split Desc',
account_id=10,
status='c',
amount='10.37',
)
obj = orm.orm_to_domain(instance)
assert isinstance(instance, db.Split)
assert obj.split_id == 248
assert obj.number == '120'
assert obj.description == 'Test Split Desc'
assert obj.account_id == 10
assert obj.status == 'c'
assert obj.amount == '10.37'
with pytest.raises(AttributeError):
obj.transaction
| bsd-3-clause | -5,163,583,924,773,448,000 | 32.935252 | 61 | 0.53816 | false |
tbenthompson/quadracheer | tests/test_piessens.py | 1 | 3323 | import numpy as np
from quadracheer.piessens import piessen_neg_one_to_one_nodes,\
piessen_method, piessens
from quadracheer.map import map_singular_pt, map_pts_wts
def test_piessen_neg_1_1():
# Example 1 from Piessens
f = lambda x: np.exp(x)
exact = 2.11450175
piessen_est = 2.11450172
x, w = piessen_neg_one_to_one_nodes(2)
est = np.sum(f(x) * w)
np.testing.assert_almost_equal(piessen_est, est)
def test_piessen_0_1():
# Example 1 from Piessens mapped to [0,1]
g = lambda x: np.exp(x)
f = lambda x: g((2 * x) - 1)
exact = 2.11450175
piessen_est = 2.11450172
x, w = piessen_method(2, 0.0, 1.0, 0.5, False)
est = np.sum(f(x) * w)
np.testing.assert_almost_equal(piessen_est, est)
def test_piessen_0_1_with_singularity():
# Example 1 from Piessens mapped to [0,1] and with singularity
g = lambda x: np.exp(x) / x
f = lambda x: 2 * g((2 * x) - 1)
exact = 2.11450175
piessen_est = 2.11450172
x, w = piessen_method(2, 0.0, 1.0, 0.5)
est = np.sum(f(x) * w)
np.testing.assert_almost_equal(piessen_est, est)
def test_QuadOneOverR_1():
f = lambda x: 1 / (x - 0.4)
exact = np.log(3.0 / 2.0)
mapped_x0 = map_singular_pt(0.4, 0.0, 1.0)
x, w = piessens(2, mapped_x0, nonsingular_N = 10)
qx, qw = map_pts_wts(x, w, 0.0, 1.0)
est = np.sum(f(qx) * qw)
np.testing.assert_almost_equal(exact, est)
def test_QuadOneOverR_2():
# Example 1 from Piessens
g = lambda x: np.exp(x) / x
f = lambda x: 2 * g((2 * x) - 1)
exact = 2.11450175
mapped_x0 = map_singular_pt(0.5, 0.0, 1.0)
x, w = piessens(8, mapped_x0, nonsingular_N = 10)
qx, qw = map_pts_wts(x, w, 0.0, 1.0)
est = np.sum(f(qx) * qw)
np.testing.assert_almost_equal(exact, est)
def test_QuadOneOverR_3():
# Example 2 from Piessens
g = lambda x: np.exp(x) / (np.sin(x) - np.cos(x))
f = lambda x: np.pi / 2.0 * g(np.pi / 2.0 * x)
exact = 2.61398312
# Piessens estimate derived with a two pt rule.
piessens_est = 2.61398135
mapped_x0 = map_singular_pt(0.5, 0.0, 1.0)
x, w = piessens(2, mapped_x0)
qx, qw = map_pts_wts(x, w, 0.0, 1.0)
est = np.sum(f(qx) * qw)
np.testing.assert_almost_equal(piessens_est, est)
# Tests x in the upper half of the interval
def test_QuadOneOverR_4():
f = lambda x: np.exp(x) / (x - 0.8)
exact = -1.13761642399
mapped_x0 = map_singular_pt(0.8, 0.0, 1.0)
x, w = piessens(2, mapped_x0, nonsingular_N = 20)
qx, qw = map_pts_wts(x, w, 0.0, 1.0)
est = np.sum(f(qx) * qw)
np.testing.assert_almost_equal(exact, est)
# Tests x in the lower half of the interval.
def test_QuadOneOverR_5():
f = lambda x: np.exp(x) / (x - 0.2)
exact = 3.139062607254266
mapped_x0 = map_singular_pt(0.2, 0.0, 1.0)
x, w = piessens(2, mapped_x0, nonsingular_N = 50)
qx, qw = map_pts_wts(x, w, 0.0, 1.0)
est = np.sum(f(qx) * qw)
np.testing.assert_almost_equal(exact, est)
def test_piessens_4_5():
f = lambda x: np.exp(x - 4) / (x - 4.2)
exact = 3.139062607254266
mapped_x0 = map_singular_pt(4.2, 4.0, 5.0)
x, w = piessens(20, mapped_x0, nonsingular_N = 50)
qx, qw = map_pts_wts(x, w, 4.0, 5.0)
est = np.sum(f(qx) * qw)
np.testing.assert_almost_equal(exact, est)
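

# Illustrative helper, not a test: the same node/weight pattern as the tests
# above applied to a fresh integrand. The singular point 0.3 and the node
# counts are arbitrary choices for this sketch.
def example_singular_quadrature():
    f = lambda x: np.exp(x) / (x - 0.3)
    mapped_x0 = map_singular_pt(0.3, 0.0, 1.0)
    x, w = piessens(20, mapped_x0, nonsingular_N=50)
    qx, qw = map_pts_wts(x, w, 0.0, 1.0)
    return np.sum(f(qx) * qw)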
| mit | 6,857,235,317,311,616,000 | 31.262136 | 66 | 0.589828 | false |
pirate42/docc | docc/image.py | 1 | 2823 | # coding=utf-8
from docc.exceptions import APIError
class Image(object):
"""Represent an Image object (name and distribution information)"""
def __init__(self, identifier, name, distribution):
self.id = identifier
self.name = name
self.distribution = distribution
def __repr__(self):
return "<%s: %s>" % (self.id, self.name)
def __str__(self):
return "%s: %s, %s" % (self.id, self.name, self.distribution)
def __eq__(self, other):
return (
isinstance(other, self.__class__) and
self.__dict__ == other.__dict__
)
def __ne__(self, other):
return not self.__eq__(other)
def destroy(self, service):
"""Destroy this image"""
response = service.get("images/%s/destroy" % self.id)
status = response['status']
return status == 'OK'
@staticmethod
def get(service, identifier):
"""Return the Image given an identifier and None if not found.
:param identifier: TODO
:param service: The service object for the Digital Ocean account
that holds the images
"""
try:
response = service.get('images/%s' % identifier)
        except APIError:
return None
encoded_image = response['image']
i = Image(encoded_image['id'],
encoded_image['name'],
encoded_image['distribution']
)
return i
@staticmethod
def __images(service, my_filter=None):
"""Return the a list containing all the know images.
:param service: The service object for the Digital Ocean account that
holds the images
:param my_filter: Should be absent, 'my_images', 'global'. If 'all'
this will return all the images you have access to. 'my_images' will
return the images you stored and 'global' the images available to all
customers.
"""
if my_filter is None:
response = service.get("images")
else:
response = service.get("images", {'filter': my_filter})
encoded_images = response['images']
result = []
for encoded_image in encoded_images:
i = Image(encoded_image['id'], encoded_image['name'],
encoded_image['distribution'])
result.append(i)
return result
@staticmethod
def images(service):
"""Return all the known images included mine"""
return Image.__images(service)
@staticmethod
def my_images(service):
"""Return my images"""
return Image.__images(service, 'my_images')
@staticmethod
def global_images(service):
"""Return globally available images"""
return Image.__images(service, 'global') | mit | 7,968,175,958,036,338,000 | 28.726316 | 77 | 0.57492 | false |
Unity-Technologies/ml-agents | ml-agents-envs/mlagents_envs/registry/remote_registry_entry.py | 1 | 3260 | from sys import platform
from typing import Optional, Any, List
from mlagents_envs.environment import UnityEnvironment
from mlagents_envs.base_env import BaseEnv
from mlagents_envs.registry.binary_utils import get_local_binary_path
from mlagents_envs.registry.base_registry_entry import BaseRegistryEntry
class RemoteRegistryEntry(BaseRegistryEntry):
def __init__(
self,
identifier: str,
expected_reward: Optional[float],
description: Optional[str],
linux_url: Optional[str],
darwin_url: Optional[str],
win_url: Optional[str],
additional_args: Optional[List[str]] = None,
):
"""
A RemoteRegistryEntry is an implementation of BaseRegistryEntry that uses a
Unity executable downloaded from the internet to launch a UnityEnvironment.
__Note__: The url provided must be a link to a `.zip` file containing a single
compressed folder with the executable inside. There can only be one executable
in the folder and it must be at the root of the folder.
:param identifier: The name of the Unity Environment.
:param expected_reward: The cumulative reward that an Agent must receive
for the task to be considered solved.
:param description: A description of the Unity Environment. Contains human
readable information about potential special arguments that the make method can
take as well as information regarding the observation, reward, actions,
behaviors and number of agents in the Environment.
:param linux_url: The url of the Unity executable for the Linux platform
:param darwin_url: The url of the Unity executable for the OSX platform
:param win_url: The url of the Unity executable for the Windows platform
"""
super().__init__(identifier, expected_reward, description)
self._linux_url = linux_url
self._darwin_url = darwin_url
self._win_url = win_url
self._add_args = additional_args
def make(self, **kwargs: Any) -> BaseEnv:
"""
Returns the UnityEnvironment that corresponds to the Unity executable found at
the provided url. The arguments passed to this method will be passed to the
constructor of the UnityEnvironment (except for the file_name argument)
"""
url = None
if platform == "linux" or platform == "linux2":
url = self._linux_url
if platform == "darwin":
url = self._darwin_url
if platform == "win32":
url = self._win_url
if url is None:
raise FileNotFoundError(
f"The entry {self.identifier} does not contain a valid url for this "
"platform"
)
path = get_local_binary_path(self.identifier, url)
if "file_name" in kwargs:
kwargs.pop("file_name")
args: List[str] = []
if "additional_args" in kwargs:
if kwargs["additional_args"] is not None:
args += kwargs["additional_args"]
if self._add_args is not None:
args += self._add_args
kwargs["additional_args"] = args
return UnityEnvironment(file_name=path, **kwargs)
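

# Illustrative usage (identifier and URL below are placeholders, not a real
# registry entry; real entries normally come from mlagents_envs.registry):
#
#     entry = RemoteRegistryEntry(
#         identifier="MyEnv",
#         expected_reward=1.0,
#         description="Example environment",
#         linux_url="https://example.com/MyEnv_linux.zip",
#         darwin_url=None,
#         win_url=None,
#     )
#     env = entry.make(no_graphics=True)  # extra kwargs go to UnityEnvironment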
| apache-2.0 | 8,832,754,937,924,894,000 | 44.915493 | 87 | 0.647546 | false |
quantopian/metautils | metautils/singleton.py | 1 | 1394 | #
# Copyright 2015 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from metautils import T, templated
def _singleton_new(cls, *args, **kwargs):
"""
An invalid new for singleton objects.
"""
raise TypeError(
"'{0}' cannot be instantiated because it is a singleton".format(
cls.__name__,
),
)
class Singleton(T):
"""
Turns a class statement into an object instantiation to create a
single instance of a class.
This is like the `object` keyword from scala; however, this
does not support companion objects.
"""
@templated
def __new__(mcls, name, bases, dict_, T_, **kwargs):
dict_['__name__'] = name
cls = T_.__new__(mcls, name, bases, dict_)
inst = cls(**kwargs)
# Prevent another instance from being made.
cls.__new__ = _singleton_new
return inst
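

# Illustrative usage sketch. It assumes metautils' template protocol, in which
# a template metaclass is made concrete by calling it with a base metaclass
# (e.g. Singleton(type)); that call syntax is an inference from the code
# above, not something verified against metautils' documentation:
#
#     class config(object):
#         __metaclass__ = Singleton(type)  # Python 2 spelling
#         path = '/etc/app.conf'
#
#     # `config` is now the single instance; instantiating its class again
#     # raises TypeError via _singleton_new.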
| apache-2.0 | 591,549,726,284,844,900 | 30.681818 | 74 | 0.657819 | false |
wq/wq.db | tests/test_router.py | 1 | 2694 | from .base import APITestCase
from django.core.exceptions import ImproperlyConfigured
try:
from django.urls import include
except ImportError:
from django.conf.urls import include
class RestRouterTestCase(APITestCase):
def test_rest_model_conflict(self):
from wq.db import rest
from tests.conflict_app.models import Item
# Register model with same name as existing model
with self.assertRaises(ImproperlyConfigured) as e:
rest.router.register_model(Item, fields="__all__")
self.assertEqual(
e.exception.args[0],
"Could not register <class 'tests.conflict_app.models.Item'>: "
"the name 'item' was already registered for "
"<class 'tests.rest_app.models.Item'>"
)
self.assertNotIn(Item, rest.router._models)
# Register model with different name, but same URL as existing model
with self.assertRaises(ImproperlyConfigured) as e:
rest.router.register_model(
Item, name="conflictitem", fields="__all__"
)
self.assertEqual(
e.exception.args[0],
"Could not register <class 'tests.conflict_app.models.Item'>: "
"the url 'items' was already registered for "
"<class 'tests.rest_app.models.Item'>"
)
self.assertNotIn(Item, rest.router._models)
# Register model with different name and URL
rest.router.register_model(
Item, name="conflictitem", url="conflictitems", fields="__all__"
)
self.assertIn(Item, rest.router._models)
self.assertIn("conflictitem", rest.router.get_config()['pages'])
def test_rest_old_config(self):
from wq.db import rest
from tests.conflict_app.models import TestModel
with self.assertRaises(ImproperlyConfigured):
rest.router.register_model(
TestModel,
partial=True,
fields="__all__"
)
self.assertNotIn(TestModel, rest.router._models)
with self.assertRaises(ImproperlyConfigured):
rest.router.register_model(
TestModel,
reversed=True,
fields="__all__"
)
self.assertNotIn(TestModel, rest.router._models)
with self.assertRaises(ImproperlyConfigured):
rest.router.register_model(
TestModel,
max_local_pages=0,
fields="__all__"
)
self.assertNotIn(TestModel, rest.router._models)
def test_rest_include(self):
from wq.db import rest
include(rest.router.urls)
| mit | -4,957,441,498,676,931,000 | 33.538462 | 76 | 0.598367 | false |
pmitche/it3105-aiprogramming | project3/module6/deeplearning/layer.py | 1 | 1224 | import numpy as np
import theano
import theano.tensor as T
class HiddenLayer(object):
def __init__(self, input, num_in, number_of_nodes, activation):
self.num_in = num_in
self.number_of_nodes = number_of_nodes
self.weights = self.init_weights(activation)
self.output = activation(T.dot(input, self.weights))
self.params = [self.weights]
    def init_weights(self, activation):
        # Default: Glorot/Bengio-style uniform initialization, the usual
        # choice for tanh activations
        weights = np.asarray(
            np.random.uniform(
                low=-np.sqrt(6. / (self.num_in + self.number_of_nodes)),
                high=np.sqrt(6. / (self.num_in + self.number_of_nodes)),
                size=(self.num_in, self.number_of_nodes)
            ),
            dtype=theano.config.floatX
        )
        if activation == T.nnet.sigmoid:
            # Glorot & Bengio recommend 4x larger bounds for sigmoid units
            weights *= 4
        elif activation == T.nnet.softmax:
            weights = np.zeros((self.num_in, self.number_of_nodes), dtype=theano.config.floatX)
        elif activation == T.nnet.relu:
            # small positive weights; cast so the shared variable matches floatX
            weights = np.random.uniform(
                low=0.0, high=0.1, size=(self.num_in, self.number_of_nodes)
            ).astype(theano.config.floatX)
        return theano.shared(value=weights, name='weights', borrow=True)
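
# Minimal usage sketch (assumes Theano is installed; the layer sizes are
# arbitrary illustration values):
def example_forward_pass():
    x = T.matrix('x')
    layer = HiddenLayer(input=x, num_in=784, number_of_nodes=100,
                        activation=T.tanh)
    return theano.function([x], layer.output)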
| mit | 273,362,223,681,299,230 | 33 | 100 | 0.596405 | false |
BlackHole/enigma2-obh10 | lib/python/Tools/Downloader.py | 1 | 2535 | from boxbranding import getMachineBrand, getMachineName
from twisted.web import client
from twisted.internet import reactor, defer
from urlparse import urlparse
class HTTPProgressDownloader(client.HTTPDownloader):
def __init__(self, url, outfile, headers=None):
client.HTTPDownloader.__init__(self, url, outfile, headers=headers, agent="%s %s Enigma2 HbbTV/1.1.1 (+PVR+RTSP+DL;OpenBh;;;)" % (getMachineBrand(), getMachineName()))
self.status = self.progress_callback = self.error_callback = self.end_callback = None
self.deferred = defer.Deferred()
def noPage(self, reason):
if self.status == "304":
client.HTTPDownloader.page(self, "")
else:
client.HTTPDownloader.noPage(self, reason)
if self.error_callback:
self.error_callback(reason.getErrorMessage(), self.status)
def gotHeaders(self, headers):
if self.status == "200":
if "content-length" in headers:
self.totalbytes = int(headers["content-length"][0])
else:
self.totalbytes = 0
self.currentbytes = 0.0
return client.HTTPDownloader.gotHeaders(self, headers)
def pagePart(self, packet):
if self.status == "200":
self.currentbytes += len(packet)
if self.totalbytes and self.progress_callback:
self.progress_callback(self.currentbytes, self.totalbytes)
return client.HTTPDownloader.pagePart(self, packet)
def pageEnd(self):
ret = client.HTTPDownloader.pageEnd(self)
if self.end_callback:
self.end_callback()
return ret
class downloadWithProgress:
def __init__(self, url, outputfile, contextFactory=None, *args, **kwargs):
parsed = urlparse(url)
scheme = parsed.scheme
host = parsed.hostname
port = parsed.port or (443 if scheme == 'https' else 80)
self.factory = HTTPProgressDownloader(url, outputfile, *args, **kwargs)
if scheme == 'https':
from twisted.internet import ssl
if contextFactory is None:
contextFactory = ssl.ClientContextFactory()
self.connection = reactor.connectSSL(host, port, self.factory, contextFactory)
else:
self.connection = reactor.connectTCP(host, port, self.factory)
def start(self):
return self.factory.deferred
def stop(self):
if self.connection:
self.factory.progress_callback = self.factory.end_callback = self.factory.error_callback = None
self.connection.disconnect()
def addProgress(self, progress_callback):
self.factory.progress_callback = progress_callback
def addEnd(self, end_callback):
self.factory.end_callback = end_callback
def addError(self, error_callback):
self.factory.error_callback = error_callback
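
# Illustrative usage (must run inside a Twisted reactor; the URL, path and
# callbacks below are placeholders):
#
#     d = downloadWithProgress("http://example.com/file.bin", "/tmp/file.bin")
#     d.addProgress(lambda current, total: None)
#     d.start().addCallback(lambda data: None).addErrback(lambda failure: None)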
| gpl-2.0 | -6,097,105,769,769,699,000 | 32.8 | 169 | 0.737673 | false |
klocey/DiversityTools | StatPak/ACE.py | 1 | 3982 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
#scikit-bio/skbio/diversity/alpha/_ace.py
# Greg Caporaso (gregcaporaso) on Aug 7, 2014 -- API: moved base.py to _base.py
#!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
import numpy as np
import os
import sys
mydir = os.path.expanduser("~/Desktop/Repos/rare-bio/tools/StatPak")
sys.path.append(mydir)
import DiversityMetrics as DM
from DiversityMetrics import _validate, osd
def ace(counts, rare_threshold=10):
"""Calculate the ACE metric (Abundance-based Coverage Estimator).
Parameters
----------
counts : 1-D array_like, int
Vector of counts.
rare_threshold : int, optional
Threshold at which an OTU containing as many or fewer individuals will
be considered rare.
Returns
-------
double
Computed ACE metric.
Raises
------
ValueError
If every rare OTU is a singleton.
Notes
-----
ACE was first introduced in [1]_ and [2]_. The implementation here is based
on the description given in the EstimateS manual [3]_.
If no rare OTUs exist, returns the number of abundant OTUs. The default
value of 10 for `rare_threshold` is based on [4]_.
If `counts` contains zeros, indicating OTUs which are known to exist in the
environment but did not appear in the sample, they will be ignored for the
purpose of calculating the number of rare OTUs.
References
----------
.. [1] Chao, A. & S.-M Lee. 1992 Estimating the number of classes via
sample coverage. Journal of the American Statistical Association 87,
210-217.
.. [2] Chao, A., M.-C. Ma, & M. C. K. Yang. 1993. Stopping rules and
estimation for recapture debugging with unequal failure rates.
Biometrika 80, 193-201.
.. [3] http://viceroy.eeb.uconn.edu/estimates/
.. [4] Chao, A., W.-H. Hwang, Y.-C. Chen, and C.-Y. Kuo. 2000. Estimating
the number of shared species in two communities. Statistica Sinica
10:227-246.
"""
counts = _validate(counts)
freq_counts = np.bincount(counts)
s_rare = _otus_rare(freq_counts, rare_threshold)
singles = freq_counts[1]
if singles > 0 and singles == s_rare:
raise ValueError("The only rare OTUs are singletons, so the ACE "
"metric is undefined. EstimateS suggests using "
"bias-corrected Chao1 instead.")
s_abun = _otus_abundant(freq_counts, rare_threshold)
if s_rare == 0:
return s_abun
n_rare = _number_rare(freq_counts, rare_threshold)
c_ace = 1 - singles / n_rare
top = s_rare * _number_rare(freq_counts, rare_threshold, gamma=True)
bottom = c_ace * n_rare * (n_rare - 1)
gamma_ace = (top / bottom) - 1
if gamma_ace < 0:
gamma_ace = 0
return s_abun + (s_rare / c_ace) + ((singles / c_ace) * gamma_ace)
def _otus_rare(freq_counts, rare_threshold):
"""Count number of rare OTUs."""
return freq_counts[1:rare_threshold + 1].sum()
def _otus_abundant(freq_counts, rare_threshold):
"""Count number of abundant OTUs."""
return freq_counts[rare_threshold + 1:].sum()
def _number_rare(freq_counts, rare_threshold, gamma=False):
"""Return number of individuals in rare OTUs.
``gamma=True`` generates the ``n_rare`` used for the variation coefficient.
"""
n_rare = 0
if gamma:
for i, j in enumerate(freq_counts[:rare_threshold + 1]):
n_rare = n_rare + (i * j) * (i - 1)
else:
for i, j in enumerate(freq_counts[:rare_threshold + 1]):
n_rare = n_rare + (i * j)
return n_rare
| mit | 2,663,075,253,608,483,000 | 33.626087 | 79 | 0.615018 | false |
sanjayankur31/pyjigdo | pyJigdo/base.py | 1 | 5381 | #
# Copyright 2007-2009 Fedora Unity Project (http://fedoraunity.org)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import sys, os
from urlparse import urlparse
import pyJigdo.logger
import pyJigdo.pyasync
from pyJigdo.jigdo import JigdoFile
from pyJigdo.translate import _, N_
class PyJigdoBase:
""" PyJigdoBase is the primary object that should be called back to.
This object should be aware of all other objects and maintain their
relationships and states. This class should be used to lookup all
objects and their children. """
def __init__(self, pyjigdo_entry):
""" Initializes the PyJigdoBase class with the options specified
from the command line. Setup all basic options and get things
setup to start creating Jigdo objects. """
self.jigdo_files = {} # {JigdoFile().id: JigdoFile(),}
# FIXME: Populate these entry points.
#self.jigdo_templates = {} # {JigdoTemplate().id: JigdoTemplate(),}
#self.jigdo_slices = {} # {JigdoSlice().id: JigdoSlice(),}
#self.slice_sources = {} # {SliceSource().id: SliceSource(),}
# / FIXME
self.log = None # PyJigdoLogger()
self.async = None # PyJigdoReactor()
self.stats = None # PyJigdoStats()
self.interface = None # PyJigdoTextInterface()
self.scan_targets = [] # [PyJigdoScanTarget(),]
# Set our exit points to callback.
self.abort = pyjigdo_entry.abort
self.done = pyjigdo_entry.done
# Get the options parser, and bring it's options
# and args into this namespace.
self.parser = pyjigdo_entry.parser
self.settings = pyjigdo_entry.cli_options
self.args_jigdo_files = pyjigdo_entry.jigdo_files
# Setup Logging.
self.create_logger()
def run(self):
""" Start up the reactor and start performing operations to
put the Jigdo together. """
# Setup Reactor
self.async = pyJigdo.pyasync.PyJigdoReactor( self.log,
threads = self.settings.download_threads,
timeout = self.settings.download_timeout )
# Prepare Jigdo
if self.prep_jigdo_files():
# Seed Reactor
self.async.seed(self)
else:
self.log.critical(_("Seems there is nothing to do!"))
return self.done()
def create_logger(self):
""" Create a logger instance setting an appropriate loglevel
based on runtime options. """
loglevel = pyJigdo.logger.CRITICAL
if self.settings.verbose >= 3:
loglevel = pyJigdo.logger.DEBUG
elif self.settings.verbose == 2:
loglevel = pyJigdo.logger.INFO
elif self.settings.verbose == 1:
loglevel = pyJigdo.logger.WARNING
if self.settings.debug:
loglevel = pyJigdo.logger.DEBUG
# Initialize the logging object
self.log = pyJigdo.logger.pyJigdoLogger( self.settings.log_file,
loglevel = loglevel )
def prep_jigdo_files(self):
""" Prepare selected Jigdo downloads for injection into our reactor. """
for jigdo in self.args_jigdo_files:
self.log.info(_("Prepping Jigdo file %s ") % jigdo)
jigdo_url = urlparse(jigdo)
jigdo_filename = os.path.basename(jigdo_url.path)
if jigdo_url.scheme or \
(not jigdo_url.scheme and os.path.isfile(jigdo_url.path)):
jigdo_storage_location = os.path.join( self.settings.download_target,
jigdo_filename )
self.log.debug(_("Adding Jigdo file %s" % jigdo_url.geturl()))
self.log.debug(_("Storing Jigdo %s at %s" % ( jigdo_filename,
jigdo_storage_location )))
self.jigdo_files[jigdo] = JigdoFile( self.log,
self.async,
self.settings,
self,
jigdo_url.geturl(),
jigdo_storage_location )
if os.path.isfile(jigdo_url.path): self.jigdo_files[jigdo].has_data = True
else:
self.log.error(_("Jigdo file %s seems to not be valid." % jigdo))
self.log.error(_("Cowardly refusing to use/download."))
if not self.jigdo_files:
self.log.critical(_("Nothing given to download!"))
return False
return True
| gpl-2.0 | 8,769,146,377,141,532,000 | 43.471074 | 90 | 0.584836 | false |
nlamirault/portefaix | diagrams/certmanager.py | 1 | 3182 | #!/usr/bin/python3
# Copyright (C) 2020 Nicolas Lamirault <[email protected]>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import diagrams
from diagrams.k8s import compute
from diagrams.k8s import network
from diagrams.k8s import rbac
def architecture():
with diagrams.Diagram("cert-manager", show=False, direction="TB"):
with diagrams.Cluster("Cloud Platform"):
with diagrams.Cluster("Kubernetes Cluster"):
clusterRole_cainjector = rbac.ClusterRole()
clusterRoleBinding_cainjector = rbac.ClusterRoleBinding()
clusterRole_webhook = rbac.ClusterRole()
clusterRoleBinding_webhook = rbac.ClusterRoleBinding()
clusterRole_certmanager = rbac.ClusterRole()
clusterRoleBinding_certmanager = rbac.ClusterRoleBinding()
with diagrams.Cluster("cert-manager"):
sa_cainjector = rbac.ServiceAccount("cainjector")
role_cainjector = rbac.Role()
roleBinding_cainjector = rbac.RoleBinding()
role_cainjector << roleBinding_cainjector >> sa_cainjector
clusterRole_cainjector << clusterRoleBinding_cainjector >> sa_cainjector
sa_webhook = rbac.ServiceAccount("webhook")
role_webhook = rbac.Role()
roleBinding_webhook = rbac.RoleBinding()
role_webhook << roleBinding_webhook >> sa_webhook
clusterRole_webhook << clusterRoleBinding_webhook >> sa_webhook
sa_certmanager = rbac.ServiceAccount("certmanager")
role_certmanager = rbac.Role()
roleBinding_certmanager = rbac.RoleBinding()
role_certmanager << roleBinding_certmanager >> sa_certmanager
clusterRole_certmanager << clusterRoleBinding_certmanager >> sa_certmanager
deploy_certmanager = compute.Deployment("certmanager")
svc_certmanager = network.Service()
svc_certmanager << deploy_certmanager << sa_certmanager
deploy_cainjector = compute.Deployment("cainjector")
svc_cainjector = network.Service()
svc_cainjector << deploy_cainjector << sa_cainjector
deploy_webhook = compute.Deployment("webhook")
svc_webhook = network.Service()
svc_webhook << deploy_webhook << sa_webhook
# TODO:
# mutatingWebhookConfiguration
# validatingWebhookConfiguration
| apache-2.0 | -5,073,813,048,137,684,000 | 44.442857 | 95 | 0.621188 | false |
yunojuno/django-inbound-email | inbound_email/backends/sendgrid.py | 1 | 5539 | import json
import logging
from email.utils import getaddresses
from django.core.mail import EmailMultiAlternatives
from django.http import HttpRequest
from django.utils.datastructures import MultiValueDictKeyError
from django.utils.encoding import smart_text
from ..backends import RequestParser
from ..errors import RequestParseError, AttachmentTooLargeError
logger = logging.getLogger(__name__)
def _decode_POST_value(request, field_name, default=None):
"""Helper to decode a request field into unicode based on charsets encoding.
Args:
request: the HttpRequest object.
field_name: the field expected in the request.POST
Kwargs:
default: if passed in then field is optional and default is used if not
found; if None, then assume field exists, which will raise an error
if it does not.
Returns: the contents of the string encoded using the related charset from
the requests.POST['charsets'] dictionary (or 'utf-8' if none specified).
"""
if default is None:
value = request.POST[field_name]
else:
value = request.POST.get(field_name, default)
# it's inefficient to load this each time it gets called, but we're
# not anticipating incoming email being a performance bottleneck right now!
charsets = json.loads(request.POST.get('charsets', "{}"))
charset = charsets.get(field_name, 'utf-8')
if charset.lower() != 'utf-8':
logger.debug("Incoming email field '%s' has %s encoding.", field_name, charset)
return smart_text(value, encoding=charset)
class SendGridRequestParser(RequestParser):
"""SendGrid request parser."""
def _get_addresses(self, address_data, retain_name=False):
"""
Takes RFC-compliant email addresses in both terse (email only)
and verbose (name + email) forms and returns a list of
email address strings
(TODO: breaking change that returns a tuple of (name, email) per string)
"""
if retain_name:
raise NotImplementedError(
"Not yet implemented, but will need client-code changes too"
)
# We trust than an email address contains an "@" after
# email.utils.getaddresses has done the hard work. If we wanted
# to we could use a regex to check for greater email validity
# NB: getaddresses expects a list, so ensure we feed it appropriately
if isinstance(address_data, str):
if "[" not in address_data:
# Definitely turn these into a list
# NB: this is pretty assumptive, but still prob OK
address_data = [address_data]
output = [x[1] for x in getaddresses(address_data) if "@" in x[1]]
return output
def parse(self, request):
"""Parse incoming request and return an email instance.
Args:
request: an HttpRequest object, containing the forwarded email, as
per the SendGrid specification for inbound emails.
Returns:
an EmailMultiAlternatives instance, containing the parsed contents
of the inbound email.
TODO: non-UTF8 charset handling.
TODO: handler headers.
"""
assert isinstance(request, HttpRequest), "Invalid request type: %s" % type(request)
try:
# from_email should never be a list (unless we change our API)
from_email = self._get_addresses([_decode_POST_value(request, 'from')])[0]
# ...but all these can and will be a list
to_email = self._get_addresses([_decode_POST_value(request, 'to')])
cc = self._get_addresses([_decode_POST_value(request, 'cc', default='')])
bcc = self._get_addresses([_decode_POST_value(request, 'bcc', default='')])
subject = _decode_POST_value(request, 'subject')
text = _decode_POST_value(request, 'text', default='')
html = _decode_POST_value(request, 'html', default='')
except IndexError as ex:
raise RequestParseError(
"Inbound request lacks a valid from address: %s." % request.get('from')
)
except MultiValueDictKeyError as ex:
raise RequestParseError("Inbound request is missing required value: %s." % ex)
if "@" not in from_email:
# Light sanity check for potential issues related to taking just the
# first element of the 'from' address list
raise RequestParseError("Could not get a valid from address out of: %s." % request)
email = EmailMultiAlternatives(
subject=subject,
body=text,
from_email=from_email,
to=to_email,
cc=cc,
bcc=bcc,
)
if html is not None and len(html) > 0:
email.attach_alternative(html, "text/html")
# TODO: this won't cope with big files - should really read in in chunks
for n, f in list(request.FILES.items()):
if f.size > self.max_file_size:
logger.debug(
"File attachment %s is too large to process (%sB)",
f.name,
f.size
)
raise AttachmentTooLargeError(
email=email,
filename=f.name,
size=f.size
)
else:
email.attach(f.name, f.read(), f.content_type)
return email
| mit | -6,551,924,801,335,674,000 | 36.938356 | 95 | 0.610399 | false |
openplans/shareabouts-vahi-demo | src/sa_web/views.py | 1 | 9089 | import requests
import yaml
import json
import logging
import os
import time
import hashlib
import httpagentparser
import urllib2
from .config import get_shareabouts_config
from django.shortcuts import render
from django.conf import settings
from django.core.cache import cache
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.utils.timezone import now
from django.views.decorators.csrf import ensure_csrf_cookie
from proxy.views import proxy_view
log = logging.getLogger(__name__)
def make_api_root(dataset_root):
components = dataset_root.split('/')
if dataset_root.endswith('/'):
return '/'.join(components[:-4]) + '/'
else:
return '/'.join(components[:-3]) + '/'
def make_auth_root(dataset_root):
return make_api_root(dataset_root) + 'users/'
def make_resource_uri(resource, root):
resource = resource.strip('/')
root = root.rstrip('/')
uri = '%s/%s' % (root, resource)
return uri
class ShareaboutsApi (object):
def __init__(self, dataset_root):
self.dataset_root = dataset_root
self.auth_root = make_auth_root(dataset_root)
self.root = make_api_root(dataset_root)
def get(self, resource, default=None, **kwargs):
uri = make_resource_uri(resource, root=self.dataset_root)
res = requests.get(uri, params=kwargs,
headers={'Accept': 'application/json'})
return (res.text if res.status_code == 200 else default)
def current_user(self, default=u'null', **kwargs):
uri = make_resource_uri('current', root=self.auth_root)
res = requests.get(uri, headers={'Accept': 'application/json'}, **kwargs)
return (res.text if res.status_code == 200 else default)
@ensure_csrf_cookie
def index(request, default_place_type):
# Load app config settings
config = get_shareabouts_config(settings.SHAREABOUTS.get('CONFIG'))
config.update(settings.SHAREABOUTS.get('CONTEXT', {}))
# Get initial data for bootstrapping into the page.
api = ShareaboutsApi(dataset_root=settings.SHAREABOUTS.get('DATASET_ROOT'))
# Handle place types in case insensitive way (park works just like Park)
lower_place_types = [k.lower() for k in config['place_types'].keys()]
if default_place_type.lower() in lower_place_types:
validated_default_place_type = default_place_type
else:
validated_default_place_type = ''
# Get the content of the static pages linked in the menu.
pages_config = config.get('pages', [])
pages_config_json = json.dumps(pages_config)
# The user token will be a pair, with the first element being the type
# of identification, and the second being an identifier. It could be
# 'username:mjumbewu' or 'ip:123.231.132.213', etc. If the user is
# unauthenticated, the token will be session-based.
if 'user_token' not in request.session:
t = int(time.time() * 1000)
ip = request.META['REMOTE_ADDR']
unique_string = str(t) + str(ip)
session_token = 'session:' + hashlib.md5(unique_string).hexdigest()
request.session['user_token'] = session_token
request.session.set_expiry(0)
user_token_json = u'"{0}"'.format(request.session['user_token'])
# Get the browser that the user is using.
user_agent_string = request.META['HTTP_USER_AGENT']
user_agent = httpagentparser.detect(user_agent_string)
user_agent_json = json.dumps(user_agent)
context = {'config': config,
'user_token_json': user_token_json,
'pages_config': pages_config,
'pages_config_json': pages_config_json,
'user_agent_json': user_agent_json,
'default_place_type': validated_default_place_type,
'API_ROOT': api.root,
'DATASET_ROOT': api.dataset_root,
}
return render(request, 'index.html', context)
def place_was_created(request, path, response):
path = path.strip('/')
return (
path.startswith('places') and
not path.startswith('places/') and
response.status_code == 201)
def send_place_created_notifications(request, response):
config = get_shareabouts_config(settings.SHAREABOUTS.get('CONFIG'))
config.update(settings.SHAREABOUTS.get('CONTEXT', {}))
# Before we start, check whether we're configured to send at all on new
# place.
should_send = config.get('notifications', {}).get('on_new_place', False)
if not should_send:
return
# First, check that we have all the settings and data we need. Do not bail
# after each error, so that we can report on all the validation problems
# at once.
errors = []
try:
place = json.loads(response.content)
except ValueError:
errors.append('Received invalid place JSON: %r' % (response.content,))
try:
from_email = settings.EMAIL_ADDRESS
except AttributeError:
errors.append('EMAIL_ADDRESS setting must be configured in order to send notification emails.')
try:
email_field = config.get('notifications', {}).get('submitter_email_field', 'submitter_email')
recipient_email = place['properties'][email_field]
except KeyError:
errors.append('No "%s" field found on the place. Be sure to configure the "notifications.submitter_email_field" property if necessary.' % (email_field,))
# Bail if any errors were found. Send all errors to the logs and otherwise
# fail silently.
if errors:
for error_msg in errors:
log.error(error_msg)
return
# If the user didn't provide an email address, then no need to go further.
if not recipient_email:
return
# If we didn't find any errors, then render the email and send.
context_data = {'place': place, 'config': config, 'request': request}
subject = render_to_string('new_place_email_subject.txt', context_data)
body = render_to_string('new_place_email_body.txt', context_data)
# connection = smtp.EmailBackend(
# host=...,
# port=...,
# username=...,
# use_tls=...)
send_mail(
subject,
body,
from_email,
[recipient_email])
# connection=connection,
# html_message=html_body) # For multipart, HTML-enabled emails
return
def api(request, path):
"""
A small proxy for a Shareabouts API server, exposing only
one configured dataset.
"""
root = settings.SHAREABOUTS.get('DATASET_ROOT')
api_key = settings.SHAREABOUTS.get('DATASET_KEY')
api_session_cookie = request.COOKIES.get('sa-api-sessionid')
# It doesn't matter what the CSRF token value is, as long as the cookie and
# header value match.
api_csrf_token = '1234csrf567token'
url = make_resource_uri(path, root)
headers = {'X-SHAREABOUTS-KEY': api_key,
'X-CSRFTOKEN': api_csrf_token}
cookies = {'sessionid': api_session_cookie,
'csrftoken': api_csrf_token} \
if api_session_cookie else {'csrftoken': api_csrf_token}
# Clear cookies from the current domain, so that they don't interfere with
# our settings here.
request.META.pop('HTTP_COOKIE', None)
response = proxy_view(request, url, requests_args={
'headers': headers,
'cookies': cookies
})
if place_was_created(request, path, response):
send_place_created_notifications(request, response)
return response
def users(request, path):
"""
A small proxy for a Shareabouts API server, exposing only
user authentication.
"""
root = make_auth_root(settings.SHAREABOUTS.get('DATASET_ROOT'))
api_key = settings.SHAREABOUTS.get('DATASET_KEY')
api_session_cookie = request.COOKIES.get('sa-api-session')
url = make_resource_uri(path, root)
headers = {'X-Shareabouts-Key': api_key} if api_key else {}
cookies = {'sessionid': api_session_cookie} if api_session_cookie else {}
return proxy_view(request, url, requests_args={
'headers': headers,
'allow_redirects': False,
'cookies': cookies
})
def csv_download(request, path):
"""
A small proxy for a Shareabouts API server, exposing only
one configured dataset.
"""
root = settings.SHAREABOUTS.get('DATASET_ROOT')
api_key = settings.SHAREABOUTS.get('DATASET_KEY')
api_session_cookie = request.COOKIES.get('sa-api-session')
url = make_resource_uri(path, root)
headers = {
'X-Shareabouts-Key': api_key,
'ACCEPT': 'text/csv'
}
cookies = {'sessionid': api_session_cookie} if api_session_cookie else {}
return proxy_view(request, url, requests_args={
'headers': headers,
'cookies': cookies
})
# Send the csv as a timestamped download
filename = '.'.join([os.path.split(path)[1],
now().strftime('%Y%m%d%H%M%S'),
'csv'])
response['Content-disposition'] = 'attachment; filename=' + filename
return response
| gpl-3.0 | 6,137,123,870,696,673,000 | 33.42803 | 161 | 0.645836 | false |
NOAA-ORR-ERD/gridded | gridded/tests/test_dataset.py | 1 | 1234 | #!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest
import os
import netCDF4 as nc
from gridded import Dataset
from .utilities import get_test_file_dir
test_dir = get_test_file_dir()
# Need to hook this up to existing test data infrastructure
# and add more infrustructure...
sample_sgrid_file = os.path.join(test_dir, 'staggered_sine_channel.nc')
def test_load_sgrid():
""" tests you can intitilize an conforming sgrid file"""
sinusoid = Dataset(sample_sgrid_file)
assert True # just to make it a test
def test_info():
"""
Make sure the info property is working
This doesn't test much -- jsut tht it won't crash
"""
gds = Dataset(sample_sgrid_file)
info = gds.info
print(info)
# just a couple checks to make sure it's not totally bogus
assert "gridded.Dataset:" in info
assert "variables:" in info
assert "attributes:" in info
# def test_get_variables_by_attribute():
# gds = Dataset(sample_sgrid_file)
# print(gds.varibles)
# assert False
def test_save_invalid_format():
ds = Dataset()
with pytest.raises(ValueError):
ds.save("a_filename.txt", format="text")
| unlicense | -9,179,753,565,101,680,000 | 21.035714 | 82 | 0.683955 | false |
Belval/pdf2image | tests.py | 1 | 66419 | import os
import sys
import errno
import pathlib
import tempfile
import unittest
import time
import shutil
import subprocess
from inspect import signature
from subprocess import Popen, PIPE
from tempfile import TemporaryDirectory
from multiprocessing.dummy import Pool
from memory_profiler import profile as profile_memory
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from pdf2image import (
convert_from_bytes,
convert_from_path,
pdfinfo_from_bytes,
pdfinfo_from_path,
)
from pdf2image.exceptions import (
PDFInfoNotInstalledError,
PDFPageCountError,
PDFSyntaxError,
PDFPopplerTimeoutError,
)
from functools import wraps
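# PROFILE_MEMORY toggles memory_profiler's line-by-line report for every test.
# Run e.g. `PROFILE_MEMORY=1 python -m unittest tests -v` (assuming this file
# is importable as the `tests` module) to enable it.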
PROFILE_MEMORY = os.environ.get("PROFILE_MEMORY", False)
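# Probe for poppler by launching pdfinfo once; the result gates every
# poppler-dependent test below through unittest.skipIf.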
try:
    subprocess.call(
        ["pdfinfo", "-h"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
    )
    POPPLER_INSTALLED = True
except OSError as e:
    # A missing binary raises ENOENT; anything else is unexpected, so re-raise
    # instead of silently leaving POPPLER_INSTALLED undefined.
    if e.errno != errno.ENOENT:
        raise
    POPPLER_INSTALLED = False
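# Decorator factory: wrap each test with memory_profiler's @profile only when
# PROFILE_MEMORY is set, so a default run carries no profiling overhead.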
def profile(f):
if PROFILE_MEMORY:
@wraps(f)
@profile_memory
def wrapped(*args, **kwargs):
r = f(*args, **kwargs)
return r
return wrapped
else:
@wraps(f)
def wrapped(*args, **kwargs):
r = f(*args, **kwargs)
return r
return wrapped
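# Resolve the directory holding pdftoppm (via `which`) so the poppler_path
# keyword can be exercised against the system install.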
def get_poppler_path():
return pathlib.Path(
Popen(["which", "pdftoppm"], stdout=PIPE).communicate()[0].strip().decode()
).parent
class PDFConversionMethods(unittest.TestCase):
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read())
self.assertTrue(len(images_from_bytes) == 1)
print("test_conversion_from_bytes: {} sec".format(time.time() - start_time))
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test.pdf")
self.assertTrue(len(images_from_path) == 1)
print("test_conversion_from_path: {} sec".format(time.time() - start_time))
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_using_dir(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path
)
self.assertTrue(len(images_from_bytes) == 1)
[im.close() for im in images_from_bytes]
print(
"test_conversion_from_bytes_using_dir: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path("./tests/test.pdf", output_folder=path)
self.assertTrue(len(images_from_path) == 1)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_14(self):
start_time = time.time()
with open("./tests/test_14.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read())
self.assertTrue(len(images_from_bytes) == 14)
print(
"test_conversion_from_bytes_14: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_14(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test_14.pdf")
self.assertTrue(len(images_from_path) == 14)
print(
"test_conversion_from_path_14: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_using_dir_14(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test_14.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path
)
self.assertTrue(len(images_from_bytes) == 14)
[im.close() for im in images_from_bytes]
print(
"test_conversion_from_bytes_using_dir_14: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir_14(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test_14.pdf", output_folder=path
)
self.assertTrue(len(images_from_path) == 14)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir_14: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
@unittest.skipIf(
"TRAVIS" in os.environ and os.environ["TRAVIS"] == "true",
"Skipping this test on Travis CI.",
)
def test_conversion_from_bytes_241(self): # pragma: no cover
start_time = time.time()
with open("./tests/test_241.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read())
self.assertTrue(len(images_from_bytes) == 241)
print(
"test_conversion_from_bytes_241: {} sec".format(
(time.time() - start_time) / 241.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
@unittest.skipIf(
"TRAVIS" in os.environ and os.environ["TRAVIS"] == "true",
"Skipping this test on Travis CI.",
)
def test_conversion_from_path_241(self): # pragma: no cover
start_time = time.time()
images_from_path = convert_from_path("./tests/test_241.pdf")
self.assertTrue(len(images_from_path) == 241)
print(
"test_conversion_from_path_241: {} sec".format(
(time.time() - start_time) / 241.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
@unittest.skipIf(
"TRAVIS" in os.environ and os.environ["TRAVIS"] == "true",
"Skipping this test on Travis CI.",
)
def test_conversion_from_bytes_using_dir_241(self): # pragma: no cover
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test_241.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path
)
self.assertTrue(len(images_from_bytes) == 241)
[im.close() for im in images_from_bytes]
print(
"test_conversion_from_bytes_using_dir_241: {} sec".format(
(time.time() - start_time) / 241.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
@unittest.skipIf(
"TRAVIS" in os.environ and os.environ["TRAVIS"] == "true",
"Skipping this test on Travis CI.",
)
def test_conversion_from_path_using_dir_241(self): # pragma: no cover
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test_241.pdf", output_folder=path
)
self.assertTrue(len(images_from_path) == 241)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir_241: {} sec".format(
(time.time() - start_time) / 241.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_empty_if_not_pdf(self):
start_time = time.time()
with self.assertRaises(Exception):
convert_from_path("./tests/test.jpg")
print("test_empty_if_not_pdf: {} sec".format(time.time() - start_time))
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_empty_if_file_not_found(self):
start_time = time.time()
with self.assertRaises(Exception):
convert_from_path("./tests/totally_a_real_file_in_folder.xyz")
print("test_empty_if_file_not_found: {} sec".format(time.time() - start_time))
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_empty_if_corrupted_pdf(self):
start_time = time.time()
with self.assertRaises(Exception):
convert_from_path("./tests/test_corrupted.pdf")
print("test_empty_if_corrupted_pdf: {} sec".format(time.time() - start_time))
## Test first page
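    # first_page is 1-indexed and inclusive: starting at page 12 of a 14-page
    # file leaves pages 12-14, i.e. 3 images.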
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_14_first_page_12(self):
start_time = time.time()
with open("./tests/test_14.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read(), first_page=12)
self.assertTrue(len(images_from_bytes) == 3)
print(
"test_conversion_from_bytes_14_last_page_12: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_14_first_page_12(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test_14.pdf", first_page=12)
self.assertTrue(len(images_from_path) == 3)
print(
"test_conversion_from_path_14_first_page_12: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_using_dir_14_first_page_12(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test_14.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path, first_page=12
)
self.assertTrue(len(images_from_bytes) == 3)
[im.close() for im in images_from_bytes]
print(
"test_conversion_from_bytes_using_dir_14_first_page_12: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir_14_first_page_12(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test_14.pdf", output_folder=path, first_page=12
)
self.assertTrue(len(images_from_path) == 3)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir_14_first_page_12: {} sec".format(
(time.time() - start_time) / 14.0
)
)
## Test last page
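    # last_page is inclusive as well: stopping at page 12 yields 12 images.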
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_14_last_page_12(self):
start_time = time.time()
with open("./tests/test_14.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read(), last_page=12)
self.assertTrue(len(images_from_bytes) == 12)
print(
"test_conversion_from_bytes_14_last_page_12: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_14_last_page_12(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test_14.pdf", last_page=12)
self.assertTrue(len(images_from_path) == 12)
print(
"test_conversion_from_path_14_last_page_12: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_using_dir_14_last_page_12(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test_14.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path, last_page=12
)
self.assertTrue(len(images_from_bytes) == 12)
[im.close() for im in images_from_bytes]
print(
"test_conversion_from_bytes_using_dir_14_last_page_12: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir_14_last_page_12(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test_14.pdf", output_folder=path, last_page=12
)
self.assertTrue(len(images_from_path) == 12)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir_14_last_page_12: {} sec".format(
(time.time() - start_time) / 14.0
)
)
## Test first and last page
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_14_first_page_2_last_page_12(self):
start_time = time.time()
with open("./tests/test_14.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), first_page=2, last_page=12
)
self.assertTrue(len(images_from_bytes) == 11)
print(
"test_conversion_from_bytes_14_first_page_2_last_page_12: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_14_first_page_2_last_page_12(self):
start_time = time.time()
images_from_path = convert_from_path(
"./tests/test_14.pdf", first_page=2, last_page=12
)
self.assertTrue(len(images_from_path) == 11)
print(
"test_conversion_from_path_14_first_page_2_last_page_12: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_using_dir_14_first_page_2_last_page_12(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test_14.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path, first_page=2, last_page=12
)
self.assertTrue(len(images_from_bytes) == 11)
[im.close() for im in images_from_bytes]
print(
"test_conversion_from_bytes_using_dir_14_first_page_2_last_page_12: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir_14_first_page_2_last_page_12(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test_14.pdf", output_folder=path, first_page=2, last_page=12
)
self.assertTrue(len(images_from_path) == 11)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir_14_first_page_2_last_page_12: {} sec".format(
(time.time() - start_time) / 14.0
)
)
## Test output as jpeg
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_to_jpeg_from_bytes(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read(), fmt="jpg")
self.assertTrue(images_from_bytes[0].format == "JPEG")
        print(
            "test_conversion_to_jpeg_from_bytes: {} sec".format(
                time.time() - start_time
            )
        )
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_to_jpeg_from_path_using_dir(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test.pdf", output_folder=path, fmt="jpeg"
)
self.assertTrue(images_from_path[0].format == "JPEG")
[im.close() for im in images_from_path]
        print(
            "test_conversion_to_jpeg_from_path_using_dir: {} sec".format(
                time.time() - start_time
            )
        )
## Test output as png
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_to_png_from_bytes(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read(), fmt="png")
self.assertTrue(images_from_bytes[0].format == "PNG")
        print(
            "test_conversion_to_png_from_bytes: {} sec".format(
                time.time() - start_time
            )
        )
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_to_png_from_path_using_dir(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test.pdf", output_folder=path, fmt="png"
)
self.assertTrue(images_from_path[0].format == "PNG")
[im.close() for im in images_from_path]
        print(
            "test_conversion_to_png_from_path_using_dir: {} sec".format(
                time.time() - start_time
            )
        )
## Test output with not-empty output_folder
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_non_empty_output_folder(self):
start_time = time.time()
images_from_path = convert_from_path(
"./tests/test.pdf", output_folder="./tests/"
)
self.assertTrue(len(images_from_path) == 1)
[im.close() for im in images_from_path]
[os.remove(im.filename) for im in images_from_path]
        print(
            "test_non_empty_output_folder: {} sec".format(
                time.time() - start_time
            )
        )
## Test format that starts with a dot
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_format_that_starts_with_a_dot(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path, fmt=".jpg"
)
self.assertTrue(len(images_from_bytes) == 1)
[im.close() for im in images_from_bytes]
print(
"test_format_that_starts_with_a_dot: {} sec".format(
time.time() - start_time
)
)
## Test locked PDF
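    # A user password (userpw) is needed to open the document at all, while an
    # owner password only restricts editing/printing, which is why the
    # owner-only file below renders without any credentials.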
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_locked_pdf_with_userpw_only(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test_locked_user_only.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path, fmt=".jpg", userpw="pdf2image"
)
self.assertTrue(len(images_from_bytes) == 1)
[im.close() for im in images_from_bytes]
print(
"test_locked_pdf_with_userpw_only: {} sec".format(time.time() - start_time)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_not_locked_pdf(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path, fmt=".jpg", userpw="pdf2image"
)
self.assertTrue(len(images_from_bytes) == 1)
[im.close() for im in images_from_bytes]
print(
"test_locked_pdf_with_userpw_only: {} sec".format(time.time() - start_time)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_locked_pdf_with_ownerpw_only(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test_locked_owner_only.pdf", "rb") as pdf_file:
# No need to pass a ownerpw because the absence of userpw means we can read it anyway
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path, fmt=".jpg"
)
self.assertTrue(len(images_from_bytes) == 1)
[im.close() for im in images_from_bytes]
print(
"test_locked_pdf_with_ownerpw_only: {} sec".format(time.time() - start_time)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_locked_pdf_with_ownerpw_and_userpw(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test_locked_both.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path, fmt=".jpg", userpw="pdf2image"
)
self.assertTrue(len(images_from_bytes) == 1)
[im.close() for im in images_from_bytes]
print(
"test_locked_pdf_with_ownerpw_and_userpw: {} sec".format(
time.time() - start_time
)
)
## Tests cropbox
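    # use_cropbox renders the page's CropBox instead of the default MediaBox
    # (pdftoppm's -cropbox flag).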
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_using_cropbox(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read(), use_cropbox=True)
self.assertTrue(len(images_from_bytes) == 1)
print(
"test_conversion_from_bytes_using_cropbox: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_cropbox(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test.pdf", use_cropbox=True)
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_using_cropbox: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_using_dir_and_cropbox(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path, use_cropbox=True
)
self.assertTrue(len(images_from_bytes) == 1)
[im.close() for im in images_from_bytes]
print(
"test_conversion_from_bytes_using_dir_and_cropbox: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir_and_cropbox(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test.pdf", output_folder=path, use_cropbox=True
)
self.assertTrue(len(images_from_path) == 1)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir_and_cropbox: {} sec".format(
time.time() - start_time
)
)
## Tests multithreading
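    # thread_count splits the page range across parallel pdftoppm invocations;
    # the 0- and 15-thread cases below check that out-of-range counts are
    # clamped rather than rejected.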
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_14_with_4_threads(self):
start_time = time.time()
with open("./tests/test_14.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read(), thread_count=4)
self.assertTrue(len(images_from_bytes) == 14)
print(
"test_conversion_from_bytes_14_with_4_thread: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_14_with_4_threads(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test_14.pdf", thread_count=4)
self.assertTrue(len(images_from_path) == 14)
print(
"test_conversion_from_path_14_with_4_thread: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_14_with_15_threads(self):
start_time = time.time()
with open("./tests/test_14.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read(), thread_count=15)
self.assertTrue(len(images_from_bytes) == 14)
print(
"test_conversion_from_bytes_14_with_15_thread: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_14_with_0_threads(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test_14.pdf", thread_count=0)
self.assertTrue(len(images_from_path) == 14)
print(
"test_conversion_from_path_14_with_4_thread: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_using_dir_14_with_4_threads(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test_14.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path, thread_count=4
)
self.assertTrue(len(images_from_bytes) == 14)
[im.close() for im in images_from_bytes]
print(
"test_conversion_from_bytes_using_dir_14_with_4_thread: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir_14_with_4_threads(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test_14.pdf", output_folder=path, thread_count=4
)
self.assertTrue(len(images_from_path) == 14)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir_14_with_4_thread: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(
"TRAVIS" in os.environ and os.environ["TRAVIS"] == "true",
"Skipping this test on Travis CI.",
)
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_241_with_4_threads(self): # pragma: no cover
start_time = time.time()
with open("./tests/test_241.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read(), thread_count=4)
self.assertTrue(len(images_from_bytes) == 241)
print(
"test_conversion_from_bytes_241_with_4_thread: {} sec".format(
(time.time() - start_time) / 241.0
)
)
@profile
@unittest.skipIf(
"TRAVIS" in os.environ and os.environ["TRAVIS"] == "true",
"Skipping this test on Travis CI.",
)
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_241_with_4_threads(self): # pragma: no cover
start_time = time.time()
images_from_path = convert_from_path("./tests/test_241.pdf", thread_count=4)
self.assertTrue(len(images_from_path) == 241)
print(
"test_conversion_from_path_241_with_4_thread: {} sec".format(
(time.time() - start_time) / 241.0
)
)
@profile
@unittest.skipIf(
"TRAVIS" in os.environ and os.environ["TRAVIS"] == "true",
"Skipping this test on Travis CI.",
)
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_using_dir_241_with_4_threads(
self,
): # pragma: no cover
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test_241.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path, thread_count=4
)
self.assertTrue(len(images_from_bytes) == 241)
[im.close() for im in images_from_bytes]
print(
"test_conversion_from_bytes_using_dir_241_with_4_thread: {} sec".format(
(time.time() - start_time) / 241.0
)
)
@profile
@unittest.skipIf(
"TRAVIS" in os.environ and os.environ["TRAVIS"] == "true",
"Skipping this test on Travis CI.",
)
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir_241_with_4_threads(
self,
): # pragma: no cover
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test_241.pdf", output_folder=path, thread_count=4
)
self.assertTrue(len(images_from_path) == 241)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir_241_with_4_thread: {} sec".format(
(time.time() - start_time) / 241.0
)
)
    ## Testing custom exceptions
@unittest.skipIf(POPPLER_INSTALLED, "Poppler is installed, skipping.")
def test_pdfinfo_not_installed_throws(self):
start_time = time.time()
try:
images_from_path = convert_from_path("./tests/test_14.pdf")
raise Exception("This should not happen")
except PDFInfoNotInstalledError as ex:
pass
print(
"test_pdfinfo_not_installed_throws: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_missingfonterror_throws(self):
start_time = time.time()
try:
images_from_path = convert_from_path("./tests/test_strict.pdf", strict=True)
raise Exception("This should not happen")
except PDFSyntaxError as ex:
pass
print("test_syntaxerror_throws: {} sec".format(time.time() - start_time))
    ## Test transparent
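    # transparent=True only has a visible effect on formats with an alpha
    # channel (png here); the last test in this group checks the flag is
    # harmless with the default ppm output.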
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_using_transparent(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), transparent=True, fmt="png"
)
self.assertTrue(len(images_from_bytes) == 1)
print(
"test_conversion_from_bytes_using_transparent: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_transparent(self):
start_time = time.time()
images_from_path = convert_from_path(
"./tests/test.pdf", transparent=True, fmt="png"
)
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_using_transparent: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_using_dir_and_transparent(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), output_folder=path, transparent=True, fmt="png"
)
self.assertTrue(len(images_from_bytes) == 1)
[im.close() for im in images_from_bytes]
print(
"test_conversion_from_bytes_using_dir_and_transparent: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir_and_transparent(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test.pdf", output_folder=path, transparent=True, fmt="png"
)
self.assertTrue(len(images_from_path) == 1)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir_and_transparent: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_transparent_without_png(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test.pdf", transparent=True)
self.assertTrue(len(images_from_path) == 1)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_transparent_without_png: {} sec".format(
time.time() - start_time
)
)
## Test output as TIFF
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_to_tiff_from_bytes(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read(), fmt="tiff")
self.assertTrue(images_from_bytes[0].format == "TIFF")
        print(
            "test_conversion_to_tiff_from_bytes: {} sec".format(
                time.time() - start_time
            )
        )
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_to_tiff_from_path_using_dir(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test.pdf", output_folder=path, fmt="tiff"
)
self.assertTrue(images_from_path[0].format == "TIFF")
[im.close() for im in images_from_path]
        print(
            "test_conversion_to_tiff_from_path_using_dir: {} sec".format(
                time.time() - start_time
            )
        )
## Test hanging file handles
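    # Count the entries under /proc/<pid>/fd before and after 50 conversions;
    # a roughly stable count (the +/-3 margin below) shows the temporary files
    # are released once the PIL images are closed.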
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
@unittest.skipIf(not os.name == "posix", "This test only works on posix systems")
def test_close_tempfile_after_conversion(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as pdf_file:
fd_count_before = len(
subprocess.check_output(
["ls", "-l", "/proc/" + str(os.getpid()) + "/fd"]
)
.decode("utf8")
.split("\n")
)
pdf_data = pdf_file.read()
images_from_bytes = []
for i in range(50):
images_from_bytes.extend(convert_from_bytes(pdf_data))
# Closing the images
[im.close() for im in images_from_bytes]
fd_count_after = len(
subprocess.check_output(
["ls", "-l", "/proc/" + str(os.getpid()) + "/fd"]
)
.decode("utf8")
.split("\n")
)
# Add an error margin
self.assertTrue(abs(fd_count_before - fd_count_after) <= 3)
print(
"test_close_tempfile_after_conversion: {} sec".format(
time.time() - start_time
)
)
## Test poppler_path
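    # Copy the poppler binaries into a local ./bin and point poppler_path at
    # it, simulating an install that is not on PATH.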
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
@unittest.skipIf(not os.name == "posix", "This test only works on posix systems")
def test_use_poppler_path(self):
os.mkdir("./bin")
shutil.copy("/usr/bin/pdftoppm", "./bin")
shutil.copy("/usr/bin/pdfinfo", "./bin")
start_time = time.time()
try:
images_from_path = convert_from_path(
"./tests/test.pdf", poppler_path="./bin"
)
finally:
shutil.rmtree("./bin")
self.assertTrue(len(images_from_path) == 1)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_poppler_path: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
@unittest.skipIf(not os.name == "posix", "This test only works on posix systems")
def test_use_poppler_path_with_trailing_slash(self):
os.mkdir("./bin")
shutil.copy("/usr/bin/pdftoppm", "./bin")
shutil.copy("/usr/bin/pdfinfo", "./bin")
start_time = time.time()
try:
images_from_path = convert_from_path(
"./tests/test.pdf", poppler_path="./bin/"
)
finally:
shutil.rmtree("./bin")
self.assertTrue(len(images_from_path) == 1)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_poppler_path_with_trailing_slash: {} sec".format(
time.time() - start_time
)
)
## Test first page greater or equal to last_page
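    # An inverted range (first_page > last_page) is not an error; the call
    # simply returns an empty list.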
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_14_first_page_1_last_page_1(self):
start_time = time.time()
images_from_path = convert_from_path(
"./tests/test_14.pdf", first_page=1, last_page=1
)
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_14_first_page_1_last_page_1: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_14_first_page_12_last_page_1(self):
start_time = time.time()
images_from_path = convert_from_path(
"./tests/test_14.pdf", first_page=12, last_page=1
)
self.assertTrue(len(images_from_path) == 0)
print(
"test_conversion_from_path_14_first_page_12_last_page_1: {} sec".format(
(time.time() - start_time) / 14.0
)
)
## Test singlefile
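    # single_file maps to pdftoppm's -singlefile flag: only the first page is
    # written and the name carries no page suffix (test.ppm, not test-1.ppm).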
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_using_dir_single_file(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(),
output_folder=path,
output_file="test",
single_file=True,
)
self.assertTrue(len(images_from_bytes) == 1)
self.assertTrue(
images_from_bytes[0].filename == os.path.join(path, "test.ppm")
)
[im.close() for im in images_from_bytes]
print(
"test_conversion_from_bytes_using_dir_single_file: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir_single_file(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test.pdf",
output_folder=path,
output_file="test",
single_file=True,
)
self.assertTrue(len(images_from_path) == 1)
self.assertTrue(
images_from_path[0].filename == os.path.join(path, "test.ppm")
)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir_single_file: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir_14_single_file(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test_14.pdf",
output_folder=path,
output_file="test",
single_file=True,
)
self.assertTrue(len(images_from_path) == 1)
self.assertTrue(
images_from_path[0].filename == os.path.join(path, "test.ppm")
)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir_14_single_file: {} sec".format(
(time.time() - start_time) / 14.0
)
)
## Test file with same name in directory
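    # When output_file would collide with a file already in the folder, a
    # counter is appended instead of overwriting, hence the expected
    # test0001-1.ppm name below.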
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir_with_containing_file_with_same_name(self):
start_time = time.time()
with TemporaryDirectory() as path:
shutil.copyfile("./tests/test.pdf", os.path.join(path, "test.pdf"))
images_from_path = convert_from_path(
"./tests/test.pdf", output_folder=path, output_file="test"
)
self.assertTrue(len(images_from_path) == 1)
self.assertTrue(
images_from_path[0].filename == os.path.join(path, "test0001-1.ppm")
)
[im.close() for im in images_from_path]
print(
"test_conversion_from_path_using_dir_single_file: {} sec".format(
time.time() - start_time
)
)
## Test grayscale option
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_to_grayscale_from_bytes(self):
start_time = time.time()
with open("./tests/test_14.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(pdf_file.read(), grayscale=True)
self.assertTrue(images_from_bytes[0].mode == "L")
print(
"test_conversion_to_grayscale_from_bytes_14: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_to_grayscale_from_path(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test_14.pdf", grayscale=True)
self.assertTrue(images_from_path[0].mode == "L")
[im.close() for im in images_from_path]
print(
"test_conversion_to_grayscale_from_path_14: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_to_grayscale_from_path_using_dir(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test_14.pdf", output_folder=path, grayscale=True
)
self.assertTrue(images_from_path[0].mode == "L")
[im.close() for im in images_from_path]
print(
"test_conversion_to_grayscale_from_path_using_dir_14: {} sec".format(
(time.time() - start_time) / 14.0
)
)
## Test pathlib support
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_pathlib_path_using_dir(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
pathlib.Path("./tests/test.pdf"),
output_folder=pathlib.Path(path),
poppler_path=get_poppler_path(),
)
self.assertTrue(len(images_from_path) == 1)
[im.close() for im in images_from_path]
print(
"test_conversion_from_pathlib_path_using_dir: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_pathlib_path_14(self):
start_time = time.time()
images_from_path = convert_from_path(pathlib.Path("./tests/test_14.pdf"))
self.assertTrue(len(images_from_path) == 14)
print(
"test_conversion_from_pathlib_path_14: {} sec".format(
(time.time() - start_time) / 14.0
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_pathlib_path_using_dir_14(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
pathlib.Path("./tests/test_14.pdf"),
output_folder=pathlib.Path(path),
poppler_path=get_poppler_path(),
)
self.assertTrue(len(images_from_path) == 14)
[im.close() for im in images_from_path]
print(
"test_conversion_from_pathlib_path_using_dir_14: {} sec".format(
(time.time() - start_time) / 14.0
)
)
## Test jpegopt parameter
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_quality(self):
start_time = time.time()
images_from_path = convert_from_path(
"./tests/test.pdf", fmt="jpeg", jpegopt={"quality": 100}
)
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_with_quality: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_with_quality(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(), fmt="jpg", jpegopt={"quality": 100}
)
self.assertTrue(len(images_from_bytes) == 1)
print(
"test_conversion_from_bytes_with_quality: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_quality_and_progressive(self):
start_time = time.time()
images_from_path = convert_from_path(
"./tests/test.pdf",
fmt="jpeg",
jpegopt={"quality": 100, "progressive": True},
)
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_with_quality_and_progressive: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_quality_and_not_progressive(self):
start_time = time.time()
images_from_path = convert_from_path(
"./tests/test.pdf",
fmt="jpeg",
jpegopt={"quality": 100, "progressive": False},
)
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_with_quality_and_progressive: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_with_quality_and_progressive(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(),
fmt="jpg",
jpegopt={"quality": 100, "progressive": True},
)
self.assertTrue(len(images_from_bytes) == 1)
print(
"test_conversion_from_bytes_with_quality_and_progressive: {} sec".format(
time.time() - start_time
)
)
@profile
    @unittest.skipIf(POPPLER_INSTALLED, "Poppler is installed!")
    def test_conversion_from_bytes_with_quality_and_poppler_not_installed(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as pdf_file:
try:
images_from_bytes = convert_from_bytes(
pdf_file.read(), fmt="jpg", jpegopt={"quality": 100}
)
except PDFInfoNotInstalledError:
pass
print(
"test_conversion_from_bytes_with_quality_and_poppler_not_installed: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_quality_and_progressive_and_optimize(self):
start_time = time.time()
images_from_path = convert_from_path(
"./tests/test.pdf",
fmt="jpeg",
jpegopt={"quality": 100, "progressive": True, "optimize": True},
)
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_with_quality_and_progressive_and_optimize: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_with_quality_and_progressive_and_optimize(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(),
fmt="jpg",
jpegopt={"quality": 100, "progressive": True, "optimize": True},
)
self.assertTrue(len(images_from_bytes) == 1)
print(
"test_conversion_from_bytes_with_quality_and_progressive_and_optimize: {} sec".format(
time.time() - start_time
)
)
## Test size parameter
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_int_size(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test.pdf", size=400)
self.assertTrue(images_from_path[0].size[1] == 400)
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_with_int_size: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_1d_tuple_size(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test.pdf", size=(400,))
self.assertTrue(images_from_path[0].size[1] == 400)
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_with_1d_tuple_size: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_2d_tuple_size(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test.pdf", size=(400, 400))
self.assertTrue(images_from_path[0].size == (400, 400))
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_with_2d_tuple_size: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_invalid_size(self):
start_time = time.time()
try:
images_from_path = convert_from_path("./tests/test.pdf", size="bad value")
raise Exception("This should not happen")
except ValueError:
pass
print(
"test_conversion_from_path_with_invalid_size: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_2d_tuple_size_with_None_width(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test.pdf", size=(None, 400))
self.assertTrue(images_from_path[0].size[0] == 310)
self.assertTrue(images_from_path[0].size[1] == 400)
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_with_2d_tuple_size_with_None_width: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_2d_tuple_size_with_None_height(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test.pdf", size=(400, None))
self.assertTrue(images_from_path[0].size[0] == 400)
self.assertTrue(images_from_path[0].size[1] == 518)
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_with_2d_tuple_size_with_None_height: {} sec".format(
time.time() - start_time
)
)
## Test hide annotations parameter
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_hide_annotations(self):
images_from_path = convert_from_path("./tests/test_annotations.pdf", hide_annotations=True)
start_time = time.time()
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_with_hide_annotations: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_with_hide_annotations(self):
start_time = time.time()
with open("./tests/test_annotations.pdf", "rb") as pdf_file:
images_from_bytes = convert_from_bytes(
pdf_file.read(),
hide_annotations=True,
)
self.assertTrue(len(images_from_bytes) == 1)
print(
"test_conversion_from_bytes_with_hide_annotations: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_hide_annotations_with_invalid_arg_combination(self):
start_time = time.time()
try:
images_from_path = convert_from_path(
"./tests/test_annotations.pdf",
hide_annotations=True,
use_pdftocairo=True,
)
raise Exception("This should not happen")
except NotImplementedError:
pass
print(
"test_conversion_from_path_with_hide_annotations_with_invalid_arg_combination: {} sec".format(
time.time() - start_time
)
)
## Test pdfinfo
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_pdfinfo_from_path(self):
start_time = time.time()
info = pdfinfo_from_path("./tests/test.pdf")
self.assertTrue(info.get("Pages", 0) == 1)
print("test_pdfinfo_from_path: {} sec".format(time.time() - start_time))
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_pdfinfo_from_bytes(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as fh:
info = pdfinfo_from_bytes(fh.read())
self.assertTrue(info.get("Pages", 0) == 1)
print("test_pdfinfo_from_bytes: {} sec".format(time.time() - start_time))
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_pdfinfo_from_path_241(self):
start_time = time.time()
info = pdfinfo_from_path("./tests/test_241.pdf")
self.assertTrue(info.get("Pages", 0) == 241)
print("test_pdfinfo_from_path_241: {} sec".format(time.time() - start_time))
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_pdfinfo_from_bytes_241(self):
start_time = time.time()
with open("./tests/test_241.pdf", "rb") as fh:
info = pdfinfo_from_bytes(fh.read())
self.assertTrue(info.get("Pages", 0) == 241)
print("test_pdfinfo_from_bytes_241: {} sec".format(time.time() - start_time))
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_pdfinfo_from_path_invalid(self):
start_time = time.time()
try:
info = pdfinfo_from_path("./tests/test.jpg")
raise Exception("This should not happen")
except PDFPageCountError:
pass
print("test_pdfinfo_from_path_241: {} sec".format(time.time() - start_time))
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_pdfinfo_from_bytes_invalid(self):
start_time = time.time()
try:
with open("./tests/test.jpg", "rb") as fh:
info = pdfinfo_from_bytes(fh.read())
raise Exception("This should not happen")
except PDFPageCountError:
pass
print("test_pdfinfo_from_path_241: {} sec".format(time.time() - start_time))
# Test conversion with paths_only
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_using_dir_paths_only(self):
start_time = time.time()
with TemporaryDirectory() as path:
images_from_path = convert_from_path(
"./tests/test.pdf", output_folder=path, paths_only=True
)
self.assertTrue(len(images_from_path) == 1)
self.assertTrue(type(images_from_path[0]) == str)
print(
"test_conversion_from_path_using_dir: {} sec".format(
time.time() - start_time
)
)
# Test for issue #125
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed")
def test_multithread_conversion(self):
start_time = time.time()
files = ["./tests/test.pdf",] * 50
with Pool(10) as p:
res = p.map(convert_from_path, files)
self.assertTrue(len(res) == 50)
print("test_multithread_conversion: {} sec".format(time.time() - start_time))
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_path_with_use_pdftocairo(self):
start_time = time.time()
images_from_path = convert_from_path("./tests/test.pdf", use_pdftocairo=True)
self.assertTrue(len(images_from_path) == 1)
print(
"test_conversion_from_path_with_use_pdftocairo: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_conversion_from_bytes_with_use_pdftocairo(self):
start_time = time.time()
with open("./tests/test.pdf", "rb") as fh:
images_from_bytes = convert_from_bytes(fh.read(), use_pdftocairo=True)
self.assertTrue(len(images_from_bytes) == 1)
print(
"test_conversion_from_bytes_with_use_pdftocairo: {} sec".format(
time.time() - start_time
)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_pdfinfo_rawdates(self):
start_time = time.time()
info = pdfinfo_from_path("./tests/test.pdf", rawdates=True)
self.assertTrue("D:" in info["CreationDate"])
print(
"test_pdfinfo_rawdates: {} sec".format(time.time() - start_time)
)
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_pdfinfo_locked_pdf_with_userpw_only(self):
start_time = time.time()
with TemporaryDirectory() as path:
with open("./tests/test_locked_user_only.pdf", "rb") as pdf_file:
info = pdfinfo_from_bytes(
pdf_file.read(), userpw="pdf2image"
)
self.assertTrue("CreationDate" in info)
print(
"test_pdfinfo_locked_pdf_with_userpw_only: {} sec".format(time.time() - start_time)
)
@profile
def test_convert_from_functions_same_number_of_parameters(self):
start_time = time.time()
self.assertEqual(
len(signature(convert_from_path).parameters),
len(signature(convert_from_bytes).parameters),
)
print("test_convert_from_functions_same_number_of_parameters: {} sec".format(time.time() - start_time))
@profile
def test_pdfinfo_functions_same_number_of_parameters(self):
start_time = time.time()
self.assertEqual(
len(signature(pdfinfo_from_path).parameters),
len(signature(pdfinfo_from_bytes).parameters),
)
print("test_pdfinfo_functions_same_number_of_parameters: {} sec".format(time.time() - start_time))
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_timeout_pdfinfo_from_path_241(self):
start_time = time.time()
with self.assertRaises(PDFPopplerTimeoutError):
info = pdfinfo_from_path("./tests/test_241.pdf", timeout=0.00001)
print("test_timeout_pdfinfo_from_path_241: {} sec".format(time.time() - start_time))
@profile
@unittest.skipIf(not POPPLER_INSTALLED, "Poppler is not installed!")
def test_timeout_convert_from_path_241(self):
start_time = time.time()
with self.assertRaises(PDFPopplerTimeoutError):
imgs = convert_from_path("./tests/test_241.pdf", timeout=1)
print("test_timeout_convert_from_path_241: {} sec".format(time.time() - start_time))
if __name__ == "__main__":
unittest.main()
| mit | -5,162,533,474,726,274,000 | 37.72828 | 111 | 0.567955 | false |
Lujeni/ansible | lib/ansible/modules/monitoring/zabbix/zabbix_host_info.py | 1 | 8077 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) [email protected]
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
RETURN = '''
---
hosts:
description: List of Zabbix hosts. See https://www.zabbix.com/documentation/4.0/manual/api/reference/host/get for list of host values.
returned: success
  type: list
sample: [ { "available": "1", "description": "", "disable_until": "0", "error": "", "flags": "0", "groups": ["1"], "host": "Host A", ... } ]
'''
DOCUMENTATION = '''
---
module: zabbix_host_info
short_description: Gather information about Zabbix host
description:
- This module allows you to search for Zabbix host entries.
- This module was called C(zabbix_host_facts) before Ansible 2.9. The usage did not change.
version_added: "2.7"
author:
- "Michael Miko (@RedWhiteMiko)"
requirements:
- "python >= 2.6"
- "zabbix-api >= 0.5.4"
options:
host_name:
description:
- Name of the host in Zabbix.
- host_name is the unique identifier used and cannot be updated using this module.
required: true
host_ip:
description:
- Host interface IP of the host in Zabbix.
required: false
exact_match:
description:
- Find the exact match
type: bool
default: no
remove_duplicate:
description:
- Remove duplicate host from host result
type: bool
default: yes
host_inventory:
description:
- List of host inventory keys to display in result.
- Whole host inventory is retrieved if keys are not specified.
type: list
required: false
version_added: 2.8
extends_documentation_fragment:
- zabbix
'''
EXAMPLES = '''
- name: Get host info
local_action:
module: zabbix_host_info
server_url: http://monitor.example.com
login_user: username
login_password: password
host_name: ExampleHost
host_ip: 127.0.0.1
timeout: 10
exact_match: no
remove_duplicate: yes
- name: Reduce host inventory information to provided keys
local_action:
module: zabbix_host_info
server_url: http://monitor.example.com
login_user: username
login_password: password
host_name: ExampleHost
host_inventory:
- os
- tag
host_ip: 127.0.0.1
timeout: 10
exact_match: no
remove_duplicate: yes
'''
import atexit
import traceback
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
try:
from zabbix_api import ZabbixAPI
HAS_ZABBIX_API = True
except ImportError:
ZBX_IMP_ERR = traceback.format_exc()
HAS_ZABBIX_API = False
class Host(object):
def __init__(self, module, zbx):
self._module = module
self._zapi = zbx
def get_hosts_by_host_name(self, host_name, exact_match, host_inventory):
""" Get host by host name """
search_key = 'search'
if exact_match:
search_key = 'filter'
host_list = self._zapi.host.get({'output': 'extend', 'selectParentTemplates': ['name'], search_key: {'host': [host_name]},
'selectInventory': host_inventory})
if len(host_list) < 1:
self._module.fail_json(msg="Host not found: %s" % host_name)
else:
return host_list
def get_hosts_by_ip(self, host_ips, host_inventory):
""" Get host by host ip(s) """
hostinterfaces = self._zapi.hostinterface.get({
'output': 'extend',
'filter': {
'ip': host_ips
}
})
if len(hostinterfaces) < 1:
self._module.fail_json(msg="Host not found: %s" % host_ips)
host_list = []
for hostinterface in hostinterfaces:
host = self._zapi.host.get({
'output': 'extend',
'selectGroups': 'extend',
'selectParentTemplates': ['name'],
'hostids': hostinterface['hostid'],
'selectInventory': host_inventory
})
host[0]['hostinterfaces'] = hostinterface
host_list.append(host[0])
return host_list
def delete_duplicate_hosts(self, hosts):
""" Delete duplicated hosts """
unique_hosts = []
listed_hostnames = []
for zabbix_host in hosts:
if zabbix_host['name'] in listed_hostnames:
continue
unique_hosts.append(zabbix_host)
listed_hostnames.append(zabbix_host['name'])
return unique_hosts
def main():
module = AnsibleModule(
argument_spec=dict(
server_url=dict(type='str', required=True, aliases=['url']),
login_user=dict(type='str', required=True),
login_password=dict(type='str', required=True, no_log=True),
host_name=dict(type='str', default='', required=False),
host_ip=dict(type='list', default=[], required=False),
http_login_user=dict(type='str', required=False, default=None),
http_login_password=dict(type='str', required=False, default=None, no_log=True),
validate_certs=dict(type='bool', required=False, default=True),
timeout=dict(type='int', default=10),
exact_match=dict(type='bool', required=False, default=False),
remove_duplicate=dict(type='bool', required=False, default=True),
host_inventory=dict(type='list', default=[], required=False)
),
supports_check_mode=True
)
if module._name == 'zabbix_host_facts':
module.deprecate("The 'zabbix_host_facts' module has been renamed to 'zabbix_host_info'", version='2.13')
if not HAS_ZABBIX_API:
module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)
server_url = module.params['server_url']
login_user = module.params['login_user']
login_password = module.params['login_password']
http_login_user = module.params['http_login_user']
http_login_password = module.params['http_login_password']
validate_certs = module.params['validate_certs']
host_name = module.params['host_name']
host_ips = module.params['host_ip']
timeout = module.params['timeout']
exact_match = module.params['exact_match']
is_remove_duplicate = module.params['remove_duplicate']
host_inventory = module.params['host_inventory']
if not host_inventory:
host_inventory = 'extend'
zbx = None
# login to zabbix
try:
zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
validate_certs=validate_certs)
zbx.login(login_user, login_password)
atexit.register(zbx.logout)
except Exception as e:
module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)
host = Host(module, zbx)
if host_name:
hosts = host.get_hosts_by_host_name(host_name, exact_match, host_inventory)
if is_remove_duplicate:
hosts = host.delete_duplicate_hosts(hosts)
extended_hosts = []
for zabbix_host in hosts:
zabbix_host['hostinterfaces'] = host._zapi.hostinterface.get({
'output': 'extend', 'hostids': zabbix_host['hostid']
})
extended_hosts.append(zabbix_host)
module.exit_json(ok=True, hosts=extended_hosts)
elif host_ips:
extended_hosts = host.get_hosts_by_ip(host_ips, host_inventory)
if is_remove_duplicate:
            extended_hosts = host.delete_duplicate_hosts(extended_hosts)
module.exit_json(ok=True, hosts=extended_hosts)
else:
module.exit_json(ok=False, hosts=[], result="No Host present")
if __name__ == '__main__':
main()
| gpl-3.0 | -9,156,222,259,647,895,000 | 32.936975 | 142 | 0.604432 | false |
arbenson/mrtsqr | dumbo/BtA.py | 1 | 2491 | """
Copyright (c) 2012-2014, Austin Benson and David Gleich
All rights reserved.
This file is part of MRTSQR and is under the BSD 2-Clause License,
which can be found in the LICENSE file in the root directory, or at
http://opensource.org/licenses/BSD-2-Clause
"""
"""
BtA.py
===========
Driver code for computing B^T * A, where both B and A are tall and skinny.
Usage:
dumbo start AtA.py -hadoop $HADOOP_INSTALL \
-matA [path to matrix A] \
-matB [path to matrix B] \
-B_id [unique identifier for path of B] \
-reduce_schedule [optional: number of reducers to use in each stage] \
-output [optional: name of output file] \
-blocksize [optional: block size for compression]
The option 'B_id' is a unique identifier for the path of the B matrix that
does not occur in the path to the A matrix.
TODO(arbenson): this should be automated
Example usage:
dumbo start BtA.py -hadoop $HADOOP_INSTALL -matA A_matrix.mseq \
-matB B_matrix.mseq -output BTA_OUT -B_id B_matrix -blocksize 10
"""
import os
import util
import sys
import dumbo
import time
import numpy
import mrmc
gopts = util.GlobalOptions()
def runner(job):
blocksize = gopts.getintkey('blocksize')
schedule = gopts.getstrkey('reduce_schedule')
schedule = int(schedule)
B_id = gopts.getstrkey('B_id')
if B_id == '':
print "'B_id' not specified"
sys.exit(-1)
job.additer(mapper=mrmc.BtAMapper(B_id=B_id),
reducer=mrmc.BtAReducer(blocksize=blocksize),
opts=[('numreducetasks', str(schedule))])
job.additer(mapper='org.apache.hadoop.mapred.lib.IdentityMapper',
reducer=mrmc.ArraySumReducer,
opts=[('numreducetasks','1')])
def starter(prog):
# set the global opts
gopts.prog = prog
matB = prog.delopt('matB')
if not matB:
return "'matB' not specified'"
matA = prog.delopt('matA')
if not matA:
return "'matA' not specified'"
gopts.getstrkey('B_id', '')
mrmc.starter_helper(prog)
prog.addopt('input', matB)
prog.addopt('input', matA)
matname, matext = os.path.splitext(matA)
gopts.getintkey('blocksize',3)
gopts.getstrkey('reduce_schedule','1')
output = prog.getopt('output')
if not output:
prog.addopt('output','%s-BtA%s'%(matname,matext))
gopts.save_params()
if __name__ == '__main__':
import dumbo
dumbo.main(runner, starter)
| bsd-2-clause | 7,643,623,938,800,354,000 | 26.076087 | 75 | 0.638298 | false |
facebookresearch/faiss | benchs/bench_index_flat.py | 1 | 2187 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import time
import os
import numpy as np
import faiss
from faiss.contrib.datasets import SyntheticDataset
os.system("grep -m1 'model name' < /proc/cpuinfo")
def format_tab(x):
return "\n".join("\t".join("%g" % xi for xi in row) for row in x)
faiss.cvar.distance_compute_min_k_reservoir = 5
# for have_threads in True, False:
for have_threads in False, :
if have_threads:
# good config for Intel(R) Xeon(R) CPU E5-2698 v4 @ 2.20GHz
nthread = 32
else:
nthread = 1
faiss.omp_set_num_threads(nthread)
print("************ nthread=", nthread)
for nq in 100, 10000:
print("*********** nq=", nq)
if nq == 100:
nrun = 500
unit = "ms"
else:
nrun = 20
unit = "s"
restab = []
for d in 16, 32, 64, 128:
print("========== d=", d)
nb = 10000
# d = 32
ds = SyntheticDataset(d, 0, nb, nq)
print(ds)
index = faiss.IndexFlatL2(d)
index.add(ds.get_database())
restab1 = []
restab.append(restab1)
for k in 1, 10, 100:
times = []
for run in range(nrun):
t0 = time.time()
index.search(ds.get_queries(), k)
t1 = time.time()
if run >= nrun // 5: # the rest is considered warmup
times.append((t1 - t0))
times = np.array(times)
if unit == "ms":
times *= 1000
print("search k=%3d t=%.3f ms (± %.4f)" % (
k, np.mean(times), np.std(times)))
else:
print("search k=%3d t=%.3f s (± %.4f)" % (
k, np.mean(times), np.std(times)))
restab1.append(np.mean(times))
print("restab=\n", format_tab(restab))
| mit | -6,713,802,029,258,484,000 | 24.114943 | 72 | 0.474142 | false |
nanomolina/controlDeGastos | Windows/openWindow.py | 1 | 5551 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'new.ui'
#
# Created: Fri Aug 15 21:30:13 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
import sys
from os import getcwd, listdir
from os.path import join, isfile
DB_PATH = "Database/.Database"
COLOR = "#F28F1D" #"#F57B00"
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog_Open(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(243, 397)
self.db_name = ""
self.button_accepted = False
style = "QDialog {background-color:" \
" QLinearGradient(x1:0, y1:0, x2:0, y2:1, stop:0 #616161," \
" stop: 0.5 #505050, stop: 0.6 #434343, stop:1 #656565);}"
Dialog.setStyleSheet(style)
self.initLayout(Dialog)
self.listdb = []
self.initLabel()
self.initListWidget()
self.initButtonBox()
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), self.buttonAccepted)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), Dialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def initLayout(self, Dialog):
self.verticalLayoutWidget = QtGui.QWidget(Dialog)
self.verticalLayoutWidget.setGeometry(QtCore.QRect(10, 10, 221, 371))
self.verticalLayoutWidget.setObjectName(_fromUtf8("verticalLayoutWidget"))
self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
def initLabel(self):
self.label = QtGui.QLabel(self.verticalLayoutWidget)
self.label.setObjectName(_fromUtf8("label"))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
style = "QLabel {background-color:" \
" QLinearGradient( x1: 0, y1: 0, x2: 0, y2: 1," \
" stop: 0 #4d4d4d, stop: 0 #FF4000, stop: 1 #F57B00);" \
" top: 5px; border: 1px solid #656565;" \
" gridline-color: #BAB0A7} "
self.label.setStyleSheet(_fromUtf8(style))
self.verticalLayout.addWidget(self.label)
def initListWidget(self):
self.listWidget = QtGui.QListWidget(self.verticalLayoutWidget)
self.listWidget.setObjectName(_fromUtf8("listWidget"))
self.verticalLayout.addWidget(self.listWidget)
font = QtGui.QFont()
font.setPointSize(13)
font.setBold(True)
font.setWeight(75)
self.listWidget.setFont(font)
style = "QListWidget {background-color:" \
" QLinearGradient( x1: 0, y1: 0, x2: 0, y2: 1," \
" stop: 0 #4d4d4d, stop: 0 #646464, stop: 1 #BDB9B5);" \
" padding: 1px; border-style: solid;" \
" border: 1px solid #656565; border-radius: 5;}"
self.listWidget.setStyleSheet(style)
current_path = getcwd()
db_path = join(current_path, DB_PATH)
listOfdir = listdir(db_path)
for file_ in listOfdir:
dirOfFile = join(db_path, file_)
if isfile(dirOfFile) and ".db" in file_:
                file_, _ = file_.split(".db")
item = QtGui.QListWidgetItem()
self.listWidget.addItem(item)
self.listdb.append(file_)
def initButtonBox(self):
self.buttonBox = QtGui.QDialogButtonBox(self.verticalLayoutWidget)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.verticalLayout.addWidget(self.buttonBox)
style = "background-color: QLinearGradient(" \
" x1: 0, y1: 0, x2: 0, y2: 1," \
" stop: 0 #4d4d4d, stop: 0 #646464, stop: 1 #BDB9B5);"
self.buttonBox.setStyleSheet(style)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_translate("Dialog", "Abrir", None))
self.label.setText(_translate("Dialog", "Bases de Datos disponibles:", None))
__sortingEnabled = self.listWidget.isSortingEnabled()
self.listWidget.setSortingEnabled(False)
counter = 0
for file_ in self.listdb:
item = self.listWidget.item(counter)
item.setText(_translate("Form", file_, None))
counter += 1
self.listWidget.setSortingEnabled(__sortingEnabled)
def buttonAccepted(self):
currentItem = self.listWidget.currentItem()
name = currentItem.text()
self.db_name = name + ".db"
self.button_accepted = True
def getNameToOpen(self):
return join(DB_PATH, str(self.db_name))
def accepted(self):
return self.button_accepted
| gpl-2.0 | -6,159,931,926,551,762,000 | 39.518248 | 107 | 0.631057 | false |
mottosso/mindbender-setup | bin/pythonpath/raven/contrib/django/client.py | 1 | 10355 | # -*- coding: utf-8 -*-
"""
raven.contrib.django.client
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import time
import logging
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from django.http import HttpRequest
from django.template import TemplateSyntaxError
from django.utils.datastructures import MultiValueDict
try:
# support Django 1.9
from django.template.base import Origin
except ImportError:
# backward compatibility
from django.template.loader import LoaderOrigin as Origin
from raven.base import Client
from raven.contrib.django.utils import get_data_from_template, get_host
from raven.contrib.django.middleware import SentryMiddleware
from raven.utils.compat import string_types, binary_type, iterlists
from raven.contrib.django.resolver import RouteResolver
from raven.utils.wsgi import get_headers, get_environ
from raven.utils import once
from raven import breadcrumbs
__all__ = ('DjangoClient',)
class _FormatConverter(object):
def __init__(self, param_mapping):
self.param_mapping = param_mapping
self.params = []
def __getitem__(self, val):
self.params.append(self.param_mapping.get(val))
return '%s'
def format_sql(sql, params):
rv = []
if isinstance(params, dict):
conv = _FormatConverter(params)
if params:
sql = sql % conv
params = conv.params
else:
params = ()
for param in params or ():
if param is None:
rv.append('NULL')
elif isinstance(param, string_types):
if isinstance(param, binary_type):
param = param.decode('utf-8', 'replace')
if len(param) > 256:
param = param[:256] + u'…'
rv.append("'%s'" % param.replace("'", "''"))
else:
rv.append(repr(param))
return sql, rv
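# Illustrative behaviour (hypothetical query and params):
#   format_sql("SELECT * FROM t WHERE name = %s", ['bob'])
#   -> ("SELECT * FROM t WHERE name = %s", ["'bob'"])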
@once
def install_sql_hook():
"""If installed this causes Django's queries to be captured."""
try:
from django.db.backends.utils import CursorWrapper
except ImportError:
from django.db.backends.util import CursorWrapper
try:
real_execute = CursorWrapper.execute
real_executemany = CursorWrapper.executemany
except AttributeError:
# XXX(mitsuhiko): On some very old django versions (<1.6) this
# trickery would have to look different but I can't be bothered.
return
def record_sql(vendor, alias, start, duration, sql, params):
def processor(data):
real_sql, real_params = format_sql(sql, params)
if real_params:
real_sql = real_sql % tuple(real_params)
# maybe category to 'django.%s.%s' % (vendor, alias or
# 'default') ?
data.update({
'message': real_sql,
'category': 'query',
})
breadcrumbs.record(processor=processor)
def record_many_sql(vendor, alias, start, sql, param_list):
duration = time.time() - start
for params in param_list:
record_sql(vendor, alias, start, duration, sql, params)
def execute(self, sql, params=None):
start = time.time()
try:
return real_execute(self, sql, params)
finally:
record_sql(self.db.vendor, getattr(self.db, 'alias', None),
start, time.time() - start, sql, params)
def executemany(self, sql, param_list):
start = time.time()
try:
return real_executemany(self, sql, param_list)
finally:
record_many_sql(self.db.vendor, getattr(self.db, 'alias', None),
start, sql, param_list)
CursorWrapper.execute = execute
CursorWrapper.executemany = executemany
breadcrumbs.ignore_logger('django.db.backends')
class DjangoClient(Client):
logger = logging.getLogger('sentry.errors.client.django')
resolver = RouteResolver()
def __init__(self, *args, **kwargs):
install_sql_hook = kwargs.pop('install_sql_hook', True)
Client.__init__(self, *args, **kwargs)
if install_sql_hook:
self.install_sql_hook()
def install_sql_hook(self):
install_sql_hook()
def get_user_info(self, user):
try:
if hasattr(user, 'is_authenticated'):
# is_authenticated was a method in Django < 1.10
if callable(user.is_authenticated):
authenticated = user.is_authenticated()
else:
authenticated = user.is_authenticated
if not authenticated:
return None
user_info = {}
user_info['id'] = user.pk
if hasattr(user, 'email'):
user_info['email'] = user.email
if hasattr(user, 'get_username'):
user_info['username'] = user.get_username()
elif hasattr(user, 'username'):
user_info['username'] = user.username
return user_info
except Exception:
# We expect that user objects can be somewhat broken at times
            # and try to handle as much as possible, ignoring errors
            # as gracefully as possible here.
return None
def get_data_from_request(self, request):
result = {}
user = getattr(request, 'user', None)
if user is not None:
user_info = self.get_user_info(user)
if user_info:
result['user'] = user_info
try:
uri = request.build_absolute_uri()
except SuspiciousOperation:
# attempt to build a URL for reporting as Django won't allow us to
# use get_host()
if request.is_secure():
scheme = 'https'
else:
scheme = 'http'
host = get_host(request)
uri = '%s://%s%s' % (scheme, host, request.path)
if request.method not in ('GET', 'HEAD'):
try:
data = request.body
except Exception:
try:
data = request.raw_post_data
except Exception:
# assume we had a partial read.
try:
data = request.POST or '<unavailable>'
except Exception:
data = '<unavailable>'
else:
if isinstance(data, MultiValueDict):
data = dict(
(k, v[0] if len(v) == 1 else v)
for k, v in iterlists(data))
else:
data = None
environ = request.META
result.update({
'request': {
'method': request.method,
'url': uri,
'query_string': request.META.get('QUERY_STRING'),
'data': data,
'cookies': dict(request.COOKIES),
'headers': dict(get_headers(environ)),
'env': dict(get_environ(environ)),
}
})
return result
def build_msg(self, *args, **kwargs):
data = super(DjangoClient, self).build_msg(*args, **kwargs)
for frame in self._iter_frames(data):
module = frame.get('module')
if not module:
continue
if module.startswith('django.'):
frame['in_app'] = False
if not self.site and 'django.contrib.sites' in settings.INSTALLED_APPS:
try:
from django.contrib.sites.models import Site
site = Site.objects.get_current()
site_name = site.name or site.domain
data['tags'].setdefault('site', site_name)
except Exception:
# Database error? Fallback to the id
try:
data['tags'].setdefault('site', settings.SITE_ID)
except AttributeError:
# SITE_ID wasn't set, so just ignore
pass
return data
def capture(self, event_type, request=None, **kwargs):
if 'data' not in kwargs:
kwargs['data'] = data = {}
else:
data = kwargs['data']
if request is None:
request = getattr(SentryMiddleware.thread, 'request', None)
is_http_request = isinstance(request, HttpRequest)
if is_http_request:
data.update(self.get_data_from_request(request))
if kwargs.get('exc_info'):
exc_value = kwargs['exc_info'][1]
# As of r16833 (Django) all exceptions may contain a
# ``django_template_source`` attribute (rather than the legacy
# ``TemplateSyntaxError.source`` check) which describes
# template information. As of Django 1.9 or so the new
# template debug thing showed up.
if hasattr(exc_value, 'django_template_source') or \
((isinstance(exc_value, TemplateSyntaxError) and
isinstance(getattr(exc_value, 'source', None),
(tuple, list)) and
isinstance(exc_value.source[0], Origin))) or \
hasattr(exc_value, 'template_debug'):
source = getattr(exc_value, 'django_template_source',
getattr(exc_value, 'source', None))
debug = getattr(exc_value, 'template_debug', None)
if source is None:
self.logger.info('Unable to get template source from exception')
data.update(get_data_from_template(source, debug))
result = super(DjangoClient, self).capture(event_type, **kwargs)
if is_http_request and result:
# attach the sentry object to the request
request.sentry = {
'project_id': data.get('project', self.remote.project),
'id': result,
}
return result
def get_transaction_from_request(self, request):
return self.resolver.resolve(request.path)
| mit | -6,920,952,999,196,945,000 | 33.055921 | 84 | 0.552014 | false |
JeremyOT/Toto | toto/zmqworkerconnection.py | 1 | 10185 | import toto
import zmq
import cPickle as pickle
import zlib
import logging
from toto.exceptions import *
from toto.workerconnection import WorkerConnection
from threading import Thread
from tornado.options import options
from tornado.gen import Task
from collections import deque
from zmq.eventloop.ioloop import ZMQPoller, IOLoop, PeriodicCallback
from zmq.eventloop.zmqstream import ZMQStream
from time import time
from uuid import uuid4
from traceback import format_exc
from toto.options import safe_define
WORKER_SOCKET_CONNECT = 'CONNECT'
WORKER_SOCKET_DISCONNECT = 'DISCONNECT'
class ZMQWorkerConnection(WorkerConnection):
'''Use a ``WorkerConnection`` to make RPCs to the remote worker service(s) or worker/router specified by ``address``.
``address`` may be either an enumerable of address strings or a string of comma separated addresses. RPC retries
  and timeouts are checked at most every ``abs(timeout)`` seconds, when a periodic callback runs through all active
messages and checks for prolonged requests. This is also the default timeout for any new calls. ``timeout`` must not be
``0``.
Optionally pass any object or module with ``compress`` and ``decompress`` methods as the ``compression`` parameter to
compress messages. The module must implement the same algorithm used on the worker service. By default, messages are not
compressed.
Optionally pass any object or module with ``dumps`` and ``loads`` methods that convert an ``object`` to and from a
``str`` to replace the default ``cPickle`` serialization with a protocol of your choice.
Use ``auto_retry`` to specify whether or not messages should be retried by default. Retrying messages can cause substantial
congestion in your worker service. Use with caution.
'''
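  # Illustrative construction (sketch; the endpoint is hypothetical):
  #   conn = ZMQWorkerConnection('tcp://127.0.0.1:5555', timeout=5.0,
  #                              auto_retry=False)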
def __init__(self, address, timeout=10.0, compression=None, serialization=None, auto_retry=False):
if not address:
self.active_connections = set()
elif isinstance(address, str):
self.active_connections = {i.strip() for i in address.split(',')}
else:
self.active_connections = set(address)
self.message_address = 'inproc://WorkerConnection%s' % id(self)
self.__context = zmq.Context()
self.__queue_socket = self.__context.socket(zmq.PUSH)
self.__queue_socket.bind(self.message_address)
self.__thread = None
self.__timeout = timeout
self.__callbacks = {}
self.__queued_messages = {}
self.__message_auto_retry = {}
self.__message_timeouts = {}
self.__ioloop = None
self.__auto_retry = auto_retry
self.loads = serialization and serialization.loads or pickle.loads
self.dumps = serialization and serialization.dumps or pickle.dumps
self.compress = compression and compression.compress or (lambda x: x)
self.decompress = compression and compression.decompress or (lambda x: x)
def invoke(self, method, parameters={}, callback=None, timeout=0, auto_retry=None, await=False):
'''Invoke a ``method`` to be run on a remote worker process with the given ``parameters``. If specified, ``callback`` will be
invoked with any response from the remote worker. By default the worker will timeout or retry based on the settings of the
current ``WorkerConnection`` but ``timeout`` and ``auto_retry`` can be used for invocation specific behavior.
Note: ``callback`` will be invoked with ``{'error': 'timeout'}`` on ``timeout`` if ``auto_retry`` is false. Invocations
set to retry will never timeout and will instead be re-sent until a response is received. This behavior can be useful for
critical operations but has the potential to cause substantial congestion in the worker system. Use with caution. Negative
values of ``timeout`` will prevent messages from ever expiring or retrying regardless of ``auto_retry``. The default
values of ``timeout`` and ``auto_retry`` cause a fallback to the values used to initialize ``WorkerConnection``.
Passing ``await=True`` will wrap the call in a ``tornado.gen.Task`` allowing you to ``yield`` the response from the worker.
The ``Task`` replaces ``callback`` so any user supplied callback will be ignored when ``await=True``.
Alternatively, you can invoke methods with ``WorkerConnection.<module>.<method>(*args, **kwargs)``
where ``"<module>.<method>"`` will be passed as the ``method`` argument to ``invoke()``.
'''
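    # Illustrative usage (sketch; 'mail.send' and its parameters are hypothetical):
    #   connection.invoke('mail.send', {'to': addr},
    #                     callback=lambda response: handle(response))
    #   # or from a tornado coroutine, yielding the worker's response:
    #   # response = yield connection.invoke('mail.send', {'to': addr}, await=True)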
if await:
return Task(lambda callback: self._queue_message(self.compress(self.dumps({'method': method, 'parameters': parameters})), callback, timeout, auto_retry))
self._queue_message(self.compress(self.dumps({'method': method, 'parameters': parameters})), callback, timeout, auto_retry)
def add_connection(self, address):
'''Connect to the worker at ``address``. Worker invocations will be round robin load balanced between all connected workers.'''
self._queue_message(address, command=WORKER_SOCKET_CONNECT)
def remove_connection(self, address):
'''Disconnect from the worker at ``address``. Worker invocations will be round robin load balanced between all connected workers.'''
self._queue_message(address, command=WORKER_SOCKET_DISCONNECT)
def set_connections(self, addresses):
'''A convenience method to set the connected addresses. A connection will be made to any new address included in the ``addresses``
enumerable and any currently connected address not included in ``addresses`` will be disconnected. If an address in ``addresses``
is already connected, it will not be affected.
'''
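    # e.g. (hypothetical endpoints):
    #   conn.set_connections(['tcp://10.0.0.1:5555', 'tcp://10.0.0.2:5555'])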
addresses = set(addresses)
to_remove = self.active_connections - addresses
to_add = addresses - self.active_connections
for a in to_remove:
self.remove_connection(a)
for a in to_add:
self.add_connection(a)
def __len__(self):
return len(self.__queued_messages)
def _queue_message(self, message, callback=None, timeout=0, auto_retry=None, command=''):
if not self.__ioloop:
self.start()
message_id = str(uuid4())
if callback:
self.__callbacks[message_id] = callback
if timeout != 0:
self.__message_timeouts[message_id] = timeout
if auto_retry is not None:
self.__message_auto_retry[message_id] = auto_retry
self.__queue_socket.send_multipart((command, message_id, message))
def log_error(self, error):
logging.error(repr(error))
def start(self):
if self.__ioloop:
return
def loop():
self.__ioloop = IOLoop()
queue_socket = self.__context.socket(zmq.PULL)
queue_socket.connect(self.message_address)
queue_stream = ZMQStream(queue_socket, self.__ioloop)
def receive_response(message, response_override=None):
self.__queued_messages.pop(message[1], None)
self.__message_timeouts.pop(message[1], None)
callback = self.__callbacks.pop(message[1], None)
if callback:
try:
callback(response_override or self.loads(self.decompress(message[2])))
except Exception as e:
self.log_error(e)
callback({'error': e})
def create_worker_stream():
def close_callback():
logging.info('Worker stream closed')
create_worker_stream()
worker_socket = self.__context.socket(zmq.DEALER)
for address in self.active_connections:
worker_socket.connect(address)
worker_stream = ZMQStream(worker_socket, self.__ioloop)
worker_stream.on_recv(receive_response)
worker_stream.set_close_callback(close_callback)
self._worker_stream = worker_stream
create_worker_stream()
def queue_message(message):
if message[0]:
if message[0] == WORKER_SOCKET_CONNECT and message[2] not in self.active_connections:
self.active_connections.add(message[2])
self._worker_stream.socket.connect(message[2])
elif message[0] == WORKER_SOCKET_DISCONNECT and message[2] in self.active_connections:
self.active_connections.remove(message[2])
self._worker_stream.socket.disconnect(message[2])
return
self.__queued_messages[message[1]] = (time(), message)
try:
self._worker_stream.send_multipart(message)
except IOError as e:
self.log_error(e)
logging.info('Reconnecting')
create_worker_stream()
except Exception as e:
self.log_error(e)
queue_stream.on_recv(queue_message)
def timeout_message():
now = time()
for message, retry in [(item[1], self.__message_auto_retry.get(item[1][1], self.__auto_retry)) for item, t in ((i, self.__message_timeouts.get(i[1][1], self.__timeout)) for i in self.__queued_messages.itervalues()) if t >= 0 and (item[0] + t < now)]:
if retry:
logging.info('Worker timeout, requeuing ' + message[1])
queue_message(message)
else:
receive_response(('', message[1]), {'error': 'timeout'})
timeout_callback = PeriodicCallback(timeout_message, int(abs(self.__timeout * 1000.0)), io_loop = self.__ioloop)
timeout_callback.start()
self.__ioloop.start()
self.__thread = None
self.__thread = Thread(target=loop)
self.__thread.daemon = True
self.__thread.start()
def stop(self):
if self.__ioloop:
self.__ioloop.stop()
def join(self):
if self.__thread:
self.__thread.join()
@classmethod
def instance(cls):
'''Returns the default instance of ``ZMQWorkerConnection`` as configured by the options prefixed
with ``worker_``, instantiating it if necessary. Import the ``workerconnection`` module within
your ``TotoService`` and run it with ``--help`` to see all available options.
'''
if not hasattr(cls, '_instance'):
cls._instance = cls(options.worker_address, timeout=options.worker_timeout, compression=options.worker_compression_module and __import__(options.worker_compression_module), serialization=options.worker_serialization_module and __import__(options.worker_serialization_module), auto_retry=options.worker_auto_retry)
return cls._instance
| mit | 51,147,384,794,014,200 | 47.5 | 319 | 0.68434 | false |
coddingtonbear/django-location | location/signals.py | 1 | 1394 | from django.dispatch.dispatcher import Signal
from location.models import LocationSnapshot
location_updated = Signal(providing_args=['user', 'from_', 'to'])
location_changed = Signal(providing_args=['user', 'from_', 'to'])
class watch_location(object):
def __init__(self, user):
self.user = user
def _get_current_location(self):
return LocationSnapshot.objects.filter(
source__user=self.user,
).order_by('-date')[0]
def __enter__(self):
self.original_location = None
try:
self.original_location = self._get_current_location()
except IndexError:
pass
return self
def __exit__(self, *args):
current_location = self._get_current_location()
if self.original_location != current_location:
location_updated.send(
sender=self,
user=self.user,
from_=self.original_location,
to=current_location,
)
if (
self.original_location and
self.original_location.location
!= current_location.location
):
location_changed.send(
sender=self,
user=self.user,
from_=self.original_location,
to=current_location,
)
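# Illustrative usage (sketch; the snapshot fields shown are hypothetical):
#
#   with watch_location(user):
#       LocationSnapshot.objects.create(source=source, location=point)
#   # On __exit__, location_updated fires whenever the newest snapshot differs
#   # from the one captured on __enter__; location_changed fires only when the
#   # geometry itself changed.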
| mit | -3,250,612,997,930,834,400 | 29.304348 | 65 | 0.537303 | false |
pck886/kicomav | Engine/plugins/emalware.py | 1 | 12387 | # -*- coding:utf-8 -*-
# Author: Kei Choi([email protected])
import os
import re
import kernel
import kavutil
import cryptolib
# -------------------------------------------------------------------------
# KavMain class
# -------------------------------------------------------------------------
class KavMain:
# ---------------------------------------------------------------------
    # init(self, plugins_path, verbose=False)
    # Initializes the plugin engine.
    # Input  : plugins_path - location of the plugin engine
    #          verbose      - debug mode (True or False)
    # Return : 0 on success, non-zero on failure
# ---------------------------------------------------------------------
    def init(self, plugins_path, verbose=False):  # initialize the plugin engine
pat = r'POST /cdn-cgi/\x00\x00 HTTP/1.1\r\nUser-Agent: \x00\r\nHost:' + \
r'[\d\D]+?GET\x00+/\x00+Cookie:[\d\D]+?http[\d\D]+?url=[\d\D]+?POST'
self.p_linux_mirai = re.compile(pat)
        # string patterns for the Mirai variant
self.mirai_a_strings = [
'POST /cdn-cgi/',
'HTTP/1.1\r\nUser-Agent: ',
'Host:',
'GET',
'Cookie:',
'http',
'url=',
'proc/net/tcp'
]
self.aho_mirai_a = kavutil.AhoCorasick()
self.aho_mirai_a.make_tree(self.mirai_a_strings)
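        # Note: scan() below relies on search() yielding (position, string)
        # tuples, so n[1] there is the matched pattern text.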
        return 0  # plugin engine initialized successfully
# ---------------------------------------------------------------------
# uninit(self)
    # Shuts down the plugin engine.
    # Return : 0 on success, non-zero on failure
# ---------------------------------------------------------------------
    def uninit(self):  # shut down the plugin engine
        return 0  # plugin engine shut down successfully
# ---------------------------------------------------------------------
# getinfo(self)
    # Provides key information about the plugin engine (author, version, ...)
    # Return : plugin engine information
# ---------------------------------------------------------------------
    def getinfo(self):  # key information about the plugin engine
        info = dict()  # declare a dictionary
        info['author'] = 'Kei Choi'  # author
        info['version'] = '1.1'  # version
        info['title'] = 'eMalware Engine'  # engine description
        info['kmd_name'] = 'emalware'  # engine file name
        info['sig_num'] = kavutil.handle_pattern_md5.get_sig_num('emalware') + 2  # number of detectable/disinfectable malware signatures
return info
# ---------------------------------------------------------------------
# listvirus(self)
    # Returns the list of malware that can be detected/disinfected.
    # Return : list of malware names
# ---------------------------------------------------------------------
    def listvirus(self):  # list of detectable malware
vlist = kavutil.handle_pattern_md5.get_sig_vlist('emalware')
vlist.append('Backdoor.Linux.Mirai.a.gen')
vlist = list(set(vlist))
vlist.sort()
vlists = []
for vname in vlist:
vlists.append(kavutil.normal_vname(vname))
vlists.append(kavutil.normal_vname('<n>AdWare.Win32.Sokuxuan.gen'))
return vlists
# ---------------------------------------------------------------------
# scan(self, filehandle, filename, fileformat)
    #   Scans for malware.
    # Input  : filehandle  - file handle
    #          filename    - file name
    #          fileformat  - file format
    #          filename_ex - file name (name inside an archive)
    # Return : (malware found or not, malware name, malware ID), etc.
# ---------------------------------------------------------------------
    def scan(self, filehandle, filename, fileformat, filename_ex):  # scan for malware
try:
mm = filehandle
            # Is a PE format among the pre-analyzed file formats?
if 'ff_pe' in fileformat:
ff = fileformat['ff_pe']
                # case 1: scan each whole section by hash
for idx, section in enumerate(ff['pe']['Sections']):
                    # if (section['Characteristics'] & 0x20000000) == 0x20000000:  # executable attribute?
# print section['Name'], hex(section['SizeRawData'])
fsize = section['SizeRawData']
if fsize and kavutil.handle_pattern_md5.match_size('emalware', fsize):
foff = section['PointerRawData']
fmd5 = cryptolib.md5(mm[foff:foff+fsize])
# print fsize, fmd5
vname = kavutil.handle_pattern_md5.scan('emalware', fsize, fmd5)
if vname:
vname = kavutil.normal_vname(vname)
return True, vname, 0, kernel.INFECTED
                # case 2: an executable embedded in the last section
if len(ff['pe']['Sections']):
                    # the last section
sec = ff['pe']['Sections'][-1]
off = sec['PointerRawData']
size = sec['SizeRawData']
                    # is an executable present?
exe_offs = [m.start() for m in re.finditer('MZ', mm[off:off+size])]
for exe_pos in exe_offs:
fsize = 0x1d5
if fsize and kavutil.handle_pattern_md5.match_size('emalware', fsize):
fmd5 = cryptolib.md5(mm[off + exe_pos:off + exe_pos + fsize])
# print fsize, fmd5
vname = kavutil.handle_pattern_md5.scan('emalware', fsize, fmd5)
if vname:
# return True, vname, 0, kernel.INFECTED
idx = len(ff['pe']['Sections']) - 1
vname = kavutil.normal_vname(vname)
return True, vname, (0x80000000 + idx), kernel.INFECTED
                # case 3: detect malware by its PDB path
if 'PDB_Name' in ff['pe']:
pdb_sigs = {
':\\pz_git\\bin\\': '<n>AdWare.Win32.Sokuxuan.gen',
':\\CODE\\vitruvian\\': '<n>AdWare.Win32.Vitruvian.gen',
}
for pat in pdb_sigs.keys():
if ff['pe']['PDB_Name'].find(pat) != -1:
vname = kavutil.normal_vname(pdb_sigs[pat])
return True, vname, 0, kernel.INFECTED
            # Is an ELF format among the pre-analyzed file formats?
elif 'ff_elf' in fileformat:
ff = fileformat['ff_elf']
if len(ff['elf']['Sections']):
for section in ff['elf']['Sections']:
if (section['Type'] & 0x1) == 0x1 and (section['Flag'] & 0x4) == 0x4: # 프로그램 데이터이면서 실행 속성?
# print section['Name'], section['Size'], section['Offset']
fsize = section['Size']
if fsize and kavutil.handle_pattern_md5.match_size('emalware', fsize):
foff = section['Offset']
fmd5 = cryptolib.md5(mm[foff:foff + fsize])
# print fsize, fmd5
vname = kavutil.handle_pattern_md5.scan('emalware', fsize, fmd5)
if vname:
vname = kavutil.normal_vname(vname)
return True, vname, 0, kernel.INFECTED
elif len(ff['elf']['ProgramHeaders']):
for ph in ff['elf']['ProgramHeaders']:
if (ph['Type'] & 0x1) == 0x1 and (ph['Flag'] & 0x1) == 0x1:
fsize = ph['Size']
if fsize and kavutil.handle_pattern_md5.match_size('emalware', fsize):
foff = ph['Offset']
fmd5 = cryptolib.md5(mm[foff:foff + fsize])
# print fsize, fmd5
vname = kavutil.handle_pattern_md5.scan('emalware', fsize, fmd5)
if vname:
vname = kavutil.normal_vname(vname)
return True, vname, 0, kernel.INFECTED
                # detect Mirai variants
'''
for section in ff['elf']['Sections']:
if section['Name'] == '.rodata':
fsize = section['Size']
foff = section['Offset']
if self.p_linux_mirai.match(mm[foff:foff+fsize]):
return True, 'Backdoor.Linux.Mirai.gen', 0, kernel.SUSPECT
'''
for section in ff['elf']['Sections']:
if section['Name'] == '.rodata':
vstring = []
foff = section['Offset']
ret = self.aho_mirai_a.search(mm[foff:foff + 0x200])
for n in ret[:len(self.mirai_a_strings)]:
vstring.append(n[1])
# print vstring
# print len(set(vstring)), len(self.mirai_a_strings)
if set(vstring) == set(self.mirai_a_strings):
return True, 'Backdoor.Linux.Mirai.a.gen', 0, kernel.SUSPECT
            # Installers such as NSIS carry a payload in the attached area;
            # detect the original image directly instead of decompiling.
if 'ff_attach' in fileformat:
foff = fileformat['ff_attach']['Attached_Pos']
buf = mm[foff:]
fsize = len(buf)
if fsize and kavutil.handle_pattern_md5.match_size('emalware', fsize):
                fmd5 = cryptolib.md5(buf)  # from the attachment offset to EOF
vname = kavutil.handle_pattern_md5.scan('emalware', fsize, fmd5)
if vname:
vname = kavutil.normal_vname(vname)
return True, vname, 0, kernel.INFECTED
except IOError:
pass
        # Report that no malware was found.
return False, '', -1, kernel.NOT_FOUND
    # ---------------------------------------------------------------------
    # disinfect(self, filename, malware_id)
    # Disinfects the malware.
    # Arguments : filename   - file name
    #           : malware_id - ID of the malware to disinfect
    # Returns   : whether disinfection succeeded
    # ---------------------------------------------------------------------
    def disinfect(self, filename, malware_id):  # disinfect malware
try:
            # Is the ID received from the scan result 0?
if malware_id == 0:
                os.remove(filename)  # delete the file
                return True  # disinfection complete
if malware_id & 0x80000000 == 0x80000000:
idx = malware_id & 0x7fffffff
                import pe
                buf = open(filename, 'rb').read()
                pe_obj = pe.PE(buf, False, filename)  # keep the pe module unshadowed
                try:
                    pe_format = pe_obj.parse()  # parse the PE file
except MemoryError:
pe_format = None
if pe_format is None:
return False
ff = {'pe': pe_format}
if len(ff['pe']['Sections']) > idx:
section = ff['pe']['Sections'][idx]
fsize = section['SizeRawData']
foff = section['PointerRawData']
data = buf[:foff] + ('\x00' * fsize) + buf[foff+fsize:]
open(filename, 'wb').write(data)
return True
except IOError:
pass
        return False  # disinfection failed
| gpl-2.0 | 4,424,197,806,499,684,000 | 41.310861 | 115 | 0.409843 | false |
DHTC-Tools/logstash-confs | condor/python/split-index.py | 1 | 5102 | #!/usr/bin/env python
import datetime
import argparse
import sys
import logging
import pytz
import elasticsearch
import elasticsearch.helpers
ES_NODES = 'uct2-es-door.mwt2.org'
VERSION = '0.1'
SOURCE_INDEX = 'osg-connect-job-details'
def get_start_week(start_date):
"""
    Return a datetime at the start of the ISO week that start_date falls in
    (e.g. if start_date is day 5 of an ISO week, return a datetime object
    from 5 days earlier)
    :param start_date: a UTC-localized datetime object to use
    :return: a UTC-localized datetime, start_date minus isoweekday() days
"""
iso_datetime = start_date - datetime.timedelta(days=start_date.isoweekday())
return iso_datetime
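# Worked example (date hypothetical): for Wednesday 2016-01-06 (isoweekday 3),
# get_start_week returns 2016-01-03, i.e. the input minus isoweekday() days.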
def validate_date(arg):
"""
Validate that text string provided is a valid date
"""
if arg is None or len(arg) != 8:
return None
year = arg[0:4]
month = arg[4:6]
day = arg[6:8]
try:
year = int(year)
month = int(month)
day = int(day)
except ValueError:
return None
if year < 2000 or year > 2038:
return None
if month < 1 or month > 12:
return None
if day < 1 or day > 31:
return None
try:
utc = pytz.utc
temp = utc.localize(datetime.datetime(year, month, day, 0, 0, 0))
except ValueError:
return None
return temp
def reindex(source_index, target_index, start_date, end_date, client):
"""
Reindex documents that occur between start_date and end_date
from source index to target index
:param client: instantiated ES client to use
:param source_index: source index for documents
:param target_index: destination index for documents that match
:param start_date: UTC localized datetime that documents need to occur after
:param end_date: UTC localized datetime that documents need to occur before
:return: tuple of (# of successes, error messages) indicating any issues
"""
utc = pytz.utc
start_time = utc.localize(datetime.datetime.combine(start_date, datetime.time(0, 0, 0)))
end_time = utc.localize(datetime.datetime.combine(end_date, datetime.time(0, 0, 0)))
range_query = {"query": {
"filtered": {
"filter": {
"bool": {
"must": [
{"range":
{"@timestamp":
{"gte": start_time.isoformat(),
"lt": end_time.isoformat()}}}]}}}}}
sys.stdout.write("Reindexing into {0}\n".format(target_index))
results = elasticsearch.helpers.reindex(client,
source_index,
target_index,
range_query,
scroll='30m')
return results
def get_es_client():
""" Instantiate DB client and pass connection back """
return elasticsearch.Elasticsearch(hosts=ES_NODES,
retry_on_timeout=True,
max_retries=10,
timeout=300)
def scan_and_reindex(start_date=None, end_date=None, client=None):
"""
Iterate through weeks between start and end date and
reindex documents to a weekly index
:param start_date: date to start reindexing
:param end_date: date to end indexing
:param client: instantiated ES client to use
:return: None
"""
current_date = get_start_week(start_date)
while current_date < end_date:
iso_year, iso_week, _ = current_date.isocalendar()
weekly_index = "{0}-{1}-{2:0>2}".format('osg-connect-job-details',
iso_year,
iso_week)
week_end_date = current_date + datetime.timedelta(days=7)
results = reindex(SOURCE_INDEX,
weekly_index,
current_date,
week_end_date,
client)
logging.warning("{0}".format(results))
current_date += datetime.timedelta(days=7)
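# Illustrative weekly index names produced by a run spanning early 2016
# (dates hypothetical): osg-connect-job-details-2016-01,
# osg-connect-job-details-2016-02, ... -- one index per ISO week.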
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Reindex events from ' +
'osg-connect-job-details ' +
'to weekly indices')
parser.add_argument('--start-date',
dest='start_date',
default=None,
required=True,
help='Reindex events that occur on this day or after')
parser.add_argument('--end-date',
dest='end_date',
default=None,
help='Reindex events that occur before this day')
    args = parser.parse_args(sys.argv[1:])
    start_date = validate_date(args.start_date)
    end_date = validate_date(args.end_date)
    if start_date is None or end_date is None:
        sys.exit('Invalid --start-date or --end-date (expected YYYYMMDD)')
    client = get_es_client()
    scan_and_reindex(start_date, end_date, client)
| apache-2.0 | -7,657,909,026,755,955,000 | 33.945205 | 92 | 0.546844 | false |
phoebe-project/phoebe2-docs | 2.0/tutorials/irrad_method_horvat.py | 1 | 3065 | #!/usr/bin/env python
# coding: utf-8
# Lambert Scattering (irrad_method='horvat')
# ============================
#
# Setup
# -----------------------------
# Let's first make sure we have the latest version of PHOEBE 2.0 installed. (You can comment out this line if you don't use pip for your installation or don't want to update to the latest release).
# In[ ]:
get_ipython().system('pip install -I "phoebe>=2.0,<2.1"')
# As always, let's do imports and initialize a logger and a new bundle. See [Building a System](../tutorials/building_a_system.html) for more details.
# In[1]:
get_ipython().run_line_magic('matplotlib', 'inline')
# In[2]:
import phoebe
from phoebe import u # units
import numpy as np
import matplotlib.pyplot as plt
logger = phoebe.logger('error')
b = phoebe.default_binary()
# Relevant Parameters
# ---------------------------------
# For parameters that affect reflection and heating (irrad_frac_\*) see the tutorial on [reflection and heating](./reflection_heating.ipynb).
#
# The 'irrad_method' compute option dictates whether irradiation is handled according to the new Horvat scheme which includes Lambert Scattering, Wilson's original reflection scheme, or ignored entirely.
# In[3]:
print(b['irrad_method'])
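# As a hedged aside, the scheme can also be chosen per compute call, as done
# further below: b.run_compute(irrad_method='wilson') or
# b.run_compute(irrad_method='horvat').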
# Influence on Light Curves (fluxes)
# ---------------------------------
#
# Let's (roughly) reproduce Figure 8 from [Prsa et al. 2016](http://phoebe-project.org/publications/2016Prsa+) which shows the difference between Wilson and Horvat schemes for various inclinations.
#
# <img src="prsa+2016_fig8.png" alt="Figure 8" width="600px"/>
#
# First we'll roughly create an A0-K0 binary and set reasonable albedos.
# In[4]:
b['teff@primary'] = 11000
b['rpole@primary'] = 2.5
b['gravb_bol@primary'] = 1.0
b['teff@secondary'] = 5000
b['rpole@secondary'] = 0.85
b['q@binary'] = 0.8/3.0
b.flip_constraint('mass@primary', solve_for='sma@binary')
b['mass@primary'] = 3.0
# In[5]:
print(b.filter(qualifier=['mass', 'rpole', 'teff'], context='component'))
# In[6]:
b['irrad_frac_refl_bol@primary'] = 1.0
b['irrad_frac_refl_bol@secondary'] = 0.6
# Now we'll compute the light curves with wilson and horvat irradiation, and plot the relative differences between the two as a function of phase, for several different values of the inclination.
#
# Note that Figure 8 excluded eclipse effects, but that ability is not included in PHOEBE 2.0, so there will be a slight discrepancy for inclinations which exhibit eclipses.
# In[7]:
phases = np.linspace(0,1,101)
b.add_dataset('lc', times=b.to_time(phases))
# In[8]:
for incl in [0,30,60,90]:
b.set_value('incl@binary', incl)
b.run_compute(irrad_method='wilson')
fluxes_wilson = b.get_value('fluxes', context='model')
b.run_compute(irrad_method='horvat')
fluxes_horvat = b.get_value('fluxes', context='model')
plt.plot(phases, (fluxes_wilson-fluxes_horvat)/fluxes_wilson, label='i={}'.format(incl))
plt.xlabel('phase')
plt.ylabel('[F(wilson) - F(horvat)] / F(wilson)')
plt.legend(loc='upper center')
plt.show()
| gpl-3.0 | -2,983,392,925,752,449,000 | 25.422414 | 203 | 0.675041 | false |
Bharath-J/Mezzanine | setup.py | 1 | 4939 |
# #import os
# #import sys
# #from setuptools import setup, find_packages
# #from shutil import rmtree
# #from mezzanine import __version__ as version
# #exclude = ["mezzanine/project_template/dev.db",
# # "mezzanine/project_template/project_name/local_settings.py"]
# #if sys.argv == ["setup.py", "test"]:
# # exclude = []
# exclude = dict([(e, None) for e in exclude])
# for e in exclude:
# if e.endswith(".py"):
# try:
# os.remove("%sc" % e)
# except:
# pass
# try:
# with open(e, "r") as f:
# exclude[e] = (f.read(), os.stat(e))
# os.remove(e)
# except:
# pass
# if sys.argv[:2] == ["setup.py", "bdist_wheel"]:
# # Remove previous build dir when creating a wheel build,
# # since if files have been removed from the project,
# # they'll still be cached in the build dir and end up
#     # as part of the build, which we don't want.
# try:
# rmtree("build")
# except:
# pass
# try:
# setup(
# name="Mezzanine",
# version=version,
# author="Stephen McDonald",
# author_email="[email protected]",
# description="An open source content management platform built using "
# "the Django framework.",
# long_description=open("README.rst", 'rb').read().decode('utf-8'),
# license="BSD",
# url="http://mezzanine.jupo.org/",
# zip_safe=False,
# include_package_data=True,
# packages=find_packages(),
# install_requires=[
# "django-contrib-comments",
# "django >= 1.7, < 1.9",
# "filebrowser_safe >= 0.4.0",
# "grappelli_safe >= 0.4.0",
# "tzlocal >= 1.0",
# "bleach >= 1.4",
# "beautifulsoup4 >= 4.1.3",
# "requests >= 2.1.0",
# "requests-oauthlib >= 0.4",
# "future >= 0.9.0",
# "pillow",
# "chardet",
# ],
# entry_points="""
# [console_scripts]
# mezzanine-project=mezzanine.bin.mezzanine_project:create_project
# """,
# test_suite="mezzanine.bin.runtests.main",
# tests_require=["pyflakes>=0.6.1", "pep8>=1.4.1"],
# classifiers=[
# "Development Status :: 5 - Production/Stable",
# "Environment :: Web Environment",
# "Framework :: Django",
# "Intended Audience :: Developers",
# "License :: OSI Approved :: BSD License",
# "Operating System :: OS Independent",
# "Programming Language :: Python",
# "Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: 3",
# "Programming Language :: Python :: 3.3",
# "Programming Language :: Python :: 3.4",
# "Programming Language :: Python :: 3.5",
# "Topic :: Internet :: WWW/HTTP",
# "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
# "Topic :: Internet :: WWW/HTTP :: WSGI",
# "Topic :: Software Development :: Libraries :: "
# "Application Frameworks",
# "Topic :: Software Development :: Libraries :: Python Modules",
# ])
# finally:
# for e in exclude:
# if exclude[e] is not None:
# data, stat = exclude[e]
# try:
# with open(e, "w") as f:
# f.write(data)
# os.chown(e, stat.st_uid, stat.st_gid)
# os.chmod(e, stat.st_mode)
# except:
# pass
# My setup
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='mezzanine-bhj',
version='0.4',
packages=['mezzanine'],
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based polls.',
long_description=README,
url='https://www.example.com/',
author='Your Name',
author_email='[email protected]',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License', # example license
'Operating System :: OS Independent',
'Programming Language :: Python',
# Replace these appropriately if you are stuck on Python 2.
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| bsd-2-clause | 2,649,597,721,540,508,700 | 34.028369 | 79 | 0.524398 | false |
mcclurmc/juju | juju/providers/ec2/tests/common.py | 1 | 7904 | from yaml import dump
from twisted.internet.defer import fail, succeed
from txaws.s3.client import S3Client
from txaws.s3.exception import S3Error
from txaws.ec2.client import EC2Client
from txaws.ec2.exception import EC2Error
from txaws.ec2.model import Instance, Reservation, SecurityGroup
from juju.lib.mocker import KWARGS, MATCH
from juju.providers.ec2 import MachineProvider
from juju.providers.ec2.machine import EC2ProviderMachine
MATCH_GROUP = MATCH(lambda x: x.startswith("juju-moon"))
class EC2TestMixin(object):
env_name = "moon"
service_factory_kwargs = None
def get_config(self):
return {"type": "ec2",
"juju-origin": "distro",
"admin-secret": "magic-beans",
"access-key": "0f62e973d5f8",
"secret-key": "3e5a7c653f59",
"control-bucket": self.env_name}
def get_provider(self):
"""Return the ec2 machine provider.
This should only be invoked after mocker is in replay mode so the
AWS service class will be appropriately replaced by the mock.
"""
return MachineProvider(self.env_name, self.get_config())
def get_instance(self,
instance_id, state="running", machine_id=42, **kwargs):
groups = kwargs.pop("groups",
["juju-%s" % self.env_name,
"juju-%s-%s" % (self.env_name, machine_id)])
reservation = Reservation("x", "y", groups=groups)
return Instance(instance_id, state, reservation=reservation, **kwargs)
def assert_machine(self, machine, instance_id, dns_name):
self.assertTrue(isinstance(machine, EC2ProviderMachine))
self.assertEquals(machine.instance_id, instance_id)
self.assertEquals(machine.dns_name, dns_name)
def get_ec2_error(self, entity_id,
format="The instance ID %r does not exist",
code=503):
"""Make a representative EC2Error for `entity_id`, eg AWS instance_id.
This error is paired with `get_wrapped_ec2_text` below. The
default format represents a fairly common error seen in
working with EC2. There are others."""
message = format % entity_id
return EC2Error(
"<error><Code>1</Code><Message>%s</Message></error>" % message,
code)
def setUp(self):
# mock out the aws services
service_factory = self.mocker.replace(
"txaws.service.AWSServiceRegion")
self._service = service_factory(KWARGS)
def store_factory_kwargs(**kwargs):
self.service_factory_kwargs = kwargs
self.mocker.call(store_factory_kwargs)
self.s3 = self.mocker.mock(S3Client)
self._service.get_s3_client()
self.mocker.result(self.s3)
self.ec2 = self.mocker.mock(EC2Client)
self._service.get_ec2_client()
self.mocker.result(self.ec2)
class EC2MachineLaunchMixin(object):
def _mock_launch_utils(self, ami_name="ami-default", **get_ami_kwargs):
get_public_key = self.mocker.replace(
"juju.providers.common.utils.get_user_authorized_keys")
def match_config(arg):
return isinstance(arg, dict)
get_public_key(MATCH(match_config))
self.mocker.result("zebra")
if not get_ami_kwargs:
return
get_ami = self.mocker.replace(
"juju.providers.ec2.utils.get_current_ami")
get_ami(KWARGS)
def check_kwargs(**kwargs):
self.assertEquals(kwargs, get_ami_kwargs)
return succeed(ami_name)
self.mocker.call(check_kwargs)
def _mock_create_group(self):
group_name = "juju-%s" % self.env_name
self.ec2.create_security_group(
group_name, "juju group for %s" % self.env_name)
self.mocker.result(succeed(True))
self.ec2.authorize_security_group(
group_name, ip_protocol="tcp", from_port="22",
to_port="22", cidr_ip="0.0.0.0/0")
self.mocker.result(succeed([self.env_name]))
self.ec2.describe_security_groups(group_name)
self.mocker.result(succeed(
[SecurityGroup(group_name, "", owner_id="123")]))
self.ec2.authorize_security_group(
group_name, source_group_name=group_name,
source_group_owner_id="123")
self.mocker.result(succeed(True))
def _mock_create_machine_group(self, machine_id):
machine_group_name = "juju-%s-%s" % (self.env_name, machine_id)
self.ec2.create_security_group(
machine_group_name, "juju group for %s machine %s" % (
self.env_name, machine_id))
self.mocker.result(succeed(True))
def _mock_delete_machine_group(self, machine_id):
machine_group_name = "juju-%s-%s" % (self.env_name, machine_id)
self.ec2.delete_security_group(machine_group_name)
self.mocker.result(succeed(True))
def _mock_delete_machine_group_was_deleted(self, machine_id):
machine_group_name = "juju-%s-%s" % (self.env_name, machine_id)
self.ec2.delete_security_group(machine_group_name)
self.mocker.result(fail(self.get_ec2_error(
machine_group_name,
"There are active instances using security group %r")))
def _mock_get_zookeeper_hosts(self, hosts=None):
"""
        Try to encapsulate a variety of behaviors here.
if hosts is None, a default host is used.
if hosts is False, no s3 state is returned
if hosts are passed as a list of instances, they
are returned.
"""
if hosts is None:
hosts = [self.get_instance(
"i-es-zoo", private_dns_name="es.example.internal")]
self.s3.get_object(self.env_name, "provider-state")
if hosts is False:
error = S3Error("<error/>", 404)
error.errors = [{"Code": "NoSuchKey"}]
self.mocker.result(fail(error))
return
state = dump({
"zookeeper-instances":
[i.instance_id for i in hosts]})
self.mocker.result(succeed(state))
if hosts:
# connect grabs the first host of a set.
self.ec2.describe_instances(hosts[0].instance_id)
self.mocker.result(succeed([hosts[0]]))
class MockInstanceState(object):
"""Mock the result of ec2_describe_instances when called successively.
Each call of :method:`get_round` returns a list of mock `Instance`
objects, using the state for that round. Instance IDs not used in
the round (and passed in from ec2_describe_instances) are
automatically skipped."""
def __init__(self, tester, instance_ids, machine_ids, states):
self.tester = tester
self.instance_ids = instance_ids
self.machine_ids = machine_ids
self.states = states
self.round = 0
def get_round(self, *current_instance_ids):
result = []
for instance_id, machine_id, state in zip(
self.instance_ids, self.machine_ids, self.states[self.round]):
if instance_id not in current_instance_ids:
# Ignore instance_ids that are no longer being
# described, because they have since moved into a
# terminated state
continue
result.append(self.tester.get_instance(instance_id,
machine_id=machine_id,
state=state))
self.round += 1
return succeed(result)
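# Illustrative round progression (values hypothetical): with
#   states=[["running", "running"], ["running", "terminated"]]
# the first get_round("i-1", "i-2") yields both instances "running"; the
# second yields "running"/"terminated"; ids absent from the call are skipped.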
class Observed(object):
"""Minimal wrapper just to ensure :method:`add` returns a `Deferred`."""
def __init__(self):
self.items = set()
def add(self, item):
self.items.add(item)
return succeed(True)
| agpl-3.0 | -6,238,537,517,998,629,000 | 35.762791 | 78 | 0.602859 | false |
jelmer/python-fastimport | fastimport/processors/filter_processor.py | 1 | 11587 | # Copyright (C) 2009 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Import processor that filters the input (and doesn't import)."""
from .. import (
commands,
helpers,
processor,
)
import stat
class FilterProcessor(processor.ImportProcessor):
"""An import processor that filters the input to include/exclude objects.
No changes to the current repository are made.
Here are the supported parameters:
* include_paths - a list of paths that commits must change in order to
be kept in the output stream
* exclude_paths - a list of paths that should not appear in the output
stream
* squash_empty_commits - if set to False, squash commits that don't have
any changes after the filter has been applied
"""
known_params = [
b'include_paths',
b'exclude_paths',
b'squash_empty_commits'
]
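    # A minimal driving sketch (hypothetical input file; assumes the companion
    # fastimport parser module):
    #
    #   from fastimport import parser
    #   proc = FilterProcessor(params={b'include_paths': [b'src/']})
    #   proc.process(parser.ImportParser(open('dump.fi', 'rb')).iter_commands)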
def pre_process(self):
self.includes = self.params.get(b'include_paths')
self.excludes = self.params.get(b'exclude_paths')
self.squash_empty_commits = bool(
self.params.get(b'squash_empty_commits', True))
# What's the new root, if any
self.new_root = helpers.common_directory(self.includes)
# Buffer of blobs until we know we need them: mark -> cmd
self.blobs = {}
# These are the commits we've squashed so far
self.squashed_commits = set()
# Map of commit-id to list of parents
self.parents = {}
def pre_handler(self, cmd):
self.command = cmd
# Should this command be included in the output or not?
self.keep = False
# Blobs to dump into the output before dumping the command itself
self.referenced_blobs = []
def post_handler(self, cmd):
if not self.keep:
return
# print referenced blobs and the command
for blob_id in self.referenced_blobs:
self._print_command(self.blobs[blob_id])
self._print_command(self.command)
def progress_handler(self, cmd):
"""Process a ProgressCommand."""
# These always pass through
self.keep = True
def blob_handler(self, cmd):
"""Process a BlobCommand."""
# These never pass through directly. We buffer them and only
# output them if referenced by an interesting command.
self.blobs[cmd.id] = cmd
self.keep = False
def checkpoint_handler(self, cmd):
"""Process a CheckpointCommand."""
# These always pass through
self.keep = True
def commit_handler(self, cmd):
"""Process a CommitCommand."""
# These pass through if they meet the filtering conditions
interesting_filecmds = self._filter_filecommands(cmd.iter_files)
if interesting_filecmds or not self.squash_empty_commits:
# If all we have is a single deleteall, skip this commit
if len(interesting_filecmds) == 1 and isinstance(
interesting_filecmds[0], commands.FileDeleteAllCommand):
pass
else:
# Remember just the interesting file commands
self.keep = True
cmd.file_iter = iter(interesting_filecmds)
# Record the referenced blobs
for fc in interesting_filecmds:
if isinstance(fc, commands.FileModifyCommand):
if (fc.dataref is not None and
not stat.S_ISDIR(fc.mode)):
self.referenced_blobs.append(fc.dataref)
# Update from and merges to refer to commits in the output
cmd.from_ = self._find_interesting_from(cmd.from_)
cmd.merges = self._find_interesting_merges(cmd.merges)
else:
self.squashed_commits.add(cmd.id)
# Keep track of the parents
if cmd.from_ and cmd.merges:
parents = [cmd.from_] + cmd.merges
elif cmd.from_:
parents = [cmd.from_]
else:
parents = None
if cmd.mark is not None:
self.parents[b':' + cmd.mark] = parents
def reset_handler(self, cmd):
"""Process a ResetCommand."""
if cmd.from_ is None:
# We pass through resets that init a branch because we have to
# assume the branch might be interesting.
self.keep = True
else:
# Keep resets if they indirectly reference something we kept
cmd.from_ = self._find_interesting_from(cmd.from_)
self.keep = cmd.from_ is not None
def tag_handler(self, cmd):
"""Process a TagCommand."""
# Keep tags if they indirectly reference something we kept
cmd.from_ = self._find_interesting_from(cmd.from_)
self.keep = cmd.from_ is not None
def feature_handler(self, cmd):
"""Process a FeatureCommand."""
feature = cmd.feature_name
if feature not in commands.FEATURE_NAMES:
self.warning(
"feature %s is not supported - parsing may fail"
% (feature,))
# These always pass through
self.keep = True
def _print_command(self, cmd):
"""Wrapper to avoid adding unnecessary blank lines."""
text = bytes(cmd)
self.outf.write(text)
if not text.endswith(b'\n'):
self.outf.write(b'\n')
def _filter_filecommands(self, filecmd_iter):
"""Return the filecommands filtered by includes & excludes.
:return: a list of FileCommand objects
"""
if self.includes is None and self.excludes is None:
return list(filecmd_iter())
# Do the filtering, adjusting for the new_root
result = []
for fc in filecmd_iter():
if (isinstance(fc, commands.FileModifyCommand) or
isinstance(fc, commands.FileDeleteCommand)):
if self._path_to_be_kept(fc.path):
fc.path = self._adjust_for_new_root(fc.path)
else:
continue
elif isinstance(fc, commands.FileDeleteAllCommand):
pass
elif isinstance(fc, commands.FileRenameCommand):
fc = self._convert_rename(fc)
elif isinstance(fc, commands.FileCopyCommand):
fc = self._convert_copy(fc)
else:
self.warning(
"cannot handle FileCommands of class %s - ignoring",
fc.__class__)
continue
if fc is not None:
result.append(fc)
return result
def _path_to_be_kept(self, path):
"""Does the given path pass the filtering criteria?"""
if self.excludes and (
path in self.excludes
or helpers.is_inside_any(self.excludes, path)):
return False
if self.includes:
return (
path in self.includes
or helpers.is_inside_any(self.includes, path))
return True
def _adjust_for_new_root(self, path):
"""Adjust a path given the new root directory of the output."""
if self.new_root is None:
return path
elif path.startswith(self.new_root):
return path[len(self.new_root):]
else:
return path
def _find_interesting_parent(self, commit_ref):
while True:
if commit_ref not in self.squashed_commits:
return commit_ref
parents = self.parents.get(commit_ref)
if not parents:
return None
commit_ref = parents[0]
def _find_interesting_from(self, commit_ref):
if commit_ref is None:
return None
return self._find_interesting_parent(commit_ref)
def _find_interesting_merges(self, commit_refs):
if commit_refs is None:
return None
merges = []
for commit_ref in commit_refs:
parent = self._find_interesting_parent(commit_ref)
if parent is not None:
merges.append(parent)
if merges:
return merges
else:
return None
def _convert_rename(self, fc):
"""Convert a FileRenameCommand into a new FileCommand.
:return: None if the rename is being ignored, otherwise a
new FileCommand based on the whether the old and new paths
are inside or outside of the interesting locations.
"""
old = fc.old_path
new = fc.new_path
keep_old = self._path_to_be_kept(old)
keep_new = self._path_to_be_kept(new)
if keep_old and keep_new:
fc.old_path = self._adjust_for_new_root(old)
fc.new_path = self._adjust_for_new_root(new)
return fc
elif keep_old:
# The file has been renamed to a non-interesting location.
# Delete it!
old = self._adjust_for_new_root(old)
return commands.FileDeleteCommand(old)
elif keep_new:
# The file has been renamed into an interesting location
# We really ought to add it but we don't currently buffer
# the contents of all previous files and probably never want
# to. Maybe fast-import-info needs to be extended to
# remember all renames and a config file can be passed
# into here ala fast-import?
self.warning(
"cannot turn rename of %s into an add of %s yet" %
(old, new))
return None
def _convert_copy(self, fc):
"""Convert a FileCopyCommand into a new FileCommand.
:return: None if the copy is being ignored, otherwise a
new FileCommand based on the whether the source and destination
paths are inside or outside of the interesting locations.
"""
src = fc.src_path
dest = fc.dest_path
keep_src = self._path_to_be_kept(src)
keep_dest = self._path_to_be_kept(dest)
if keep_src and keep_dest:
fc.src_path = self._adjust_for_new_root(src)
fc.dest_path = self._adjust_for_new_root(dest)
return fc
elif keep_src:
# The file has been copied to a non-interesting location.
# Ignore it!
return None
elif keep_dest:
# The file has been copied into an interesting location
# We really ought to add it but we don't currently buffer
# the contents of all previous files and probably never want
# to. Maybe fast-import-info needs to be extended to
# remember all copies and a config file can be passed
# into here ala fast-import?
self.warning(
"cannot turn copy of %s into an add of %s yet" %
(src, dest))
return None
| gpl-2.0 | -4,424,120,872,560,378,000 | 36.866013 | 77 | 0.586778 | false |
wgkoro/Countdown-Reader-closing | source/libs/utils.py | 1 | 1190 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
import string
import re
import random
import hashlib
from flask import request, session
def generate_csrf_token():
rand_str = randstr(20)
session['_csrf_token'] = get_session_token(rand_str)
return rand_str
def get_session_token(rand_str):
    key = 'SaltOfToken'
    token_source = key + rand_str  # avoid shadowing the string module
    return hashlib.sha1(token_source).hexdigest()
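# e.g. (illustrative): get_session_token('abc') is the SHA-1 hex digest of
# 'SaltOfToken' + 'abc'.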
def get_template_file():
if not is_mobile():
return 'top.html'
return 'mobile.html'
def is_mobile():
ua = request.headers.get('User-Agent', '')
if not ua:
return False
ua = ua.lower()
if re.match(r'.*(iphone|android).*', ua):
return True
return False
def check_pager(pager):
try:
pager = int(pager)
except:
pager = 1
if pager > 50:
pager = 1
return pager
def randstr(n):
alphabets = string.digits + string.letters
return ''.join(random.choice(alphabets) for i in xrange(n))
def select_rand_img():
imgs = ['nature', 'flower', 'night', 'metro', 'tree']
background = random.choice(imgs)
if is_mobile():
return '%s_s.jpg' % background
return '%s.jpg' % background
| mit | -7,334,088,037,797,551,000 | 19.517241 | 63 | 0.612605 | false |
OnroerendErfgoed/skosprovider_heritagedata | setup.py | 1 | 1309 | import os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
packages = [
'skosprovider_heritagedata'
]
requires = [
'skosprovider>=0.6.0',
'requests',
'rdflib'
]
setup(
name='skosprovider_heritagedata',
version='0.3.1',
description='Skosprovider implementation of the heritagedata.org Vocabularies',
long_description=README,
packages=packages,
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
author='Flanders Heritage Agency',
author_email='[email protected]',
url='https://github.com/OnroerendErfgoed/skosprovider_heritagedata',
keywords='heritagedata.org skos skosprovider thesauri vocabularies',
test_suite='nose.collector'
)
| mit | 516,847,866,655,250,750 | 27.456522 | 83 | 0.662338 | false |
alexad2/XeRPI | Xe1T_Kr83m_Note/lce_helpers_v2.py | 1 | 8006 | import numpy as np
from collections import defaultdict
import ROOT
from subprocess import call
import pandas as pd
##################################################################################################
def atan(y, x):
phi = np.arctan2(y, x)
for i in range(len(phi)):
if phi[i] < 0:
phi[i] += 2*np.pi
return phi
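# Quick check (illustrative): atan(np.array([-1.0]), np.array([0.0]))
# returns array([4.71238898...]), i.e. 3*pi/2 instead of arctan2's -pi/2,
# so all angles land in [0, 2*pi).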
##################################################################################################
def xe_to_lyBins(df,bin_settings,peak,bin_spec_dir='Bin_Hists'):
R, Z, A_r, N_phi, N_z = bin_settings
z_width = Z / N_z
phi_widths = []
for n in N_phi:
phi_widths.append(2*np.pi/n)
if peak == 's10':
s1_spec_max = 20000
s1_ene = 32.1498 # from nuclear data sheets a=83
position = 'i0'
elif peak == 's11':
s1_spec_max = 100
s1_ene = 9.4051
position = 'i0' # only ever consider position of 1st s1
else:
print('error: invalid peak')
return()
bin_data = defaultdict(list)
for z_i in range(int(N_z)):
z_min = z_i * z_width
z_max = (z_i+1) * z_width
df_z = df[ (df[position+'z']<z_min) & (df[position+'z']>=z_max) ]
for r_i in range(len(A_r)):
if r_i == 0:
r_min = 0
else:
r_min = A_r[r_i-1]
r_max = A_r[r_i]
df_r = df_z[ ( np.sqrt(df_z[position+'x']**2 + df_z[position+'y']**2)>r_min )
& ( np.sqrt(df_z[position+'x']**2 + df_z[position+'y']**2)<=r_max )]
for phi_i in range(N_phi[r_i]):
bin_data['z_i'].append(z_i)
bin_data['z'].append( (z_max + z_min)/2 )
bin_data['r_i'].append(r_i)
bin_data['r'].append( (r_max + r_min)/2 )
bin_data['phi_i'].append(phi_i)
phi_min = phi_i * phi_widths[r_i]
phi_max = (phi_i+1) * phi_widths[r_i]
bin_data['phi'].append( (phi_max + phi_min)/2 )
df_phi = df_r[ (atan(df_r[position+'y'].values, df_r[position+'x'].values) > phi_min)
& (atan(df_r[position+'y'].values, df_r[position+'x'].values) <= phi_max )]
bin_data['N'].append(len(df_phi))
c1 = ROOT.TCanvas('','', 800, 700)
hist = ROOT.TH1D('','', 100, 0, s1_spec_max)
for i in range(len(df_phi[peak+'Area'])):
hist.Fill(df_phi[peak+'Area'].values[i])
if hist.GetEntries() < 1:
bin_data['ly'].append(-1)
bin_data['errly'].append(-1)
bin_data['S1AreaMean'].append(-1)
bin_data['S1AreaMeanError'].append(-1)
continue
                hist.SetTitle(
                    peak + ' Spectrum: '
                    '%.1f > z > %.1f, %.1f < r < %.1f, %.1f < phi < %.1f'
                    % (z_min, z_max, r_min, r_max, phi_min, phi_max))
hist.GetXaxis().SetTitle(peak+'Area (pe)')
hist.GetXaxis().CenterTitle()
hist.Sumw2()
hist.SetStats(False)
hist.Draw()
hist.Fit('gaus')
fit = hist.GetFunction('gaus')
p1 = fit.GetParameter(1)
e1 = fit.GetParError(1)
bin_data['S1AreaMean'].append(p1)
bin_data['S1AreaMeanError'].append(e1)
bin_data['ly'].append(p1/s1_ene)
bin_data['errly'].append(e1/s1_ene)
if bin_spec_dir != 'none':
call('mkdir '+bin_spec_dir,shell=True)
chi2 = fit.GetChisquare()
ndf = fit.GetNDF()
p0 = fit.GetParameter(0)
e0 = fit.GetParError(0)
p2 = fit.GetParameter(2)
e2 = fit.GetParError(2)
pt = ROOT.TPaveText(.58, .68, .88, .88, 'NDC')
pt.AddText('Entries = %d'%len(df_phi))
pt.AddText('#mu = %1.3f #pm %1.3f'%(p1, e1))
pt.AddText('#sigma = %1.3f #pm %1.3f' %(p2, e2))
pt.AddText('Amplitude = %1.3f #pm %1.3f' %(p0, e0))
pt.AddText('#chi^{2}/NDF = %1.3f/%1.3f' %(chi2, ndf))
pt.Draw()
c1.Print(bin_spec_dir+'/f_'+peak+'_z%d_r%d_phi%d.png' %(z_i, r_i, phi_i))
c1.Clear()
hist.Delete()
return bin_data
##################################################################################################
def lyBins_to_txt(bin_data,out_file):
f = open(out_file, 'w')
header = 'z t r zmid tmid rmid ly errly\n'
f.write(header)
for i in range(len(bin_data['z'])):
bin_values = (str(bin_data['z_i'][i])+' '+str(bin_data['phi_i'][i])+' '+str(bin_data['r_i'][i])+' '
+str(bin_data['z'][i])+' '+str(bin_data['phi'][i])+' '+str(bin_data['r'][i])+' '
+str(bin_data['ly'][i])+' '+str(bin_data['errly'][i])+'\n')
f.write(bin_values)
f.close()
return
##################################################################################################
def bins_to_plot(bin_dict, peak, bin_settings, outfile, diff = False):
df = pd.DataFrame(bin_dict)
dummyH_list=[]
c1 = ROOT.TCanvas( '','', 2400, 3200 )
ROOT.gStyle.SetOptStat(0)
c1.Divide(3,4,0.02,0.02)
z_hists = []
max_ly = max(df['ly'])
min_ly = min(df['ly'])
zjump = bin_settings[1]/bin_settings[4]
for z_i in range(int(bin_settings[4])):
dummyH_list.append(ROOT.TH2D("","",100,-1*bin_settings[0],bin_settings[0],100,-1*bin_settings[0],bin_settings[0]))
df_new = df[ df['z_i'] == z_i ]
r_hists = []
for r_i in range(len(bin_settings[2])):
r_hists.append(ROOT.TH2D('','', bin_settings[3][r_i], 0, 2*np.pi, len(bin_settings[2]), 0, bin_settings[0]))
df_newer = df_new[ df_new['r_i'] == r_i ]
for i in range(len(df_newer)):
r_hists[r_i].Fill(df_newer['phi'].values[i], df_newer['r'].values[i],
df_newer['ly'].values[i] )
z_hists.append(r_hists)
c1.cd(z_i+1)
dummyH_list[z_i].Draw('colz')
dummyH_list[z_i].SetTitle("%.2fcm < z < %.2fcm" %(z_i*zjump, (z_i+1)*zjump ))
dummyH_list[z_i].GetZaxis().SetTitle("<s1Area>")
dummyH_list[z_i].GetXaxis().SetTitle("x position [cm]")
dummyH_list[z_i].GetXaxis().CenterTitle()
dummyH_list[z_i].GetYaxis().SetTitle("y position [cm]")
dummyH_list[z_i].GetYaxis().CenterTitle()
# c1.SetTopMargin(0.2)
c1.SetRightMargin(0.2)
for i in range(len(z_hists[z_i])):
z_hists[z_i][i].GetZaxis().SetRangeUser(0, max_ly)
if diff:
z_hists[z_i][i].GetZaxis().SetTitle("(pax_ly - xerawdp_ly)^{2} [pe/keV]")
else:
z_hists[z_i][i].GetZaxis().SetTitle(peak + " ly [pe/keV]")
z_hists[z_i][i].GetZaxis().SetTitleOffset(1.8)
z_hists[z_i][i].Draw('pol colz a same')
c1.Print(outfile)
c1.Clear()
return
##################################################################################################
| gpl-3.0 | -3,472,338,365,560,601,000 | 36.586854 | 126 | 0.402823 | false |
adobe-type-tools/robofont-scripts | Anchors/AnchorsOutput.py | 1 | 3389 | from __future__ import print_function
__copyright__ = __license__ = """
Copyright (c) 2013-2019 Adobe Systems Incorporated. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
__doc__ = """
Anchors Output v1.3 - 30 Jul 2019
Updates print commands to use python 3 syntax.
Anchors Output v1.1 - 26 Apr 2016
Outputs all the anchor data to external text file(s) named 'anchors'.
If the family has more than one master, '_X' is appended to the name,
where 'X' represents the index of the font master, counting from 0.
==================================================
Versions:
v1.3 - 30 Jul 2019 - Updates print commands to use python 3 syntax
v1.1 - 26 Apr 2016 - Use the same file naming logic as the derivedchars files
v1.0 - 21 Feb 2013 - Initial release
"""
#----------------------------------------
kAnchorsFileName = "anchors"
#----------------------------------------
import os
def run(font, masterNumber):
anchorsList = []
glyphOrder = font.lib['public.glyphOrder']
if len(glyphOrder) != len(font.keys()):
glyphOrder = font.keys()
# Collect anchors data
for glyphName in glyphOrder:
glyph = font[glyphName]
for anchorIndex in range(len(glyph.anchors)):
anchor = glyph.anchors[anchorIndex]
# Skip nameless anchors
if not len(anchor.name):
print('ERROR: Glyph %s has a nameless anchor. Skipped.' % glyphName)
continue
anchorData = "%s\t%s\t%d\t%d\n" % (glyphName, anchor.name, anchor.x, anchor.y)
anchorsList.append(anchorData)
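            # e.g. one resulting line (glyph/anchor names hypothetical):
            # "A\ttop\t250\t700\n"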
if not len(anchorsList):
print('The font has no anchors.')
return
# Write file
if masterNumber:
filename = "%s_%s" % (kAnchorsFileName, masterNumber)
else:
filename = kAnchorsFileName
print('Writing file %s ...' % filename)
outfile = open(filename, 'w')
outfile.writelines(anchorsList)
outfile.close()
print('Done!')
if __name__ == "__main__":
font = CurrentFont()
if font == None:
print('Open a font first.')
else:
if not font.path:
print('Save the font first.')
elif not len(font):
print('The font has no glyphs.')
else:
folderPath, fileName = os.path.split(font.path)
fileNameNoExtension, fileExtension = os.path.splitext(fileName)
masterNumber = fileNameNoExtension.split('_')[-1]
if not masterNumber.isdigit():
masterNumber = None
os.chdir(folderPath) # Change current directory to the location of the opened font
run(font, masterNumber)
| mit | 5,904,105,215,958,060,000 | 29.258929 | 85 | 0.701682 | false |
infothrill/python-viscosity-app | viscosity_app/vpn.py | 1 | 4333 | """
This module provides procedures to interact in a programmatic way with the
application "Viscosity" from http://www.sparklabs.com/viscosity/ using the
OS X applescripting interface.
"""
import logging
import time
import applescript
from .observer import Subject
EVT_VPN_STOPPED = 100
EVT_VPN_STARTED = 101
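# Typical flow (connection name hypothetical; requires Viscosity.app):
#   vpn = VpnConnection('Office VPN')
#   if vpn.connect():
#       ...  # tunnel is up
#   vpn.disconnect()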
def connect(connection_name):
thescript = """tell application "Viscosity" to connect \"%s\"""" % connection_name
logging.info("VPN: connecting to '%s'", connection_name)
return applescript.AppleScript(thescript).run()
def disconnect_all():
thescript = """tell application "Viscosity" to disconnectall\n"""
logging.debug("disconnecting all viscosity connections")
return applescript.AppleScript(thescript).run()
def disconnect(connection_name):
thescript = """tell application "Viscosity" to disconnect \"%s\"\n""" % connection_name
logging.debug("disconnecting viscosity connection '%s'", connection_name)
return applescript.AppleScript(thescript).run()
def get_active_connection_names():
thescript = """tell application "Viscosity"
set connames to name of connections where state is equal to "Connected"
return connames
end tell"""
try:
names = applescript.AppleScript(thescript).run()
except applescript.ScriptError as exc:
logging.debug("An Apple script error occured while querying active connections", exc_info=exc)
return ()
else:
return names
def get_all_connection_names():
thescript = """tell application "Viscosity"
set connames to name of connections
end tell
return connames"""
logging.debug("getting viscosity connection names")
return applescript.AppleScript(thescript).run()
class VpnConnection(object):
'''
An Applescript based controller for Viscosity.app
(http://www.sparklabs.com/viscosity/)
'''
def __init__(self, connection_name):
super(VpnConnection, self).__init__()
if connection_name not in get_all_connection_names():
raise ValueError("Connection '%s' not found in Viscosity!" % connection_name)
self.__connection_name = connection_name
@property
def name(self):
return self.__connection_name
def connect(self):
_cur_conns = get_active_connection_names()
if self.__connection_name in _cur_conns:
return True
elif len(_cur_conns) > 0:
logging.info("VPN connect(%s): already connected to non-preferred VPN(s): %r", self.__connection_name, _cur_conns)
connect(self.__connection_name)
# wait for it to connect
max_wait = 30 # seconds
current_wait = 0
        while current_wait < max_wait:
            _cur_conns = get_active_connection_names()
            if self.__connection_name in _cur_conns:
                break
            time.sleep(0.5)
            current_wait += 0.5
if self.__connection_name in _cur_conns:
logging.info("VPN: connected to '%s'", self.__connection_name)
return True
else:
            logging.warning("VPN: failed to connect to '%s'", self.__connection_name)
return False
def disconnect(self):
if self.is_connected():
disconnect(self.__connection_name)
def is_connected(self):
return self.__connection_name in get_active_connection_names()
class VpnControllerSubject(Subject):
'''
A class capable of monitoring a specific Viscosity VPN connection and
notifying observers about changes in the status of the connection.
'''
def __init__(self, vpn):
super(VpnControllerSubject, self).__init__()
self.connection = vpn
def refresh(self):
self.connected = self.connection.is_connected()
@property
def connected(self):
if not hasattr(self, '_connected'):
return None
else:
return self._connected
@connected.setter
def connected(self, value):
oldvalue = self.connected
self._connected = value # pylint: disable=W0201
if oldvalue != value:
if value is True:
self.notifyObservers(EVT_VPN_STARTED, "VPN('%s') is connected" % self.connection.name)
else:
self.notifyObservers(EVT_VPN_STOPPED, "VPN('%s') is disconnected" % self.connection.name)
| mit | 6,369,262,081,582,387,000 | 31.825758 | 126 | 0.648973 | false |
klnusbaum/UDJ-Server | udjserver/udj/views/views07/user_modification.py | 1 | 2613 | import json
import re
from udj.views.views07.decorators import NeedsJSON
from udj.views.views07.decorators import AcceptsMethods
from udj.views.views07.decorators import HasNZJSONParams
from udj.views.views07.authdecorators import NeedsAuth
from udj.views.views07.responses import HttpResponseConflictingResource
from udj.views.views07.responses import HttpResponseNotAcceptable
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpRequest
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.contrib.auth.models import User
from django.db import transaction
@NeedsJSON
@AcceptsMethods(['PUT', 'POST'])
@HasNZJSONParams(['username', 'email', 'password'])
def userMod(request, json_params):
#Validate inputs
username = json_params['username']
email = json_params['email']
password = json_params['password']
first_name = json_params.get('first_name', '')
last_name = json_params.get('last_name', '')
if len(password) < 8:
return HttpResponseNotAcceptable("password")
try:
validate_email(email)
except ValidationError:
return HttpResponseNotAcceptable("email")
  # actually do the work
if request.method == 'PUT':
return createUser(request, username, email, password, first_name, last_name)
else:
return modifyUser(request, username, email, password, first_name, last_name)
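# Example JSON body accepted above (values illustrative):
#   {"username": "alice", "email": "[email protected]",
#    "password": "hunter2hunter2"}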
@NeedsAuth
@transaction.commit_on_success
def modifyUser(request, username, email, password, first_name, last_name):
user = request.udjuser
if user.email != email and User.objects.filter(email=email).exists():
return HttpResponseConflictingResource('email')
if username != user.username:
return HttpResponseNotAcceptable('username')
  user.email = email
  user.first_name = first_name
  user.last_name = last_name
  user.set_password(password)
  user.save()
  return HttpResponse()
@transaction.commit_on_success
def createUser(request, username, email, password, first_name, last_name):
if User.objects.filter(username=username).exists():
return HttpResponseConflictingResource('username')
if User.objects.filter(email=email).exists():
return HttpResponseConflictingResource('email')
if not re.compile(r'^[\w.@+-]+$').match(username):
return HttpResponseNotAcceptable("username")
newUser = User.objects.create_user(
username,
email,
password
)
newUser.first_name = first_name
newUser.last_name = last_name
newUser.save()
return HttpResponse(status=201)
| gpl-2.0 | 8,605,335,649,776,238,000 | 29.741176 | 80 | 0.760046 | false |
google-research/google-research | aqt/utils/summary_utils_test.py | 1 | 3467 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for aqt.summary_utils."""
from absl.testing import absltest
from absl.testing import parameterized
import jax.numpy as jnp
import numpy as np
from aqt.jax.stats import Stats
from aqt.utils import summary_utils
class SummaryUtilsTest(parameterized.TestCase):
def assertNestedDictEqual(self, a, b):
np.testing.assert_equal(a.keys(), b.keys())
for key in a:
np.testing.assert_array_equal(a[key], b[key])
@parameterized.named_parameters(
dict(testcase_name='no_keys', keys=[]),
dict(testcase_name='key_not_in_dict', keys=['bounds']))
def test_empty_get_state_dict_summary(self, keys):
state_dict = {}
distr_summary = summary_utils.get_state_dict_summary(state_dict, keys=keys)
self.assertEmpty(distr_summary)
@parameterized.named_parameters(
dict(
testcase_name='keys_in_dict',
keys=['bounds', 'min_per_ch', 'max_per_ch'],
expected_summary={
'/decoder/attention/dense_out/bounds':
np.array([[1., 2.], [2., 4.], [3., 6.]]),
'/decoder/attention/dense_out/min_per_ch':
np.array([-6., -5., -4.]),
'/decoder/attention/dense_out/max_per_ch':
np.array([20., 21., 22.]),
}),
dict(
testcase_name='key_not_in_dict',
keys=['other_key'],
expected_summary={})
)
def test_get_state_dict_summary(self, keys, expected_summary):
state_dict = {
'decoder': {
'attention': {
'dense_out': {
'bounds':
jnp.array([[1., 2.], [2., 4.], [3., 6.]]),
'min_per_ch':
jnp.array([-6., -5., -4.]),
'max_per_ch':
jnp.array([20., 21., 22.]),
'stats':
Stats(
n=1,
mean=jnp.ones(()),
mean_abs=jnp.ones(()),
mean_sq=jnp.ones(()),
mean_batch_maximum=jnp.ones(()),
mean_batch_minimum=jnp.ones(()))
}
},
'mlp': {
'dense_1': {
'stats':
Stats(
n=1,
mean=jnp.ones(()),
mean_abs=jnp.ones(()),
mean_sq=jnp.ones(()),
mean_batch_maximum=jnp.ones(()),
mean_batch_minimum=jnp.ones(()))
}
},
}
}
summary = summary_utils.get_state_dict_summary(state_dict, keys=keys)
self.assertNestedDictEqual(summary, expected_summary)
if __name__ == '__main__':
absltest.main()
| apache-2.0 | 6,234,843,658,226,991,000 | 33.67 | 79 | 0.507067 | false |
StellarCN/py-stellar-base | stellar_sdk/base_transaction_envelope.py | 1 | 5708 | from abc import abstractmethod
from typing import List, Union, Generic, TypeVar
from . import xdr as stellar_xdr
from .exceptions import SignatureExistError
from .keypair import Keypair
from .network import Network
from .utils import hex_to_bytes, sha256
T = TypeVar("T")
class BaseTransactionEnvelope(Generic[T]):
def __init__(
self,
network_passphrase: str,
signatures: List[stellar_xdr.DecoratedSignature] = None,
) -> None:
self.network_passphrase: str = network_passphrase
self.signatures: List[stellar_xdr.DecoratedSignature] = signatures or []
self._network_id: bytes = Network(network_passphrase).network_id()
def hash(self) -> bytes:
"""Get the XDR Hash of the signature base.
This hash is ultimately what is signed before transactions are sent
over the network. See :meth:`signature_base` for more details about
this process.
:return: The XDR Hash of this transaction envelope's signature base.
"""
return sha256(self.signature_base())
def hash_hex(self) -> str:
"""Return a hex encoded hash for this transaction envelope.
:return: A hex encoded hash for this transaction envelope.
"""
return self.hash().hex()
def sign(self, signer: Union[Keypair, str]) -> None:
"""Sign this transaction envelope with a given keypair.
Note that the signature must not already be in this instance's list of
signatures.
:param signer: The keypair or secret to use for signing this transaction
envelope.
:raise: :exc:`SignatureExistError <stellar_sdk.exception.SignatureExistError>`:
if this signature already exists.
"""
if isinstance(signer, str):
signer = Keypair.from_secret(signer)
tx_hash = self.hash()
sig = signer.sign_decorated(tx_hash)
sig_dict = [signature.__dict__ for signature in self.signatures]
if sig.__dict__ in sig_dict:
raise SignatureExistError("The keypair has already signed.")
else:
self.signatures.append(sig)
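    # Usage sketch (illustrative; assumes a concrete subclass instance `te`):
    #   te.sign(Keypair.random())      # sign with a Keypair
    #   te.sign("S...SECRET_SEED...")  # or with a secret seed string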
@abstractmethod
def signature_base(self) -> bytes:
"""Get the signature base of this transaction envelope.
Return the "signature base" of this transaction, which is the value
that, when hashed, should be signed to create a signature that
validators on the Stellar Network will accept.
        It is composed of 4 prefix bytes followed by the xdr-encoded form of
this transaction.
:return: The signature base of this transaction envelope.
"""
raise NotImplementedError("The method has not been implemented.")
def sign_hashx(self, preimage: Union[bytes, str]) -> None:
"""Sign this transaction envelope with a Hash(x) signature.
See Stellar's documentation on `Multi-Sig
<https://www.stellar.org/developers/guides/concepts/multi-sig.html>`_
for more details on Hash(x) signatures.
:param preimage: Preimage of hash used as signer, byte hash or hex encoded string
"""
preimage_bytes: bytes = hex_to_bytes(preimage)
hash_preimage = sha256(preimage_bytes)
hint = stellar_xdr.SignatureHint(hash_preimage[-4:])
sig = stellar_xdr.DecoratedSignature(
hint, stellar_xdr.Signature(preimage_bytes)
)
sig_dict = [signature.__dict__ for signature in self.signatures]
if sig.__dict__ in sig_dict:
raise SignatureExistError("The preimage has already signed.")
else:
self.signatures.append(sig)
def to_xdr_object(self) -> stellar_xdr.TransactionEnvelope:
"""Get an XDR object representation of this :class:`BaseTransactionEnvelope`.
:return: XDR TransactionEnvelope object
"""
raise NotImplementedError("The method has not been implemented.")
def to_xdr(self) -> str:
"""Get the base64 encoded XDR string representing this
:class:`BaseTransactionEnvelope`.
:return: XDR TransactionEnvelope base64 string object
"""
return self.to_xdr_object().to_xdr()
@classmethod
def from_xdr_object(
cls, xdr_object: stellar_xdr.TransactionEnvelope, network_passphrase: str
) -> T:
"""Create a new :class:`BaseTransactionEnvelope` from an XDR object.
:param xdr_object: The XDR object that represents a transaction envelope.
:param network_passphrase: The network to connect to for verifying and retrieving additional attributes from.
:return: A new :class:`TransactionEnvelope` object from the given XDR TransactionEnvelope object.
"""
raise NotImplementedError("The method has not been implemented.")
@classmethod
def from_xdr(cls, xdr: str, network_passphrase: str) -> T:
"""Create a new :class:`BaseTransactionEnvelope` from an XDR string.
:param xdr: The XDR string that represents a transaction
envelope.
:param network_passphrase: which network this transaction envelope is associated with.
:return: A new :class:`BaseTransactionEnvelope` object from the given XDR TransactionEnvelope base64 string object.
"""
xdr_object = stellar_xdr.TransactionEnvelope.from_xdr(xdr)
return cls.from_xdr_object(xdr_object, network_passphrase)
@abstractmethod
def __eq__(self, other: object) -> bool:
pass # pragma: no cover
def __str__(self):
return (
f"<BaseTransactionEnvelope [network_passphrase={self.network_passphrase}, "
f"signatures={self.signatures}]>"
)
| apache-2.0 | -428,335,434,075,978,200 | 37.567568 | 123 | 0.658549 | false |
hail-is/hail | hail/python/hailtop/cleanup_gcr/__main__.py | 1 | 2922 | import sys
import time
import logging
import asyncio
import aiohttp
import hailtop.aiogoogle as aiogoogle
log = logging.getLogger(__name__)
class AsyncIOExecutor:
def __init__(self, parallelism):
self._semaphore = asyncio.Semaphore(parallelism)
async def _run(self, fut, aw):
async with self._semaphore:
try:
fut.set_result(await aw)
except asyncio.CancelledError: # pylint: disable=try-except-raise
raise
except Exception as e: # pylint: disable=broad-except
fut.set_exception(e)
def submit(self, aw):
fut = asyncio.Future()
asyncio.ensure_future(self._run(fut, aw))
return fut
async def gather(self, aws):
futs = [self.submit(aw) for aw in aws]
return [await fut for fut in futs]
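# Usage sketch (must run inside an event loop; names hypothetical):
#   ex = AsyncIOExecutor(parallelism=8)
#   tags = await ex.gather([client.get(f'/{img}/tags/list') for img in images])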
class CleanupImages:
def __init__(self, client):
self._executor = AsyncIOExecutor(8)
self._client = client
async def cleanup_digest(self, image, digest, tags):
log.info(f'cleaning up digest {image}@{digest}')
await self._executor.gather([
self._client.delete(f'/{image}/manifests/{tag}')
for tag in tags])
await self._executor.submit(self._client.delete(f'/{image}/manifests/{digest}'))
log.info(f'cleaned up digest {image}@{digest}')
async def cleanup_image(self, image):
log.info(f'cleaning up image {image}')
log.info(f'listing tags for {image}')
result = await self._executor.submit(self._client.get(f'/{image}/tags/list'))
manifests = result['manifest']
manifests = [(digest, int(data['timeUploadedMs']) / 1000, data['tag']) for digest, data in manifests.items()]
log.info(f'got {len(manifests)} manifests for {image}')
# sort is ascending, oldest first
manifests = sorted(manifests, key=lambda x: x[1])
# keep the most recent 10
manifests = manifests[:-10]
now = time.time()
await asyncio.gather(*[
self.cleanup_digest(image, digest, tags)
for digest, time_uploaded, tags in manifests
if (now - time_uploaded) >= (7 * 24 * 60 * 60) or len(tags) == 0])
log.info(f'cleaned up image {image}')
async def run(self):
images = await self._executor.submit(self._client.get('/tags/list'))
await asyncio.gather(*[
self.cleanup_image(image)
for image in images['child']
])
async def main():
logging.basicConfig(level=logging.INFO)
if len(sys.argv) != 2:
raise ValueError('usage: cleanup_gcr <project>')
project = sys.argv[1]
async with aiogoogle.ContainerClient(
project=project,
timeout=aiohttp.ClientTimeout(total=60)) as client:
cleanup_images = CleanupImages(client)
await cleanup_images.run()
asyncio.run(main())
| mit | 2,255,893,551,007,446,000 | 29.123711 | 117 | 0.605065 | false |
chrislit/abydos | abydos/stemmer/_stemmer.py | 1 | 1218 | # Copyright 2018-2020 by Christopher C. Little.
# This file is part of Abydos.
#
# Abydos is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Abydos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Abydos. If not, see <http://www.gnu.org/licenses/>.
"""abydos.stemmer._stemmer.
abstract class _Stemmer
"""
__all__ = ['_Stemmer']
class _Stemmer:
"""Abstract Stemmer class.
.. versionadded:: 0.3.6
"""
def stem(self, word: str) -> str:
"""Return stem.
Parameters
----------
word : str
The word to stem
Returns
-------
str
Word stem
.. versionadded:: 0.3.6
"""
return word
if __name__ == '__main__':
import doctest
doctest.testmod()
| gpl-3.0 | 6,629,885,516,102,265,000 | 21.555556 | 70 | 0.630542 | false |
partofthething/home-assistant | tests/components/unifi/conftest.py | 1 | 1068 | """Fixtures for UniFi methods."""
from typing import Optional
from unittest.mock import patch
from aiounifi.websocket import SIGNAL_CONNECTION_STATE, SIGNAL_DATA
import pytest
@pytest.fixture(autouse=True)
def mock_unifi_websocket():
"""No real websocket allowed."""
with patch("aiounifi.controller.WSClient") as mock:
def make_websocket_call(data: Optional[dict] = None, state: str = ""):
"""Generate a websocket call."""
if data:
mock.return_value.data = data
mock.call_args[1]["callback"](SIGNAL_DATA)
elif state:
mock.return_value.state = state
mock.call_args[1]["callback"](SIGNAL_CONNECTION_STATE)
else:
raise NotImplementedError
yield make_websocket_call
@pytest.fixture(autouse=True)
def mock_discovery():
"""No real network traffic allowed."""
with patch(
"homeassistant.components.unifi.config_flow.async_discover_unifi",
return_value=None,
) as mock:
yield mock
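# Hedged usage sketch (the test body and payload are illustrative, not part
# of this module): a test in this package could drive the patched websocket as
#
#     async def test_events(hass, mock_unifi_websocket):
#         mock_unifi_websocket(data={"meta": {"message": "event"}, "data": []})
#         mock_unifi_websocket(state="disconnected")
#
# invoking the WSClient callback with SIGNAL_DATA and then
# SIGNAL_CONNECTION_STATE.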
| mit | -5,269,824,949,856,575,000 | 29.514286 | 78 | 0.627341 | false |
schanzen/gnunet-mirror | src/integration-tests/test_mem_consumption.py | 1 | 5199 | #!/usr/bin/python
# This file is part of GNUnet.
# (C) 2010 Christian Grothoff (and other contributing authors)
#
# GNUnet is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2, or (at your
# option) any later version.
#
# GNUnet is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNUnet; see the file COPYING. If not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#
#
#
# This test starts 3 peers and expects bootstrap and a connected clique
#
# Conditions for successful exit:
# Both peers have 1 connected peer in transport, core, topology, fs
import sys
import os
import subprocess
import re
import shutil
import time
from gnunet_testing import Peer
from gnunet_testing import Test
from gnunet_testing import Check
from gnunet_testing import Condition
from gnunet_testing import *
if os.name == "nt":
tmp = os.getenv ("TEMP")
else:
tmp = "/tmp"
#definitions
testname = "test_integration_clique"
verbose = True
check_timeout = 180
def cleanup ():
shutil.rmtree (os.path.join (tmp, "c_bootstrap_server"), True)
shutil.rmtree (os.path.join (tmp, "c_no_nat_client"), True)
shutil.rmtree (os.path.join (tmp, "c_no_nat_client_2"), True)
def check_disconnect ():
check = Check (test)
check.add (StatisticsCondition (client, 'transport', '# peers connected',1))
check.add (StatisticsCondition (client, 'core', '# neighbour entries allocated',1))
check.add (StatisticsCondition (client, 'core', '# peers connected',1))
check.add (StatisticsCondition (client, 'topology', '# peers connected',1))
check.add (StatisticsCondition (client, 'fs', '# peers connected',1))
check.add (StatisticsCondition (server, 'transport', '# peers connected',1))
check.add (StatisticsCondition (server, 'core', '# neighbour entries allocated',1))
check.add (StatisticsCondition (server, 'core', '# peers connected',1))
check.add (StatisticsCondition (server, 'topology', '# peers connected',1))
check.add (StatisticsCondition (server, 'fs', '# peers connected',1))
check.run_blocking (check_timeout, None, None)
def check_connect ():
check = Check (test)
check.add (StatisticsCondition (client, 'transport', '# peers connected',2))
check.add (StatisticsCondition (client, 'core', '# neighbour entries allocated',2))
check.add (StatisticsCondition (client, 'core', '# peers connected',2))
check.add (StatisticsCondition (client, 'topology', '# peers connected',2))
check.add (StatisticsCondition (client, 'fs', '# peers connected',2))
check.add (StatisticsCondition (client2, 'transport', '# peers connected',2))
check.add (StatisticsCondition (client2, 'core', '# neighbour entries allocated',2))
check.add (StatisticsCondition (client2, 'core', '# peers connected',2))
check.add (StatisticsCondition (client2, 'topology', '# peers connected',2))
check.add (StatisticsCondition (client2, 'fs', '# peers connected',2))
check.add (StatisticsCondition (server, 'transport', '# peers connected',2))
check.add (StatisticsCondition (server, 'core', '# neighbour entries allocated',2))
check.add (StatisticsCondition (server, 'core', '# peers connected',2))
check.add (StatisticsCondition (server, 'topology', '# peers connected',2))
check.add (StatisticsCondition (server, 'fs', '# peers connected',2))
check.run_blocking (check_timeout, None, None)
#
# Test execution
#
def run ():
global success
global test
global server
global client
global client2
restarts = 0
iterations = 10000
success = False
test = Test ('test_memory_consumption', verbose)
server = Peer(test, './confs/c_bootstrap_server_w_massif.conf');
server.start();
client = Peer(test, './confs/c_no_nat_client.conf');
client.start();
while (restarts < iterations):
print 'Iteration #' + str (restarts) + ' of ' + str (iterations)
print '---------------------'
restarts += 1
client2 = Peer(test, './confs/c_no_nat_client_2.conf');
client2.start();
if ((client.started == True) and (client2.started == True) and (server.started == True)):
test.p ('Peers started, waiting for clique connection')
check_connect ()
test.p ('All peers connected, stopping client2')
client2.stop ()
check_disconnect ()
test.p ('Peer disconnected\n')
    print str (iterations) + " iterations executed"
server.stop ()
client.stop ()
cleanup ()
try:
run ()
except (KeyboardInterrupt, SystemExit):
print 'Test interrupted'
server.stop ()
client.stop ()
client2.stop ()
cleanup ()
if (success == False):
sys.exit(1)
else:
sys.exit(0)
| gpl-3.0 | 3,829,938,605,057,254,400 | 33.892617 | 96 | 0.670898 | false |
DayGitH/Family-Tree | worker.py | 1 | 28764 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'worker.ui'
#
# Created: Tue Jul 12 16:21:50 2016
# by: pyside-uic 0.2.15 running on PySide 1.2.4
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.setEnabled(True)
MainWindow.resize(1125, 653)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtGui.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName("gridLayout")
self.groupBox = QtGui.QGroupBox(self.centralwidget)
self.groupBox.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.groupBox.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.groupBox.setObjectName("groupBox")
self.horizontalLayout = QtGui.QHBoxLayout(self.groupBox)
self.horizontalLayout.setSizeConstraint(QtGui.QLayout.SetMinimumSize)
self.horizontalLayout.setObjectName("horizontalLayout")
self.maleRadio = QtGui.QRadioButton(self.groupBox)
self.maleRadio.setObjectName("maleRadio")
self.horizontalLayout.addWidget(self.maleRadio)
self.femaleRadio = QtGui.QRadioButton(self.groupBox)
self.femaleRadio.setObjectName("femaleRadio")
self.horizontalLayout.addWidget(self.femaleRadio)
self.gridLayout.addWidget(self.groupBox, 4, 2, 1, 1)
self.horizontalLayout_7 = QtGui.QHBoxLayout()
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.newFamilyButton = QtGui.QPushButton(self.centralwidget)
self.newFamilyButton.setObjectName("newFamilyButton")
self.horizontalLayout_7.addWidget(self.newFamilyButton)
self.saveFamilyButton = QtGui.QPushButton(self.centralwidget)
self.saveFamilyButton.setObjectName("saveFamilyButton")
self.horizontalLayout_7.addWidget(self.saveFamilyButton)
self.loadFamilyButton = QtGui.QPushButton(self.centralwidget)
self.loadFamilyButton.setObjectName("loadFamilyButton")
self.horizontalLayout_7.addWidget(self.loadFamilyButton)
self.gridLayout.addLayout(self.horizontalLayout_7, 11, 0, 1, 1)
self.primaryList = QtGui.QListWidget(self.centralwidget)
self.primaryList.setMaximumSize(QtCore.QSize(16777215, 120))
self.primaryList.setObjectName("primaryList")
self.gridLayout.addWidget(self.primaryList, 10, 4, 1, 1)
self.secondaryList = QtGui.QListWidget(self.centralwidget)
self.secondaryList.setMaximumSize(QtCore.QSize(16777215, 120))
self.secondaryList.setObjectName("secondaryList")
self.gridLayout.addWidget(self.secondaryList, 10, 2, 1, 1)
self.peopleList = QtGui.QListWidget(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.peopleList.sizePolicy().hasHeightForWidth())
self.peopleList.setSizePolicy(sizePolicy)
self.peopleList.setMinimumSize(QtCore.QSize(180, 0))
self.peopleList.setObjectName("peopleList")
self.gridLayout.addWidget(self.peopleList, 1, 0, 9, 1)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.attachSpouseButton = QtGui.QPushButton(self.centralwidget)
self.attachSpouseButton.setObjectName("attachSpouseButton")
self.horizontalLayout_3.addWidget(self.attachSpouseButton)
self.attachParentsButton = QtGui.QPushButton(self.centralwidget)
self.attachParentsButton.setObjectName("attachParentsButton")
self.horizontalLayout_3.addWidget(self.attachParentsButton)
self.attachChildButton = QtGui.QPushButton(self.centralwidget)
self.attachChildButton.setObjectName("attachChildButton")
self.horizontalLayout_3.addWidget(self.attachChildButton)
self.gridLayout.addLayout(self.horizontalLayout_3, 11, 4, 1, 1)
self.bdayEdit = QtGui.QLineEdit(self.centralwidget)
self.bdayEdit.setObjectName("bdayEdit")
self.gridLayout.addWidget(self.bdayEdit, 3, 2, 1, 1)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.moveUpSpouse = QtGui.QToolButton(self.centralwidget)
self.moveUpSpouse.setMaximumSize(QtCore.QSize(25, 16777215))
self.moveUpSpouse.setObjectName("moveUpSpouse")
self.verticalLayout_2.addWidget(self.moveUpSpouse)
self.moveDnSpouse = QtGui.QToolButton(self.centralwidget)
self.moveDnSpouse.setMaximumSize(QtCore.QSize(25, 16777215))
self.moveDnSpouse.setObjectName("moveDnSpouse")
self.verticalLayout_2.addWidget(self.moveDnSpouse)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem)
self.gridLayout.addLayout(self.verticalLayout_2, 3, 5, 3, 1)
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.moveUpChild = QtGui.QToolButton(self.centralwidget)
self.moveUpChild.setMaximumSize(QtCore.QSize(25, 16777215))
self.moveUpChild.setObjectName("moveUpChild")
self.verticalLayout.addWidget(self.moveUpChild)
self.moveDnChild = QtGui.QToolButton(self.centralwidget)
self.moveDnChild.setMaximumSize(QtCore.QSize(25, 16777215))
self.moveDnChild.setObjectName("moveDnChild")
self.verticalLayout.addWidget(self.moveDnChild)
spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem1)
self.gridLayout.addLayout(self.verticalLayout, 8, 5, 2, 1)
self.spouseList = QtGui.QListWidget(self.centralwidget)
self.spouseList.setMaximumSize(QtCore.QSize(16777215, 100))
self.spouseList.setObjectName("spouseList")
self.gridLayout.addWidget(self.spouseList, 3, 4, 3, 1)
self.spouseInfo = QtGui.QLineEdit(self.centralwidget)
self.spouseInfo.setObjectName("spouseInfo")
self.gridLayout.addWidget(self.spouseInfo, 6, 4, 1, 1)
self.childrenList = QtGui.QListWidget(self.centralwidget)
self.childrenList.setObjectName("childrenList")
self.gridLayout.addWidget(self.childrenList, 8, 4, 2, 1)
self.notesEdit = QtGui.QTextEdit(self.centralwidget)
self.notesEdit.setObjectName("notesEdit")
self.gridLayout.addWidget(self.notesEdit, 8, 2, 2, 1)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.createSpouseButton = QtGui.QPushButton(self.centralwidget)
self.createSpouseButton.setObjectName("createSpouseButton")
self.horizontalLayout_4.addWidget(self.createSpouseButton)
self.createParentsButton = QtGui.QPushButton(self.centralwidget)
self.createParentsButton.setObjectName("createParentsButton")
self.horizontalLayout_4.addWidget(self.createParentsButton)
self.createChildButton = QtGui.QPushButton(self.centralwidget)
self.createChildButton.setObjectName("createChildButton")
self.horizontalLayout_4.addWidget(self.createChildButton)
self.gridLayout.addLayout(self.horizontalLayout_4, 11, 2, 1, 1)
self.deletePersonButton = QtGui.QPushButton(self.centralwidget)
self.deletePersonButton.setObjectName("deletePersonButton")
self.gridLayout.addWidget(self.deletePersonButton, 12, 2, 1, 1)
self.nicknameEdit = QtGui.QLineEdit(self.centralwidget)
self.nicknameEdit.setObjectName("nicknameEdit")
self.gridLayout.addWidget(self.nicknameEdit, 1, 2, 1, 1)
self.keyEdit = QtGui.QLineEdit(self.centralwidget)
self.keyEdit.setReadOnly(True)
self.keyEdit.setObjectName("keyEdit")
self.gridLayout.addWidget(self.keyEdit, 0, 2, 1, 1)
self.realnameEdit = QtGui.QLineEdit(self.centralwidget)
self.realnameEdit.setObjectName("realnameEdit")
self.gridLayout.addWidget(self.realnameEdit, 2, 2, 1, 1)
self.peopleListLabel = QtGui.QLabel(self.centralwidget)
self.peopleListLabel.setObjectName("peopleListLabel")
self.gridLayout.addWidget(self.peopleListLabel, 0, 0, 1, 1)
self.fatherButton = QtGui.QPushButton(self.centralwidget)
self.fatherButton.setText("")
self.fatherButton.setObjectName("fatherButton")
self.gridLayout.addWidget(self.fatherButton, 1, 4, 1, 1)
self.motherButton = QtGui.QPushButton(self.centralwidget)
self.motherButton.setText("")
self.motherButton.setObjectName("motherButton")
self.gridLayout.addWidget(self.motherButton, 2, 4, 1, 1)
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.unattachSpouseButton = QtGui.QPushButton(self.centralwidget)
self.unattachSpouseButton.setObjectName("unattachSpouseButton")
self.horizontalLayout_5.addWidget(self.unattachSpouseButton)
self.unattachParentsButton = QtGui.QPushButton(self.centralwidget)
self.unattachParentsButton.setObjectName("unattachParentsButton")
self.horizontalLayout_5.addWidget(self.unattachParentsButton)
self.unattachChildButton = QtGui.QPushButton(self.centralwidget)
self.unattachChildButton.setObjectName("unattachChildButton")
self.horizontalLayout_5.addWidget(self.unattachChildButton)
self.gridLayout.addLayout(self.horizontalLayout_5, 12, 4, 1, 1)
self.exitButton = QtGui.QPushButton(self.centralwidget)
self.exitButton.setObjectName("exitButton")
self.gridLayout.addWidget(self.exitButton, 12, 0, 1, 1)
self.saveBox = QtGui.QDialogButtonBox(self.centralwidget)
self.saveBox.setOrientation(QtCore.Qt.Horizontal)
self.saveBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Save)
self.saveBox.setCenterButtons(False)
self.saveBox.setObjectName("saveBox")
self.gridLayout.addWidget(self.saveBox, 13, 4, 1, 1)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.ddayRadio = QtGui.QRadioButton(self.centralwidget)
self.ddayRadio.setText("")
self.ddayRadio.setAutoExclusive(False)
self.ddayRadio.setObjectName("ddayRadio")
self.horizontalLayout_2.addWidget(self.ddayRadio)
self.ddayEdit = QtGui.QLineEdit(self.centralwidget)
self.ddayEdit.setEnabled(False)
self.ddayEdit.setObjectName("ddayEdit")
self.horizontalLayout_2.addWidget(self.ddayEdit)
self.gridLayout.addLayout(self.horizontalLayout_2, 6, 2, 1, 1)
self.impRadio = QtGui.QRadioButton(self.centralwidget)
self.impRadio.setAutoExclusive(False)
self.impRadio.setObjectName("impRadio")
self.gridLayout.addWidget(self.impRadio, 7, 2, 1, 1)
self.bdayLabel = QtGui.QLabel(self.centralwidget)
self.bdayLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.bdayLabel.setObjectName("bdayLabel")
self.gridLayout.addWidget(self.bdayLabel, 3, 1, 1, 1)
self.createLabel = QtGui.QLabel(self.centralwidget)
self.createLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.createLabel.setObjectName("createLabel")
self.gridLayout.addWidget(self.createLabel, 11, 1, 1, 1)
self.notesLabel = QtGui.QLabel(self.centralwidget)
self.notesLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.notesLabel.setObjectName("notesLabel")
self.gridLayout.addWidget(self.notesLabel, 8, 1, 1, 1)
self.IDLabel = QtGui.QLabel(self.centralwidget)
self.IDLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.IDLabel.setObjectName("IDLabel")
self.gridLayout.addWidget(self.IDLabel, 0, 1, 1, 1)
self.genderLabel = QtGui.QLabel(self.centralwidget)
self.genderLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.genderLabel.setObjectName("genderLabel")
self.gridLayout.addWidget(self.genderLabel, 4, 1, 1, 1)
self.realnameLabel = QtGui.QLabel(self.centralwidget)
self.realnameLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.realnameLabel.setObjectName("realnameLabel")
self.gridLayout.addWidget(self.realnameLabel, 2, 1, 1, 1)
self.statusLabel = QtGui.QLabel(self.centralwidget)
self.statusLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.statusLabel.setObjectName("statusLabel")
self.gridLayout.addWidget(self.statusLabel, 5, 1, 1, 1)
self.ddayLabel = QtGui.QLabel(self.centralwidget)
self.ddayLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.ddayLabel.setObjectName("ddayLabel")
self.gridLayout.addWidget(self.ddayLabel, 6, 1, 1, 1)
self.primaryListLabel = QtGui.QLabel(self.centralwidget)
self.primaryListLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.primaryListLabel.setObjectName("primaryListLabel")
self.gridLayout.addWidget(self.primaryListLabel, 10, 3, 1, 1)
self.secondaryListLabel = QtGui.QLabel(self.centralwidget)
self.secondaryListLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.secondaryListLabel.setObjectName("secondaryListLabel")
self.gridLayout.addWidget(self.secondaryListLabel, 10, 1, 1, 1)
self.attachLabel = QtGui.QLabel(self.centralwidget)
self.attachLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.attachLabel.setObjectName("attachLabel")
self.gridLayout.addWidget(self.attachLabel, 11, 3, 1, 1)
self.anniversaryLabel = QtGui.QLabel(self.centralwidget)
self.anniversaryLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.anniversaryLabel.setObjectName("anniversaryLabel")
self.gridLayout.addWidget(self.anniversaryLabel, 6, 3, 1, 1)
self.nicknameLabel = QtGui.QLabel(self.centralwidget)
self.nicknameLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.nicknameLabel.setObjectName("nicknameLabel")
self.gridLayout.addWidget(self.nicknameLabel, 1, 1, 1, 1)
self.unattachLabel = QtGui.QLabel(self.centralwidget)
self.unattachLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.unattachLabel.setObjectName("unattachLabel")
self.gridLayout.addWidget(self.unattachLabel, 12, 3, 1, 1)
self.childrenLabel = QtGui.QLabel(self.centralwidget)
self.childrenLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.childrenLabel.setObjectName("childrenLabel")
self.gridLayout.addWidget(self.childrenLabel, 8, 3, 1, 1)
self.marriageLabel = QtGui.QLabel(self.centralwidget)
self.marriageLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.marriageLabel.setObjectName("marriageLabel")
self.gridLayout.addWidget(self.marriageLabel, 3, 3, 1, 1)
self.motherLabel = QtGui.QLabel(self.centralwidget)
self.motherLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.motherLabel.setObjectName("motherLabel")
self.gridLayout.addWidget(self.motherLabel, 2, 3, 1, 1)
self.fatherLabel = QtGui.QLabel(self.centralwidget)
self.fatherLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.fatherLabel.setObjectName("fatherLabel")
self.gridLayout.addWidget(self.fatherLabel, 1, 3, 1, 1)
self.groupBox_1 = QtGui.QGroupBox(self.centralwidget)
self.groupBox_1.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.groupBox_1.setObjectName("groupBox_1")
self.horizontalLayout_6 = QtGui.QHBoxLayout(self.groupBox_1)
self.horizontalLayout_6.setSizeConstraint(QtGui.QLayout.SetMinimumSize)
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.secondStatusRadio = QtGui.QRadioButton(self.groupBox_1)
self.secondStatusRadio.setObjectName("secondStatusRadio")
self.horizontalLayout_6.addWidget(self.secondStatusRadio)
self.firstStatusRadio = QtGui.QRadioButton(self.groupBox_1)
self.firstStatusRadio.setObjectName("firstStatusRadio")
self.horizontalLayout_6.addWidget(self.firstStatusRadio)
self.thirdStatusRadio = QtGui.QRadioButton(self.groupBox_1)
self.thirdStatusRadio.setObjectName("thirdStatusRadio")
self.horizontalLayout_6.addWidget(self.thirdStatusRadio)
self.gridLayout.addWidget(self.groupBox_1, 5, 2, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1125, 26))
self.menubar.setObjectName("menubar")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.actionNew_Family = QtGui.QAction(MainWindow)
self.actionNew_Family.setObjectName("actionNew_Family")
self.actionParents = QtGui.QAction(MainWindow)
self.actionParents.setObjectName("actionParents")
self.actionSpouse = QtGui.QAction(MainWindow)
self.actionSpouse.setObjectName("actionSpouse")
self.actionChildren = QtGui.QAction(MainWindow)
self.actionChildren.setObjectName("actionChildren")
self.actionDelete_Person = QtGui.QAction(MainWindow)
self.actionDelete_Person.setObjectName("actionDelete_Person")
self.actionExit = QtGui.QAction(MainWindow)
self.actionExit.setObjectName("actionExit")
self.actionSave_Family = QtGui.QAction(MainWindow)
self.actionSave_Family.setObjectName("actionSave_Family")
self.actionOpen_Family = QtGui.QAction(MainWindow)
self.actionOpen_Family.setObjectName("actionOpen_Family")
self.actionMerge_Person = QtGui.QAction(MainWindow)
self.actionMerge_Person.setObjectName("actionMerge_Person")
self.actionEdit_Spouse = QtGui.QAction(MainWindow)
self.actionEdit_Spouse.setObjectName("actionEdit_Spouse")
self.actionUnParents = QtGui.QAction(MainWindow)
self.actionUnParents.setObjectName("actionUnParents")
self.actionMother = QtGui.QAction(MainWindow)
self.actionMother.setObjectName("actionMother")
self.actionUnSpouse = QtGui.QAction(MainWindow)
self.actionUnSpouse.setObjectName("actionUnSpouse")
self.actionUnChild = QtGui.QAction(MainWindow)
self.actionUnChild.setObjectName("actionUnChild")
self.actionUnAll = QtGui.QAction(MainWindow)
self.actionUnAll.setObjectName("actionUnAll")
self.actionAttach = QtGui.QAction(MainWindow)
self.actionAttach.setObjectName("actionAttach")
self.retranslateUi(MainWindow)
QtCore.QObject.connect(self.ddayRadio, QtCore.SIGNAL("toggled(bool)"), self.ddayEdit.setEnabled)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
MainWindow.setTabOrder(self.keyEdit, self.nicknameEdit)
MainWindow.setTabOrder(self.nicknameEdit, self.realnameEdit)
MainWindow.setTabOrder(self.realnameEdit, self.bdayEdit)
MainWindow.setTabOrder(self.bdayEdit, self.maleRadio)
MainWindow.setTabOrder(self.maleRadio, self.femaleRadio)
MainWindow.setTabOrder(self.femaleRadio, self.ddayRadio)
MainWindow.setTabOrder(self.ddayRadio, self.ddayEdit)
MainWindow.setTabOrder(self.ddayEdit, self.impRadio)
MainWindow.setTabOrder(self.impRadio, self.motherButton)
MainWindow.setTabOrder(self.motherButton, self.fatherButton)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "MainWindow", None, QtGui.QApplication.UnicodeUTF8))
self.maleRadio.setText(QtGui.QApplication.translate("MainWindow", "Male", None, QtGui.QApplication.UnicodeUTF8))
self.femaleRadio.setText(QtGui.QApplication.translate("MainWindow", "Female", None, QtGui.QApplication.UnicodeUTF8))
self.newFamilyButton.setText(QtGui.QApplication.translate("MainWindow", "New Family", None, QtGui.QApplication.UnicodeUTF8))
self.saveFamilyButton.setText(QtGui.QApplication.translate("MainWindow", "Save Family", None, QtGui.QApplication.UnicodeUTF8))
self.loadFamilyButton.setText(QtGui.QApplication.translate("MainWindow", "Load Family", None, QtGui.QApplication.UnicodeUTF8))
self.attachSpouseButton.setText(QtGui.QApplication.translate("MainWindow", "Spouse", None, QtGui.QApplication.UnicodeUTF8))
self.attachParentsButton.setText(QtGui.QApplication.translate("MainWindow", "Parents", None, QtGui.QApplication.UnicodeUTF8))
self.attachChildButton.setText(QtGui.QApplication.translate("MainWindow", "Child", None, QtGui.QApplication.UnicodeUTF8))
self.bdayEdit.setToolTip(QtGui.QApplication.translate("MainWindow", "<html><head/><body><p><br/></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.moveUpSpouse.setText(QtGui.QApplication.translate("MainWindow", "Up", None, QtGui.QApplication.UnicodeUTF8))
self.moveDnSpouse.setText(QtGui.QApplication.translate("MainWindow", "Dn", None, QtGui.QApplication.UnicodeUTF8))
self.moveUpChild.setText(QtGui.QApplication.translate("MainWindow", "Up", None, QtGui.QApplication.UnicodeUTF8))
self.moveDnChild.setText(QtGui.QApplication.translate("MainWindow", "Dn", None, QtGui.QApplication.UnicodeUTF8))
self.createSpouseButton.setText(QtGui.QApplication.translate("MainWindow", "Spouse", None, QtGui.QApplication.UnicodeUTF8))
self.createParentsButton.setText(QtGui.QApplication.translate("MainWindow", "Parents", None, QtGui.QApplication.UnicodeUTF8))
self.createChildButton.setText(QtGui.QApplication.translate("MainWindow", "Child", None, QtGui.QApplication.UnicodeUTF8))
self.deletePersonButton.setText(QtGui.QApplication.translate("MainWindow", "Delete Person", None, QtGui.QApplication.UnicodeUTF8))
self.peopleListLabel.setText(QtGui.QApplication.translate("MainWindow", "List", None, QtGui.QApplication.UnicodeUTF8))
self.unattachSpouseButton.setText(QtGui.QApplication.translate("MainWindow", "Spouse", None, QtGui.QApplication.UnicodeUTF8))
self.unattachParentsButton.setText(QtGui.QApplication.translate("MainWindow", "Parents", None, QtGui.QApplication.UnicodeUTF8))
self.unattachChildButton.setText(QtGui.QApplication.translate("MainWindow", "Child", None, QtGui.QApplication.UnicodeUTF8))
self.exitButton.setText(QtGui.QApplication.translate("MainWindow", "Exit Program", None, QtGui.QApplication.UnicodeUTF8))
self.impRadio.setText(QtGui.QApplication.translate("MainWindow", "Important", None, QtGui.QApplication.UnicodeUTF8))
self.bdayLabel.setText(QtGui.QApplication.translate("MainWindow", "Birthday:", None, QtGui.QApplication.UnicodeUTF8))
self.createLabel.setText(QtGui.QApplication.translate("MainWindow", "Create:", None, QtGui.QApplication.UnicodeUTF8))
self.notesLabel.setText(QtGui.QApplication.translate("MainWindow", "Notes:", None, QtGui.QApplication.UnicodeUTF8))
self.IDLabel.setText(QtGui.QApplication.translate("MainWindow", "ID:", None, QtGui.QApplication.UnicodeUTF8))
self.genderLabel.setText(QtGui.QApplication.translate("MainWindow", "Gender:", None, QtGui.QApplication.UnicodeUTF8))
self.realnameLabel.setText(QtGui.QApplication.translate("MainWindow", "Real Name:", None, QtGui.QApplication.UnicodeUTF8))
self.statusLabel.setText(QtGui.QApplication.translate("MainWindow", "Status:", None, QtGui.QApplication.UnicodeUTF8))
self.ddayLabel.setText(QtGui.QApplication.translate("MainWindow", "Death:", None, QtGui.QApplication.UnicodeUTF8))
self.primaryListLabel.setText(QtGui.QApplication.translate("MainWindow", "TextLabel", None, QtGui.QApplication.UnicodeUTF8))
self.secondaryListLabel.setText(QtGui.QApplication.translate("MainWindow", "TextLabel", None, QtGui.QApplication.UnicodeUTF8))
self.attachLabel.setText(QtGui.QApplication.translate("MainWindow", "Attach:", None, QtGui.QApplication.UnicodeUTF8))
self.anniversaryLabel.setText(QtGui.QApplication.translate("MainWindow", "Anniversary:", None, QtGui.QApplication.UnicodeUTF8))
self.nicknameLabel.setText(QtGui.QApplication.translate("MainWindow", "Spoken Name:", None, QtGui.QApplication.UnicodeUTF8))
self.unattachLabel.setText(QtGui.QApplication.translate("MainWindow", "Unattach:", None, QtGui.QApplication.UnicodeUTF8))
self.childrenLabel.setText(QtGui.QApplication.translate("MainWindow", "Children:", None, QtGui.QApplication.UnicodeUTF8))
self.marriageLabel.setText(QtGui.QApplication.translate("MainWindow", "Marriage:", None, QtGui.QApplication.UnicodeUTF8))
self.motherLabel.setText(QtGui.QApplication.translate("MainWindow", "Mother:", None, QtGui.QApplication.UnicodeUTF8))
self.fatherLabel.setText(QtGui.QApplication.translate("MainWindow", "Father:", None, QtGui.QApplication.UnicodeUTF8))
self.secondStatusRadio.setText(QtGui.QApplication.translate("MainWindow", "Engaged", None, QtGui.QApplication.UnicodeUTF8))
self.firstStatusRadio.setText(QtGui.QApplication.translate("MainWindow", "Single", None, QtGui.QApplication.UnicodeUTF8))
self.thirdStatusRadio.setText(QtGui.QApplication.translate("MainWindow", "Divorced", None, QtGui.QApplication.UnicodeUTF8))
self.actionNew_Family.setText(QtGui.QApplication.translate("MainWindow", "New Family", None, QtGui.QApplication.UnicodeUTF8))
self.actionParents.setText(QtGui.QApplication.translate("MainWindow", "Parents", None, QtGui.QApplication.UnicodeUTF8))
self.actionSpouse.setText(QtGui.QApplication.translate("MainWindow", "Spouse", None, QtGui.QApplication.UnicodeUTF8))
self.actionChildren.setText(QtGui.QApplication.translate("MainWindow", "Child", None, QtGui.QApplication.UnicodeUTF8))
self.actionDelete_Person.setText(QtGui.QApplication.translate("MainWindow", "Delete Person", None, QtGui.QApplication.UnicodeUTF8))
self.actionExit.setText(QtGui.QApplication.translate("MainWindow", "Exit", None, QtGui.QApplication.UnicodeUTF8))
self.actionSave_Family.setText(QtGui.QApplication.translate("MainWindow", "Save Family", None, QtGui.QApplication.UnicodeUTF8))
self.actionOpen_Family.setText(QtGui.QApplication.translate("MainWindow", "Open Family", None, QtGui.QApplication.UnicodeUTF8))
self.actionMerge_Person.setText(QtGui.QApplication.translate("MainWindow", "Merge Person", None, QtGui.QApplication.UnicodeUTF8))
self.actionEdit_Spouse.setText(QtGui.QApplication.translate("MainWindow", "Edit Spouse", None, QtGui.QApplication.UnicodeUTF8))
self.actionUnParents.setText(QtGui.QApplication.translate("MainWindow", "Parents", None, QtGui.QApplication.UnicodeUTF8))
self.actionMother.setText(QtGui.QApplication.translate("MainWindow", "Mother", None, QtGui.QApplication.UnicodeUTF8))
self.actionUnSpouse.setText(QtGui.QApplication.translate("MainWindow", "Spouse", None, QtGui.QApplication.UnicodeUTF8))
self.actionUnChild.setText(QtGui.QApplication.translate("MainWindow", "Child", None, QtGui.QApplication.UnicodeUTF8))
self.actionUnAll.setText(QtGui.QApplication.translate("MainWindow", "All", None, QtGui.QApplication.UnicodeUTF8))
self.actionAttach.setText(QtGui.QApplication.translate("MainWindow", "Attach", None, QtGui.QApplication.UnicodeUTF8))
| cc0-1.0 | -7,275,819,988,399,845,000 | 71.453401 | 163 | 0.742212 | false |
rasbt/advent-of-code-2016 | python_code/aoc_08_02.py | 1 | 10351 | # Sebastian Raschka, 2016
"""
source: http://adventofcode.com/2016/day/8
DESCRIPTION
You come across a door implementing what you can only assume is an
implementation of two-factor authentication after a long
game of requirements telephone.
To get past the door, you first swipe a keycard (no problem; there was one on
a nearby desk). Then, it displays a code on a little screen, and you type
that code on a keypad. Then, presumably, the door unlocks.
Unfortunately, the screen has been smashed. After a few minutes, you've taken
everything apart and figured out how it works. Now you just have to work out
what the screen would have displayed.
The magnetic strip on the card you swiped encodes a series of instructions for
the screen; these instructions are your puzzle input. The screen is 50 pixels
wide and 6 pixels tall, all of which start off, and is capable of three
somewhat peculiar operations:
rect AxB turns on all of the pixels in a rectangle at the top-left of the
screen which is A wide and B tall.
rotate row y=A by B shifts all of the pixels in row A (0 is the top row)
right by B pixels. Pixels that would fall off the right end appear at the
left end of the row.
rotate column x=A by B shifts all of the pixels in column A
(0 is the left column) down by B pixels. Pixels that would fall
off the bottom appear at the top of the column.
For example, here is a simple sequence on a smaller screen:
rect 3x2 creates a small rectangle in the top-left corner:
###....
###....
.......
rotate column x=1 by 1 rotates the second column down by one pixel:
#.#....
###....
.#.....
rotate row y=0 by 4 rotates the top row right by four pixels:
....#.#
###....
.#.....
rotate column x=1 by 1 again rotates the second column down by one pixel,
causing the bottom pixel to wrap back to the top:
.#..#.#
#.#....
.#.....
As you can see, this display technology is extremely powerful, and will soon
dominate the tiny-code-displaying-screen market. That's what the advertisement
on the back of the display tries to convince you, anyway.
There seems to be an intermediate check of the voltage used by the display:
after you swipe your card, if the screen did work,
how many pixels should be lit?
--- Part Two ---
You notice that the screen is only capable of displaying capital letters; in the font it uses, each letter is 5 pixels wide and 6 tall.
After you swipe your card, what code is the screen trying to display?
"""
from collections import deque
def init_screen(screen, rect_str):
rect_str = rect_str.split(' ')[-1]
x, y = rect_str.strip().split('x')
x, y = int(x), int(y)
for i in range(y):
for j in range(x):
screen[i][j] = '#'
def rotate(screen, rot_str):
s = rot_str.split()[1:]
idx = int(s[1].split('=')[-1])
by = int(s[-1])
if s[0] == 'row':
screen[idx].rotate(by)
else:
dq = deque([i[idx] for i in screen])
dq.rotate(by)
for i, j in zip(screen, dq):
i[idx] = j
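# Hedged sanity example (values are illustrative): with screen = [deque('#..')],
# rotate(screen, 'rotate row y=0 by 1') shifts the lit pixel right, leaving the
# row as '.#.', matching the wrap-around behaviour described in the puzzle text.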
if __name__ == '__main__':
data = """rect 1x1
rotate row y=0 by 5
rect 1x1
rotate row y=0 by 5
rect 1x1
rotate row y=0 by 5
rect 1x1
rotate row y=0 by 5
rect 1x1
rotate row y=0 by 2
rect 1x1
rotate row y=0 by 2
rect 1x1
rotate row y=0 by 3
rect 1x1
rotate row y=0 by 3
rect 2x1
rotate row y=0 by 2
rect 1x1
rotate row y=0 by 3
rect 2x1
rotate row y=0 by 2
rect 1x1
rotate row y=0 by 3
rect 2x1
rotate row y=0 by 5
rect 4x1
rotate row y=0 by 5
rotate column x=0 by 1
rect 4x1
rotate row y=0 by 10
rotate column x=5 by 2
rotate column x=0 by 1
rect 9x1
rotate row y=2 by 5
rotate row y=0 by 5
rotate column x=0 by 1
rect 4x1
rotate row y=2 by 5
rotate row y=0 by 5
rotate column x=0 by 1
rect 4x1
rotate column x=40 by 1
rotate column x=27 by 1
rotate column x=22 by 1
rotate column x=17 by 1
rotate column x=12 by 1
rotate column x=7 by 1
rotate column x=2 by 1
rotate row y=2 by 5
rotate row y=1 by 3
rotate row y=0 by 5
rect 1x3
rotate row y=2 by 10
rotate row y=1 by 7
rotate row y=0 by 2
rotate column x=3 by 2
rotate column x=2 by 1
rotate column x=0 by 1
rect 4x1
rotate row y=2 by 5
rotate row y=1 by 3
rotate row y=0 by 3
rect 1x3
rotate column x=45 by 1
rotate row y=2 by 7
rotate row y=1 by 10
rotate row y=0 by 2
rotate column x=3 by 1
rotate column x=2 by 2
rotate column x=0 by 1
rect 4x1
rotate row y=2 by 13
rotate row y=0 by 5
rotate column x=3 by 1
rotate column x=0 by 1
rect 4x1
rotate row y=3 by 10
rotate row y=2 by 10
rotate row y=0 by 5
rotate column x=3 by 1
rotate column x=2 by 1
rotate column x=0 by 1
rect 4x1
rotate row y=3 by 8
rotate row y=0 by 5
rotate column x=3 by 1
rotate column x=2 by 1
rotate column x=0 by 1
rect 4x1
rotate row y=3 by 17
rotate row y=2 by 20
rotate row y=0 by 15
rotate column x=13 by 1
rotate column x=12 by 3
rotate column x=10 by 1
rotate column x=8 by 1
rotate column x=7 by 2
rotate column x=6 by 1
rotate column x=5 by 1
rotate column x=3 by 1
rotate column x=2 by 2
rotate column x=0 by 1
rect 14x1
rotate row y=1 by 47
rotate column x=9 by 1
rotate column x=4 by 1
rotate row y=3 by 3
rotate row y=2 by 10
rotate row y=1 by 8
rotate row y=0 by 5
rotate column x=2 by 2
rotate column x=0 by 2
rect 3x2
rotate row y=3 by 12
rotate row y=2 by 10
rotate row y=0 by 10
rotate column x=8 by 1
rotate column x=7 by 3
rotate column x=5 by 1
rotate column x=3 by 1
rotate column x=2 by 1
rotate column x=1 by 1
rotate column x=0 by 1
rect 9x1
rotate row y=0 by 20
rotate column x=46 by 1
rotate row y=4 by 17
rotate row y=3 by 10
rotate row y=2 by 10
rotate row y=1 by 5
rotate column x=8 by 1
rotate column x=7 by 1
rotate column x=6 by 1
rotate column x=5 by 1
rotate column x=3 by 1
rotate column x=2 by 2
rotate column x=1 by 1
rotate column x=0 by 1
rect 9x1
rotate column x=32 by 4
rotate row y=4 by 33
rotate row y=3 by 5
rotate row y=2 by 15
rotate row y=0 by 15
rotate column x=13 by 1
rotate column x=12 by 3
rotate column x=10 by 1
rotate column x=8 by 1
rotate column x=7 by 2
rotate column x=6 by 1
rotate column x=5 by 1
rotate column x=3 by 1
rotate column x=2 by 1
rotate column x=1 by 1
rotate column x=0 by 1
rect 14x1
rotate column x=39 by 3
rotate column x=35 by 4
rotate column x=20 by 4
rotate column x=19 by 3
rotate column x=10 by 4
rotate column x=9 by 3
rotate column x=8 by 3
rotate column x=5 by 4
rotate column x=4 by 3
rotate row y=5 by 5
rotate row y=4 by 5
rotate row y=3 by 33
rotate row y=1 by 30
rotate column x=48 by 1
rotate column x=47 by 5
rotate column x=46 by 5
rotate column x=45 by 1
rotate column x=43 by 1
rotate column x=38 by 3
rotate column x=37 by 3
rotate column x=36 by 5
rotate column x=35 by 1
rotate column x=33 by 1
rotate column x=32 by 5
rotate column x=31 by 5
rotate column x=30 by 1
rotate column x=23 by 4
rotate column x=22 by 3
rotate column x=21 by 3
rotate column x=20 by 1
rotate column x=12 by 2
rotate column x=11 by 2
rotate column x=3 by 5
rotate column x=2 by 5
rotate column x=1 by 3
rotate column x=0 by 4"""
screen = [deque(50 * '.') for _ in range(6)]
for row in data.split('\n'):
row = row.strip()
if not row:
continue
elif row.startswith('rect'):
init_screen(screen, rect_str=row)
else:
rotate(screen, rot_str=row)
for row in screen:
print(''.join(row))
| mit | -4,137,628,623,510,542,300 | 33.274834 | 135 | 0.495314 | false |
raiden-network/raiden | raiden/tests/unit/network/rtc/test_web_rtc_manager.py | 1 | 1676 | from typing import Any
import pytest
from gevent.event import Event
from raiden.constants import ICEConnectionState
from raiden.network.transport.matrix.rtc.aiogevent import yield_future
from raiden.network.transport.matrix.rtc.web_rtc import WebRTCManager
from raiden.tests.utils.factories import make_signer
from raiden.tests.utils.transport import ignore_web_rtc_messages
pytestmark = pytest.mark.asyncio
def _dummy_send(*_args: Any) -> None:
pass
def test_rtc_partner_close() -> None:
node_address = make_signer().address
stop_event = Event()
web_rtc_manager = WebRTCManager(node_address, ignore_web_rtc_messages, _dummy_send, stop_event)
partner_address = make_signer().address
rtc_partner = web_rtc_manager.get_rtc_partner(partner_address)
peer_connection_first = rtc_partner.peer_connection
msg = "ICEConnectionState should be 'new'"
    assert peer_connection_first.iceConnectionState == ICEConnectionState.NEW.value, msg
close_task = web_rtc_manager.close_connection(rtc_partner.partner_address)
yield_future(close_task)
peer_connection_second = rtc_partner.peer_connection
msg = "peer connections should be different objects"
assert peer_connection_first != peer_connection_second, msg
msg = "New peer connection should be in state 'new'"
assert peer_connection_second.iceConnectionState == ICEConnectionState.NEW.value, msg
msg = "Old RTCPeerConnection state should be 'closed' after close()"
assert peer_connection_first.iceConnectionState == ICEConnectionState.CLOSED.value, msg
msg = "Should not have ready channel after close()"
assert not web_rtc_manager.has_ready_channel(partner_address), msg
| mit | -1,773,934,289,486,246,700 | 37.976744 | 99 | 0.756563 | false |
SCUT16K/SmsSender | server/utils/commands.py | 1 | 1286 | # -*- coding: utf-8 -*-
import sys
import gevent.wsgi
import gevent.monkey
from werkzeug.contrib import profiler
from flask_script import Command
class ProfileServer(Command):
"""
Run the server with profiling tools
"""
def __init__(self, host='localhost', port=9000, **options):
self.port = port
self.host = host
self.server_options = options
    def __call__(self, app, **kwargs):
        f = open('profiler.log', 'w')
        stream = profiler.MergeStream(sys.stdout, f)
        app.config['PROFILE'] = True
        app.wsgi_app = profiler.ProfilerMiddleware(app.wsgi_app, stream,
                                                   restrictions=[30])
        # Run on the host/port this command was configured with rather than
        # Flask's defaults.
        app.run(host=self.host, port=self.port, debug=True)
class GEventServer(Command):
"""
Run the server with gevent
"""
def __init__(self, host='127.0.0.1', port=5000, **options):
self.port = port
self.host = host
self.server_options = options
def __call__(self, app, **kwargs):
gevent.monkey.patch_all()
ws = gevent.wsgi.WSGIServer(listener=(self.host, self.port),
application=app)
print "* Running on http://{}:{}/ (Press CTRL+C to quit)".format(self.host, self.port)
ws.serve_forever()
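# Hedged usage sketch (assumes a Flask application instance named `app`,
# which is not defined in this module):
#
#     from flask_script import Manager
#     manager = Manager(app)
#     manager.add_command('profile', ProfileServer(port=9000))
#     manager.add_command('gevent', GEventServer(port=5000))
#     manager.run()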
| apache-2.0 | -2,121,802,192,813,314,800 | 26.956522 | 94 | 0.566874 | false |
matt77hias/Clipping | src/intersection.py | 1 | 1377 | import numpy as np
###############################################################################
## Intersection utilities 2D
###############################################################################
def intersect2D(c_v1, c_v2, p_v1, p_v2):
    # Line through the clip edge (c_v1, c_v2): A1*x + B1*y = C1
    A1 = c_v2[1] - c_v1[1]
    B1 = c_v1[0] - c_v2[0]
    C1 = c_v1[0] * A1 + c_v1[1] * B1
    # Line through the polygon edge (p_v1, p_v2): A2*x + B2*y = C2
    A2 = p_v2[1] - p_v1[1]
    B2 = p_v1[0] - p_v2[0]
    C2 = p_v1[0] * A2 + p_v1[1] * B2
    # Cramer's rule for the 2x2 linear system
    det = A1 * B2 - B1 * A2
    X1 = (C1 * B2 - B1 * C2) / det
    X2 = (A1 * C2 - C1 * A2) / det
    return np.array([X1, X2])
###############################################################################
## Intersection utilities 3D
###############################################################################
def intersect3D(c_v1, c_v2, p_v1, p_v2, a0, a1):
    # Solve the intersection in the (a0, a1) projection plane first.
    A1 = c_v2[a1] - c_v1[a1]
    B1 = c_v1[a0] - c_v2[a0]
    C1 = c_v1[a0] * A1 + c_v1[a1] * B1
    A2 = p_v2[a1] - p_v1[a1]
    B2 = p_v1[a0] - p_v2[a0]
    C2 = p_v1[a0] * A2 + p_v1[a1] * B2
    det = A1 * B2 - B1 * A2
    X1 = (C1 * B2 - B1 * C2) / det
    X2 = (A1 * C2 - C1 * A2) / det
    # Recover the interpolation parameter alpha along the polygon edge,
    # using whichever projected coordinate actually varies.
    if B2 != 0:
        alpha = (X1 - p_v2[a0]) / B2
    else:
        alpha = (p_v2[a1] - X2) / A2
    # Interpolate the remaining axis a2 with the same parameter.
    a2 = 3 - (a1 + a0)
    X3 = alpha * p_v1[a2] + (1.0 - alpha) * p_v2[a2]
    X = np.zeros((3))
    X[a0] = X1
    X[a1] = X2
    X[a2] = X3
    return X
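# Hedged sanity check (the coordinates are illustrative, not project fixtures):
# the clip edge (0,2)-(2,0) and the polygon edge (0,0)-(2,2) cross at (1,1).
if __name__ == '__main__':
    p = intersect2D(np.array([0.0, 2.0]), np.array([2.0, 0.0]),
                    np.array([0.0, 0.0]), np.array([2.0, 2.0]))
    print(p)  # expected: [ 1.  1.]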
| gpl-3.0 | 6,211,066,492,931,171,000 | 26 | 79 | 0.335512 | false |
robmcmullen/peppy | peppy/project/editra/BZR.py | 1 | 9992 | ###############################################################################
# Name: Cody Precord #
# Purpose: SourceControl implementation for Bazaar #
# Author: Cody Precord <[email protected]> #
# Copyright: (c) 2008 Cody Precord <[email protected]> #
# License: wxWindows License #
###############################################################################
"""Bazaar implementation of the SourceControl object """
__author__ = "Cody Precord <[email protected]>"
__revision__ = "$Revision: 867 $"
__scid__ = "$Id: BZR.py 867 2009-05-06 12:10:55Z CodyPrecord $"
#------------------------------------------------------------------------------#
# Imports
import os
import datetime
import re
import time
# Local imports
from SourceControl import SourceControl, DecodeString
#------------------------------------------------------------------------------#
class BZR(SourceControl):
""" Bazaar source control class """
name = 'Bazaar'
command = 'bzr'
ccache = list() # Cache of paths that are under bazaar control
repocache = dict()
def __repr__(self):
return 'BZR.BZR()'
def getAuthOptions(self, path):
""" Get the repository authentication info """
output = []
return output
def getRepository(self, path):
""" Get the repository of a given path """
if path in self.repocache:
return self.repocache[path]
if not os.path.isdir(path):
root = os.path.split(path)[0]
else:
root = path
while True:
if not root:
break
if os.path.exists(os.path.join(root, '.bzr')):
break
else:
root = os.path.split(root)[0]
# Cache the repo of this path for faster lookups next time
self.repocache[path] = root
return root
def isControlled(self, path):
""" Is the path controlled by BZR? """
t1 = time.time()
# Check for cached paths to speed up lookup
if path in self.ccache:
return True
if not os.path.isdir(path):
root = os.path.split(path)[0]
else:
root = path
last = False
while True:
if os.path.exists(os.path.join(root, '.bzr')):
# If a containing directory of the given path has a .bzr
# directory in it run status to find out if the file is being
# tracked or not.
retval = False
out = self.run(root + os.sep, ['status', '-S', path])
if out:
lines = out.stdout.readline()
if lines.startswith('?'):
fname = lines.split(None, 1)[1].strip()
fname = fname.rstrip(os.sep)
retval = not path.endswith(fname)
else:
retval = True
self.closeProcess(out)
if retval:
self.ccache.append(path)
return retval
elif last:
break
else:
root, tail = os.path.split(root)
# If tail is None or '' then this has gotten to the root
# so mark it as the last run
if not tail:
last = True
return False
def add(self, paths):
""" Add paths to the repository """
root, files = self.splitFiles(paths)
out = self.run(root, ['add'] + files)
self.logOutput(out)
self.closeProcess(out)
def checkout(self, paths):
""" Checkout files at the given path """
root, files = self.splitFiles(paths)
        out = self.run(root, ['checkout'] + files)
self.logOutput(out)
self.closeProcess(out)
def commit(self, paths, message=''):
""" Commit paths to the repository """
root, files = self.splitFiles(paths)
out = self.run(root, ['commit', '-m', message] + files)
self.logOutput(out)
self.closeProcess(out)
def diff(self, paths):
""" Run the diff program on the given files """
root, files = self.splitFiles(paths)
out = self.run(root, ['diff'] + files)
self.closeProcess(out)
def makePatch(self, paths):
""" Make a patch of the given paths """
root, files = self.splitFiles(paths)
patches = list()
for fname in files:
out = self.run(root, ['diff', fname])
lines = [ line for line in out.stdout ]
self.closeProcess(out)
patches.append((fname, ''.join(lines)))
return patches
def history(self, paths, history=None):
""" Get the revision history of the given paths """
if history is None:
history = []
root, files = self.splitFiles(paths)
for fname in files:
out = self.run(root, ['log', fname])
logstart = False
if out:
for line in out.stdout:
self.log(line)
if line.strip().startswith('-----------'):
logstart = False
current = dict(path=fname, revision=None,
author=None, date=None, log=u'')
history.append(current)
elif line.startswith('message:'):
logstart = True
elif logstart:
current['log'] += DecodeString(line)
elif line.startswith('revno:'):
current['revision'] = DecodeString(line.split(None, 1)[-1].strip())
elif line.startswith('committer:'):
author = line.split(None, 1)[-1]
current['author'] = DecodeString(author.strip())
elif line.startswith('timestamp:'):
date = line.split(None, 1)[-1]
current['date'] = self.str2datetime(date.strip())
else:
pass
self.logOutput(out)
self.closeProcess(out)
return history
def str2datetime(self, tstamp):
""" Convert a timestamp string to a datetime object """
parts = tstamp.split()
ymd = [int(x.strip()) for x in parts[1].split('-')]
hms = [int(x.strip()) for x in parts[2].split(':')]
date = ymd + hms
return datetime.datetime(*date)
def remove(self, paths):
""" Recursively remove paths from repository """
root, files = self.splitFiles(paths)
out = self.run(root, ['remove', '--force'] + files)
self.logOutput(out)
def status(self, paths, recursive=False, status=dict()):
""" Get BZR status information from given file/directory """
codes = {' ':'uptodate', 'N':'added', 'C':'conflict', 'D':'deleted',
'M':'modified'}
root, files = self.splitFiles(paths)
# -S gives output similar to svn which is a little easier to work with
out = self.run(root, ['status', '-S'] + files)
repo = self.getRepository(paths[0])
relpath = root.replace(repo, '', 1).lstrip(os.sep)
unknown = list()
if out:
for line in out.stdout:
self.log(line)
txt = line.lstrip(' +-')
# Split the status code and relative file path
code, fname = txt.split(None, 1)
fname = fname.replace(u'/', os.sep).strip().rstrip(os.sep)
fname = fname.replace(relpath, '', 1).lstrip(os.sep)
code = code.rstrip('*')
# Skip unknown files
if code == '?':
unknown.append(fname)
continue
# Get the absolute file path
current = dict()
try:
current['status'] = codes[code]
status[fname] = current
except KeyError:
pass
# Find up to date files
unknown += status.keys()
for path in os.listdir(root):
if path not in unknown:
status[path] = dict(status='uptodate')
self.logOutput(out)
return status
def update(self, paths):
""" Recursively update paths """
root, files = self.splitFiles(paths)
out = self.run(root, ['update'] + files)
self.logOutput(out)
def revert(self, paths):
""" Recursively revert paths to repository version """
root, files = self.splitFiles(paths)
if not files:
files = ['.']
out = self.run(root, ['revert'] + files)
self.logOutput(out)
def fetch(self, paths, rev=None, date=None):
""" Fetch a copy of the paths' contents """
output = []
for path in paths:
if os.path.isdir(path):
continue
root, files = self.splitFiles(path)
options = []
if rev:
options.append('-r')
options.append(str(rev))
if date:
# Date format YYYY-MM-DD,HH:MM:SS
options.append('-r')
options.append('date:%s' % date)
out = self.run(root, ['cat'] + options + files)
if out:
output.append(out.stdout.read())
self.logOutput(out)
else:
output.append(None)
return output
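# Hedged usage sketch (paths are illustrative): callers are expected to drive
# this class roughly as
#
#     bzr = BZR()
#     if bzr.isControlled('/path/to/working/copy/file.py'):
#         info = bzr.status(['/path/to/working/copy/file.py'])
#
# with status() returning a dict of per-file states keyed by file name.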
| gpl-2.0 | 5,564,929,580,805,322,000 | 34.942446 | 91 | 0.470877 | false |
fafaschiavo/lol-api-webapp | lolapiwebapp/stock/views.py | 1 | 17940 | from pprint import pprint
from django.shortcuts import render
from django.http import HttpResponse
from django.conf import settings
from stock.models import Hero, mastery, Rune
import json, requests, grequests
# Create your procedures here.
def searchSummonerStats(summoner_id):
	# Accepts a single summoner id or a list of ids; a single id is wrapped
	# into a list so both cases share the batched grequests path below.
	context = {}
	if type(summoner_id) != list:
		summoner_id = [summoner_id]
	urls = []
	for summoner in summoner_id:
		urls.append('https://na.api.pvp.net/api/lol/'+ settings.LOL_REGION +'/v1.3/stats/by-summoner/'+ str(summoner) +'/summary?api_key=' + settings.LOL_API_KEY2)
	rs = (grequests.get(u) for u in urls)
	resp = grequests.map(rs)
	stat_success = 1
	for response in resp:
		values_json = json.loads(response.text)
		context[values_json['summonerId']] = values_json
		if response.status_code != 200:
			stat_success = '0'
	return (context, stat_success)
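# Hedged usage note (the ids are illustrative): searchSummonerStats([123, 456])
# returns a (context, stat_success) pair, where context maps each summoner id
# to its raw stats JSON and stat_success signals whether every request came
# back with HTTP 200.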
def searchSummonnerId(summoner_name):
context = {}
summoner_name = summoner_name.lower()
summoner_name = summoner_name.replace(" ", "")
url = 'https://na.api.pvp.net/api/lol/'+ settings.LOL_REGION +'/v1.4/summoner/by-name/'+ summoner_name +'?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
if resp.status_code == 200:
data = json.loads(resp.text)
try:
context['success'] = 1
context['summonerName'] = summoner_name
context['summonerLevel'] = data[summoner_name]['summonerLevel']
context['id'] = data[summoner_name]['id']
context['profileIcon'] = data[summoner_name]['profileIconId']
return context
except KeyError, e:
context['success'] = 0
return context
else:
context['success'] = 0
return context
def searchSummonerName(summoner_id):
if type(summoner_id) != list:
id_list = str(summoner_id)
	else:
		id_list = ','.join(str(summoner) for summoner in summoner_id)
url = 'https://na.api.pvp.net/api/lol/'+ settings.LOL_REGION +'/v1.4/summoner/'+ id_list +'?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
return data
def searchSummonerRank(summoner_id):
if type(summoner_id) != list:
id_list = str(summoner_id)
	else:
		id_list = ','.join(str(summoner) for summoner in summoner_id)
url = 'https://na.api.pvp.net/api/lol/'+ settings.LOL_REGION +'/v2.5/league/by-summoner/'+ id_list +'?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
return data
def searchSummonerChampionMastery(summoner_id, champion_id):
url = 'https://na.api.pvp.net/championmastery/location/'+ settings.LOL_PLATFORM_ID +'/player/'+ str(summoner_id) +'/champion/'+ str(champion_id) +'?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
try:
data = json.loads(resp.text)
except ValueError, e:
data = {}
data['championLevel'] = 0
return data
def searchTierImage(tier):
tier = tier.lower()
tier = tier.title()
imgage_dict = {
'Unranked': 'http://s18.postimg.org/5t36g8pf9/unranked_1_92a5f4dfbb5ffab13f901c80a9d14384.png',
'Bronze': 'https://s3.amazonaws.com/f.cl.ly/items/3q1f0B2j1E0Y0a3P310V/Bronze.png',
'Silver': 'https://s3.amazonaws.com/f.cl.ly/items/0J253J1z3o1d2Z152M2b/Silver.png',
'Gold': 'https://s3.amazonaws.com/f.cl.ly/items/1Y360o3N261b020g0h1r/Gold.png',
'Platinum': 'https://s3.amazonaws.com/f.cl.ly/items/3F2j1u2d3f0w0l260m3E/Platinum.png',
'Diamond': 'https://s3.amazonaws.com/f.cl.ly/items/2X2F2r192B3K1j0p0n3d/Diamond.png',
'Master': 'https://s3.amazonaws.com/f.cl.ly/items/083C392i0t1p1a3h1C3i/Master.png',
'Challenger': 'https://s3.amazonaws.com/f.cl.ly/items/0K350Q2C0b0E0n043e0L/Challenger.png',
}
return imgage_dict.get(tier, 'http://s18.postimg.org/5t36g8pf9/unranked_1_92a5f4dfbb5ffab13f901c80a9d14384.png')
def refreshRuneDatabase(request):
context ={}
# request the mastery list from the riot API
url = 'https://na.api.pvp.net/api/lol/static-data/'+ settings.LOL_REGION +'/v1.2/rune?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
# delete all the existing masteries so the new information can be added
old_runes = Rune.objects.all()
old_runes.delete()
for rune in data['data']:
rune_id_riot = data['data'][rune]['id']
rune_name = data['data'][rune]['name']
rune_description = data['data'][rune]['description'].encode('ascii', 'ignore')
rune_tier = data['data'][rune]['rune']['tier']
rune_type_data = data['data'][rune]['rune']['type']
		# NOTE: this parsing appears to assume localized (pt-BR) rune
		# descriptions, where the numeric bonus comes before ' de'.
		rune_bonus = rune_description.split(' de')[0]
rune_honest_text = rune_description.split(rune_bonus)[1]
rune_honest_text = rune_honest_text.split(' (')[0]
		try:
			rune_bonus = rune_bonus.split('+')[1]
		except IndexError:
			rune_bonus = rune_bonus.split('-')[1]
		try:
			rune_is_percentage = rune_bonus.split('%')[1]
			rune_bonus = rune_bonus.split('%')[0]
			rune_is_percentage = 1
		except IndexError:
			rune_is_percentage = 0
rune_bonus = rune_bonus.split(' ')[0]
rune_bonus = rune_bonus.replace(',', '.')
rune_bonus = rune_bonus.replace(' ', '')
new_rune = Rune(id_riot = rune_id_riot, name = rune_name, description = rune_description, tier = rune_tier, rune_type = rune_type_data, bonus = float(rune_bonus), honest_text = rune_honest_text, is_percentage = rune_is_percentage)
new_rune.save()
return render(request, 'refresh-rune-database.html', context)
def refreshMasteryDatabase(request):
context ={}
# request the mastery list from the riot API
url = 'https://na.api.pvp.net/api/lol/static-data/'+ settings.LOL_REGION +'/v1.2/mastery?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
# delete all the existing masteries so the new information can be added
old_masteries = mastery.objects.all()
old_masteries.delete()
for mastery_item in data['data']:
mastery_id_riot = data['data'][mastery_item]['id']
mastery_name = data['data'][mastery_item]['name']
mastery_description = data['data'][mastery_item]['description']
table_position = str(mastery_id_riot)[1]
for item in mastery_description:
mastery_description_single_var = item
new_mastery = mastery(id_riot = mastery_id_riot, name = mastery_name, description = mastery_description_single_var, position = table_position)
new_mastery.save()
return render(request, 'refresh-mastery-database.html', context)
def refreshChampionDatabase(request):
context ={}
# request the champion list from the riot API
url = 'https://na.api.pvp.net/api/lol/static-data/'+ settings.LOL_REGION +'/v1.2/champion?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
# delete all the existing heroes so the new information can be added
old_heroes = Hero.objects.all()
old_heroes.delete()
for champion in data['data']:
champion_id_riot = data['data'][champion]['id']
champion_name = data['data'][champion]['name']
champion_title = data['data'][champion]['title']
champion_key = data['data'][champion]['key']
new_champion = Hero(id_riot = champion_id_riot, name = champion_name, title = champion_title, key = champion_key)
new_champion.save()
return render(request, 'refresh-champion-database.html', context)
# Create your views here.
def index(request):
context = {}
return render(request, 'index.html', context)
def getSummonerId(request):
context = {}
return render(request, 'getid.html', context)
def requestId(request):
#receive data from the template
template_form = request.POST['requestId']
#Transform the data into string, then transform into lowercase and remove all the whitespaces
summoner_name = str(template_form)
context = searchSummonnerId(summoner_name)
return render(request, 'requestid.html', context)
def getmatchhistory(request):
context = {}
return render(request, 'getmatchhistory.html', context)
def requestmatchhistory(request):
#receive data from the template
template_form = request.POST['requestmatchhistory']
#Transform the data into string, then transform into lowercase and remove all the whitespaces
summoner_name = str(template_form)
summoner_info = searchSummonnerId(summoner_name)
context = {}
context['summoner_name'] = summoner_name
try:
url = 'https://na.api.pvp.net/api/lol/' + settings.LOL_REGION + '/v2.2/matchlist/by-summoner/' + str(summoner_info['id']) + '?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
context['header'] = []
context['header'].append('Lane')
context['header'].append('Champion')
context['header'].append('Season')
context['header'].append('Match ID')
        context['header'].append('Timestamp')
context['matches'] = []
match_data_to_context = []
for match in data['matches']:
match_data_to_context = []
match_data_to_context.append(match['lane'])
champion_name = Hero.objects.filter(id_riot = match['champion'])
try:
match_data_to_context.append(champion_name[0].name)
except IndexError:
match_data_to_context.append('-')
match_data_to_context.append(match['season'])
match_data_to_context.append(match['matchId'])
match_data_to_context.append(match['timestamp'])
context['matches'].append(match_data_to_context)
return render(request, 'requestmatchhistory.html', context)
except KeyError:
context['success'] = 'false'
return render(request, 'requestmatchhistory.html', context)
def getcurrentgame(request):
context = {}
return render(request, 'getcurrentgame.html', context)
def requestcurrentgame(request):
#receive data from the template
template_form = request.POST['requestcurrentgame']
#Transform the data into string, then transform into lowercase and remove all the whitespaces
summoner_name = str(template_form)
summoner_info = searchSummonnerId(summoner_name)
context = {}
context2 = {}
    # check if the player name was found in the lol database (1)
if summoner_info['success'] == 1:
url = 'https://na.api.pvp.net/observer-mode/rest/consumer/getSpectatorGameInfo/'+ settings.LOL_PLATFORM_ID +'/'+ str(summoner_info['id']) +'?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
# check if this player is currently in game (2)
if resp.status_code == 200:
data = json.loads(resp.text)
data_formated={}
#search for the participant names based on their IDs
players_ids_list = []
for player in data['participants']:
players_ids_list.append(player['summonerId'])
player_objects = searchSummonerName(players_ids_list)
player_ranks = searchSummonerRank(players_ids_list)
player_stats, stat_success = searchSummonerStats(players_ids_list)
# fill the data array with the name
for player in player_objects:
data_formated[player] ={}
data_formated[player]['name'] = player_objects[player]['name']
for player in data['participants']:
data_formated[str(player['summonerId'])]['side'] = player['teamId']
if stat_success == 1:
for stat in player_stats[int(player['summonerId'])]['playerStatSummaries']:
if stat['playerStatSummaryType'] == 'Unranked':
data_formated[str(player['summonerId'])]['wins'] = stat['wins']
# fill the data array with the tier
for player in player_ranks:
data_formated[player]['tier'] = player_ranks[player][0]['tier']
#fill the data array with the champion name
for player in data['participants']:
heroes_ids = player['championId']
champion = Hero.objects.filter(id_riot = heroes_ids)
data_formated[str(player['summonerId'])]['champion'] = champion[0].__str__()
# champion_name_process = champion[0].__str__()
# champion_name_process = champion_name_process.replace(' ', '')
# champion_name_process = champion_name_process.replace('.', '')
champion_name_process = champion[0].__key__()
data_formated[str(player['summonerId'])]['champion'] = '<span style="margin-left: 12px;"><img style="margin-right: 6px;" src="http://ddragon.leagueoflegends.com/cdn/6.6.1/img/champion/' + champion_name_process + '.png" class="rank--img tier-img"><a style="color: rgba(0,0,0,.87);" href="http://champion.gg/champion/' + champion_name_process + '">' + data_formated[str(player['summonerId'])]['champion'] + '</a><span>'
try:
data_formated[str(player['summonerId'])]['tier']
data_formated[str(player['summonerId'])]['tier'] = '<span style="margin-left: 12px;"><img style="margin-right: 2px;" src="'+ searchTierImage(data_formated[str(player['summonerId'])]['tier']) +'" class="rank--img tier-img">' + data_formated[str(player['summonerId'])]['tier'] + '<span>'
except:
data_formated[str(player['summonerId'])]['tier'] = 'UNRANKED'
data_formated[str(player['summonerId'])]['tier'] = '<span style="margin-left: 12px;"><img style="margin-right: 2px;" src="'+ searchTierImage(data_formated[str(player['summonerId'])]['tier']) +'" class="rank--img tier-img">' + data_formated[str(player['summonerId'])]['tier'] + '<span>'
mastery_set = {}
# fill the data array with the masteries stats
for player in data['participants']:
mastery_set[1] = 0
mastery_set[2] = 0
mastery_set[3] = 0
masteries = player['masteries']
for diff_mastery in masteries:
mastery_object = mastery.objects.get(id_riot = diff_mastery['masteryId'])
mastery_set[mastery_object.__position__()] = mastery_set[mastery_object.__position__()] + diff_mastery['rank']
data_formated[str(player['summonerId'])]['masteries'] = str(mastery_set[1]) + ' / ' + str(mastery_set[3]) + ' / ' +str(mastery_set[2])
context['header'] = []
context['header'].append('Champion')
context['header'].append('Name')
context['header'].append('Tier')
if stat_success == 1:
context['header'].append('Wins')
context['header'].append('Masteries')
context['players'] = []
player_data_to_context = []
for player in data_formated:
if data_formated[player]['side'] == 100:
player_data_to_context = []
player_data_to_context.append(data_formated[player]['champion'])
player_data_to_context.append(data_formated[player]['name'])
player_data_to_context.append(data_formated[player]['tier'])
if stat_success == 1:
player_data_to_context.append(data_formated[player]['wins'])
player_data_to_context.append(data_formated[player]['masteries'])
context['players'].append(player_data_to_context)
context2['header'] = []
context2['header'].append('Champion')
context2['header'].append('Name')
context2['header'].append('Tier')
if stat_success == 1:
context2['header'].append('Wins')
context2['header'].append('Masteries')
context2['players'] = []
player_data_to_context = []
for player in data_formated:
if data_formated[player]['side'] == 200:
player_data_to_context = []
player_data_to_context.append(data_formated[player]['champion'])
player_data_to_context.append(data_formated[player]['name'])
player_data_to_context.append(data_formated[player]['tier'])
if stat_success == 1:
player_data_to_context.append(data_formated[player]['wins'])
player_data_to_context.append(data_formated[player]['masteries'])
context2['players'].append(player_data_to_context)
return render(request, 'requestcurrentgame.html', {'context': context, 'context2': context2, 'summoner_name': summoner_name, 'summoner_info': summoner_info})
# check if this player is currently in game (2)
else:
return render(request, 'general-error.html', context)
    # check if the player name was found in the lol database (1)
else:
return render(request, 'general-error.html', context)
#settings.LOL_PLATFORM_ID
#str(summoner_info['id'])
#settings.LOL_API_KEY
#bazetinho's id: 7523004
#fafis's id: 454451
#leo's id: 514850 | gpl-3.0 | -8,273,637,906,974,307,000 | 39.046875 | 434 | 0.608696 | false |
cfusting/arctic-browning | utilities/learning_data.py | 1 | 4957 | import os
import ntpath
import re
from functools import partial
import h5py
import design_matrix as dm
class LearningData:
DEFAULT_PREFIX = 'ARG'
CSV = '.csv'
HDF = '.hdf'
def __init__(self):
self.num_variables = None
self.num_observations = None
self.predictors = None
self.response = None
self.variable_names = None
self.unique_variable_prefixes = None
self.variable_type_indices = None
self.variable_dict = None
self.name = None
self.design_matrix = None
self.attributes = {}
self.meta_layers = {}
def from_file(self, file_name, header=False):
file_type = os.path.splitext(file_name)[1]
if file_type == self.HDF:
self.from_hdf(file_name)
elif file_type == self.CSV and header:
self.from_headed_csv(file_name)
elif file_type == self.CSV:
self.from_csv(file_name)
else:
raise ValueError("Bad file: " + file_name + ". File extension must be one of csv, hdf.")
def init_common(self, file_name):
self.name = os.path.splitext(ntpath.basename(file_name))[0]
self.predictors = self.design_matrix.predictors
self.response = self.design_matrix.response
self.num_observations, self.num_variables = self.predictors.shape
self.variable_names = self.design_matrix.variable_names
self.unique_variable_prefixes = get_unique_variable_prefixes(self.variable_names)
variable_groups = get_variable_groups(self.variable_names, self.unique_variable_prefixes)
self.variable_type_indices = get_variable_type_indices(variable_groups)
self.variable_dict = get_variable_dict(self.variable_names, self.DEFAULT_PREFIX)
def from_csv(self, csv_file):
self.design_matrix = dm.DesignMatrix()
self.design_matrix.from_csv(csv_file)
self.init_common(csv_file)
def from_headed_csv(self, csv_file):
self.design_matrix = dm.DesignMatrix()
self.design_matrix.from_headed_csv(csv_file)
self.init_common(csv_file)
def from_hdf(self, hdf_file):
self.design_matrix = dm.DesignMatrix()
self.design_matrix.from_hdf(hdf_file)
self.init_common(hdf_file)
self.get_meta_layers(hdf_file)
self.get_layer_attributes(hdf_file, 'design_matrix')
def from_data(self, matrix, variable_names, name):
self.design_matrix = dm.DesignMatrix()
self.design_matrix.from_data(matrix, variable_names)
self.init_common(name)
def to_hdf(self, file_name):
self.design_matrix.to_hdf(file_name)
self.save_meta_layers(file_name)
self.save_layer_attributes(file_name, 'design_matrix')
def to_headed_csv(self, file_name):
self.design_matrix.to_headed_csv(file_name)
def get_meta_layers(self, file_name):
with h5py.File(file_name, 'r') as f:
layers = filter(lambda x: x != 'design_matrix', f.keys())
for layer in layers:
self.meta_layers[layer] = f[layer][:]
def save_meta_layers(self, file_name):
with h5py.File(file_name, 'r+') as f:
for k, v in self.meta_layers.items():
f.create_dataset(k, data=v)
def get_layer_attributes(self, file_name, layer):
with h5py.File(file_name, 'r') as f:
dset = f[layer]
for k, v in dset.attrs.iteritems():
self.attributes[k] = v
def save_layer_attributes(self, file_name, layer):
with h5py.File(file_name, 'r+') as f:
dset = f[layer]
for k, v in self.attributes.items():
dset.attrs[k] = v
def get_variable_dict(names, default_prefix):
args = [default_prefix + str(x) for x in range(0, len(names))]
return dict(zip(args, names))
def get_variable_type_indices(variable_groups):
indices = []
previous = 0
for i in variable_groups:
current = previous + len(i)
if len(i) != 0:
indices.append(current - 1)
previous = current
return indices
def get_unique_variable_prefixes(variable_names):
"""
Assumes the form prefixnumber.
:param variable_names:
:return:
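    Illustrative example (hypothetical names):
        ['ndvi1', 'ndvi2', 'lst1'] -> ['ndvi', 'lst']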
"""
expr = re.compile('([a-zA-Z]+)')
def get_prefix(name, expression):
result = re.match(expression, name)
if result:
return result.group(1)
return ''
prefixes = map(partial(get_prefix, expression=expr), variable_names)
unique_prefixes = []
seen = []
for prefix in prefixes:
if prefix not in seen:
unique_prefixes.append(prefix)
seen.append(prefix)
return unique_prefixes
def get_variable_groups(variable_names, unique_prefixes):
variable_groups = []
for prefix in unique_prefixes:
variable_groups.append(filter(lambda x: prefix in x, variable_names))
return variable_groups
| gpl-3.0 | 630,932,586,585,630,200 | 32.268456 | 100 | 0.617511 | false |
nonemaw/Flask_nonemaw | app/science/views.py | 1 | 2703 |
from flask import render_template, request, redirect, url_for
from flask_login import current_user, login_required
from bson import ObjectId
from . import science
from .compute import compute
from .forms import populate_form_from_instance, ComputeForm
from .. import db_s
from ..models_science import Compute
# http://hplgit.github.io/web4sciapps/doc/pub/._web4sa_flask015.html
@science.route('/', methods=['GET', 'POST'])
@login_required
def index_science():
result = None
form = ComputeForm(request.form)
if request.method == "POST" and form.validate():
result = compute(form.A.data, form.b.data,form.w.data, form.T.data)
if current_user.is_authenticated:
Compute(form.A.data, form.b.data,form.w.data, form.T.data,
form.resolution.data, result, current_user.id).insert_doc()
elif current_user.is_authenticated:
if db_s.Compute.count() > 0:
# get first item of cursor after sorting
latest = db_s.Compute.find({}).sort([('timestamp', -1)]).next()
result = latest.get('result')
form = populate_form_from_instance(latest)
return render_template("science/index.html", form=form, result=result,
user=current_user)
@science.route('/old')
@login_required
def old():
data = []
if current_user.is_authenticated:
instances = db_s.Compute.find({}).sort([('timestamp', -1)])
for instance_dict in instances:
form = populate_form_from_instance(instance_dict)
result = instance_dict.get('result')
if instance_dict.get('comments'):
comments = "<h3>Comments</h3>" + instance_dict.get('comments')
else:
comments = ''
data.append(
{'form': form, 'result': result,
'id': str(instance_dict.get('_id')),
'comments': comments})
return render_template("science/old.html", data=data)
# @science.route('/add_comment', methods=['GET', 'POST'])
# @login_required
# def add_comment():
# if request.method == 'POST' and current_user.is_authenticated():
# instance = user.Compute.order_by('-id').first()
# instance.comments = request.form.get("comments", None)
# db.session.commit()
# return redirect(url_for('index'))
@science.route('/delete/<id>', methods=['GET', 'POST'])
@login_required
def delete_post(id):
if current_user.is_authenticated:
db_s.Compute.delete_one({'_id': ObjectId(id)})
    return redirect(url_for('.old'))  # '.old' targets the endpoint namespaced by the 'science' blueprint
@science.route('/graph')
@login_required
def graph():
return render_template('science/graph.html')
| mit | -692,888,562,903,513,600 | 34.565789 | 79 | 0.616352 | false |
IanLewis/homepage | homepage/runner.py | 1 | 2657 | # :coding=utf-8:
import os
import argparse
import django
from django.core.management import call_command
from waitress import serve
from homepage import __version__ as VERSION
from homepage.wsgi import application
def start(args):
"""
Starts the homepage application server.
"""
serve(application, host=args.addr, port=args.port)
def migrate(args):
"""
Runs migrations for the homepage server.
"""
call_command(
"migrate", fake=args.fake, interactive=False,
)
def createsuperuser(args):
"""
Creates a superuser.
"""
from django.contrib.auth.models import User
User.objects.create_superuser(
username=args.username, email=args.email, password=args.password,
)
def main():
os.environ["DJANGO_SETTINGS_MODULE"] = "homepage.settings"
django.setup()
parser = argparse.ArgumentParser(description="The Homepage App")
parser.add_argument(
"--version",
action="version",
version=VERSION,
help="Print the version number and exit.",
)
subparsers = parser.add_subparsers(help="Sub-command help")
# start
start_parser = subparsers.add_parser("start", help="Run the app server.")
start_parser.add_argument(
"--addr", default="0.0.0.0", help="Optional IP address to bind to"
)
start_parser.add_argument("--port", default=8000, type=int, help="Port to bind to")
    start_parser.set_defaults(func=start)
    # migrate
    migrate_parser = subparsers.add_parser("migrate", help="Migrate the database.")
migrate_parser.add_argument(
"--fake",
action="store_true",
dest="fake",
default=False,
help="Mark migrations as run without actually " "running them.",
)
migrate_parser.set_defaults(func=migrate)
# createsuperuser
createsuperuser_parser = subparsers.add_parser(
"createsuperuser", help="Create a superuser."
)
createsuperuser_parser.add_argument(
"--username",
default="admin",
help="Specifies the username for the " "superuser. [Default: admin]",
)
createsuperuser_parser.add_argument(
"--email",
default="[email protected]",
help="Specifies the email address for "
"the superuser. [Default: [email protected]]",
)
createsuperuser_parser.add_argument(
"--password",
default="admin",
help="Specifies the password for the " "superuser. [Default: admin]",
)
createsuperuser_parser.set_defaults(func=createsuperuser)
args = parser.parse_args()
args.func(args)
if __name__ == "__main__":
main()
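# Example invocations (illustrative sketch; module path assumed from this
# file's location, adjust to the project's actual entry point):
#   python -m homepage.runner start --addr 127.0.0.1 --port 8080
#   python -m homepage.runner migrate --fake
#   python -m homepage.runner createsuperuser --username admin --password secret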
| mit | 8,793,610,769,817,218,000 | 23.601852 | 87 | 0.641325 | false |
GoeGaming/lutris | lutris/config.py | 1 | 10735 | #!/usr/bin/python
# -*- coding:Utf-8 -*-
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Handle the basic configuration of Lutris."""
import os
import sys
import yaml
import logging
from os.path import join
from gi.repository import Gio
from lutris import pga, settings, sysoptions
from lutris.runners import import_runner
from lutris.util.log import logger
def register_handler():
"""Register the lutris: protocol to open with the application."""
logger.debug("registering protocol")
executable = os.path.abspath(sys.argv[0])
base_key = "desktop.gnome.url-handlers.lutris"
schema_directory = "/usr/share/glib-2.0/schemas/"
schema_source = Gio.SettingsSchemaSource.new_from_directory(
schema_directory, None, True
)
schema = schema_source.lookup(base_key, True)
if schema:
settings = Gio.Settings.new(base_key)
settings.set_string('command', executable)
else:
logger.warning("Schema not installed, cannot register url-handler")
def check_config(force_wipe=False):
"""Check if initial configuration is correct."""
directories = [settings.CONFIG_DIR,
join(settings.CONFIG_DIR, "runners"),
join(settings.CONFIG_DIR, "games"),
settings.DATA_DIR,
join(settings.DATA_DIR, "covers"),
settings.ICON_PATH,
join(settings.DATA_DIR, "banners"),
join(settings.DATA_DIR, "runners"),
join(settings.DATA_DIR, "lib"),
settings.RUNTIME_DIR,
settings.CACHE_DIR,
join(settings.CACHE_DIR, "installer"),
join(settings.CACHE_DIR, "tmp")]
for directory in directories:
if not os.path.exists(directory):
logger.debug("creating directory %s" % directory)
os.makedirs(directory)
if force_wipe:
os.remove(settings.PGA_DB)
pga.syncdb()
def read_yaml_from_file(filename):
"""Read filename and return parsed yaml"""
if not filename or not os.path.exists(filename):
return {}
try:
content = file(filename, 'r').read()
yaml_content = yaml.load(content) or {}
except (yaml.scanner.ScannerError, yaml.parser.ParserError):
logger.error("error parsing file %s", filename)
yaml_content = {}
return yaml_content
def write_yaml_to_file(filepath, config):
if not filepath:
raise ValueError('Missing filepath')
yaml_config = yaml.dump(config, default_flow_style=False)
with open(filepath, "w") as filehandler:
filehandler.write(yaml_config)
class LutrisConfig(object):
"""Class where all the configuration handling happens.
Description
===========
    Lutris' configuration uses a cascading mechanism where
each higher, more specific level overrides the lower ones
The levels are (highest to lowest): `game`, `runner` and `system`.
Each level has its own set of options (config section), available to and
    overridden by upper levels:
```
level | Config sections
-------|----------------------
game | system, runner, game
runner | system, runner
system | system
```
Example: if requesting runner options at game level, their returned value
will be from the game level config if it's set at this level; if not it
will be the value from runner level if available; and if not, the default
value set in the runner's module, or None.
The config levels are stored in separate YAML format text files.
Usage
=====
The config level will be auto set depending on what you pass to __init__:
    - For game level, pass game slug and optionally runner_slug (better performance)
- For runner level, pass runner_slug
- For system level, pass nothing
If need be, you can pass the level manually.
To read, use the config sections dicts: game_config, runner_config and
system_config.
To write, modify the relevant `raw_XXXX_config` section dict, then run
`save()`.
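    Example (illustrative sketch; assumes a game with slug 'quake' exists and
    that 'exe' is a valid game-level option key):
        config = LutrisConfig(game_slug='quake')
        print config.game_config              # read cascaded game options
        config.raw_game_config['exe'] = '/path/to/quake'
        config.save()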
"""
def __init__(self, runner_slug=None, game_slug=None, level=None):
self.game_slug = game_slug
self.runner_slug = runner_slug
if game_slug and not runner_slug:
self.runner_slug = pga.get_game_by_slug(game_slug).get('runner')
# Cascaded config sections (for reading)
self.game_config = {}
self.runner_config = {}
self.system_config = {}
# Raw (non-cascaded) sections (for writing)
self.raw_game_config = {}
self.raw_runner_config = {}
self.raw_system_config = {}
self.raw_config = {}
# Set config level
self.level = level
if not level:
if game_slug:
self.level = 'game'
elif runner_slug:
self.level = 'runner'
else:
self.level = 'system'
# Init and load config files
self.game_level = {'system': {}, self.runner_slug: {}, 'game': {}}
self.runner_level = {'system': {}, self.runner_slug: {}}
self.system_level = {'system': {}}
self.game_level.update(read_yaml_from_file(self.game_config_path))
self.runner_level.update(read_yaml_from_file(self.runner_config_path))
self.system_level.update(read_yaml_from_file(self.system_config_path))
self.update_cascaded_config()
self.update_raw_config()
@property
def system_config_path(self):
return os.path.join(settings.CONFIG_DIR, "system.yml")
@property
def runner_config_path(self):
if not self.runner_slug:
return
return os.path.join(settings.CONFIG_DIR, "runners/%s.yml" %
self.runner_slug)
@property
def game_config_path(self):
if not self.game_slug:
return
return os.path.join(settings.CONFIG_DIR, "games/%s.yml" %
self.game_slug)
def update_cascaded_config(self):
if self.system_level.get('system') is None:
self.system_level['system'] = {}
self.system_config.clear()
self.system_config.update(self.get_defaults('system'))
self.system_config.update(self.system_level.get('system'))
if self.level in ['runner', 'game'] and self.runner_slug:
if self.runner_level.get(self.runner_slug) is None:
self.runner_level[self.runner_slug] = {}
if self.runner_level.get('system') is None:
self.runner_level['system'] = {}
self.runner_config.clear()
self.runner_config.update(self.get_defaults('runner'))
self.runner_config.update(self.runner_level.get(self.runner_slug))
self.system_config.update(self.runner_level.get('system'))
if self.level == 'game' and self.runner_slug:
if self.game_level.get('game') is None:
self.game_level['game'] = {}
if self.game_level.get(self.runner_slug) is None:
self.game_level[self.runner_slug] = {}
if self.game_level.get('system') is None:
self.game_level['system'] = {}
self.game_config.clear()
self.game_config.update(self.get_defaults('game'))
self.game_config.update(self.game_level.get('game'))
self.runner_config.update(self.game_level.get(self.runner_slug))
self.system_config.update(self.game_level.get('system'))
def update_raw_config(self):
# Select the right level of config
if self.level == 'game':
raw_config = self.game_level
elif self.level == 'runner':
raw_config = self.runner_level
else:
raw_config = self.system_level
# Load config sections
self.raw_system_config = raw_config['system']
if self.level in ['runner', 'game']:
self.raw_runner_config = raw_config[self.runner_slug]
if self.level == 'game':
self.raw_game_config = raw_config['game']
self.raw_config = raw_config
def remove(self, game=None):
"""Delete the configuration file from disk."""
if game is None:
game = self.game_slug
logging.debug("removing config for %s", game)
if os.path.exists(self.game_config_path):
os.remove(self.game_config_path)
else:
logger.debug("No config file at %s" % self.game_config_path)
def save(self):
"""Save configuration file according to its type"""
if self.level == "system":
config = self.system_level
config_path = self.system_config_path
elif self.level == "runner":
config = self.runner_level
config_path = self.runner_config_path
elif self.level == "game":
config = self.game_level
config_path = self.game_config_path
else:
raise ValueError("Invalid config level '%s'" % self.level)
write_yaml_to_file(config_path, config)
self.update_cascaded_config()
def get_defaults(self, options_type):
"""Return a dict of options' default value."""
options_dict = self.options_as_dict(options_type)
defaults = {}
for option, params in options_dict.iteritems():
if 'default' in params:
defaults[option] = params['default']
return defaults
def options_as_dict(self, options_type):
"""Convert the option list to a dict with option name as keys"""
options = {}
runner = (import_runner(self.runner_slug)()
if self.runner_slug
else None)
if options_type == 'system':
options = (sysoptions.with_runner_overrides(runner)
if runner
else sysoptions.system_options)
elif options_type == 'runner' and runner:
options = runner.runner_options
elif options_type == 'game' and runner:
options = runner.game_options
return dict((opt['option'], opt) for opt in options)
| gpl-3.0 | 1,786,690,622,427,660,800 | 36.274306 | 78 | 0.606148 | false |
geophysics/mtpy | mtpy/uofa/bayesian1d.py | 1 | 2126 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on 31.07.2013
@author: LK@UofA
mtpy/uofa/bayesian1d.py
Module for handling the UofA Bayesian 1D inversion/modelling code.
"""
import os
import sys
import os.path as op
import mtpy.utils.filehandling as MTfh
import mtpy.core.edi as EDI
import mtpy.utils.exceptions as MTex
import numpy as np
def generate_input_file(edifilename, outputdir=None):
eo = EDI.Edi()
eo.readfile(edifilename)
filebase = op.splitext(op.split(edifilename)[-1])[0]
outfilename1 = '{0}_bayesian1d_z.in'.format(filebase)
outfilename2 = '{0}_bayesian1d_zvar.in'.format(filebase)
outdir = op.split(edifilename)[0]
if outputdir is not None:
try:
if not op.isdir(outputdir):
os.makedirs(outputdir)
outdir = outputdir
except:
pass
outfn1 = op.join(outdir,outfilename1)
outfn2 = op.join(outdir,outfilename2)
outfn1 = MTfh.make_unique_filename(outfn1)
outfn2 = MTfh.make_unique_filename(outfn2)
freqs = eo.freq
z_array = eo.Z.z
zerr_array = eo.Z.zerr
if len(freqs) != len(z_array):
raise MTex.MTpyError_edi_file('ERROR in Edi file {0} - number of '\
'freqs different from length of Z array'.format(eo.filename))
sorting = np.argsort(freqs)
outstring1 = ''
outstring2 = ''
for idx in sorting:
z = z_array[idx]
zerr = zerr_array[idx]
f = freqs[idx]
outstring1 += '{0}\t'.format(f)
outstring2 += '{0}\t'.format(f)
for i in np.arange(2):
for j in np.arange(2):
if np.imag(z[i%2,(j+1)/2]) < 0 :
z_string = '{0}-{1}i'.format(np.real(z[i%2,(j+1)/2]),
np.abs(np.imag(z[i%2,(j+1)/2])))
else:
z_string = '{0}+{1}i'.format(np.real(z[i%2,(j+1)/2]),
np.imag(z[i%2,(j+1)/2]))
zerr_string = '{0}'.format(zerr[i%2,(j+1)/2])
outstring1 += '{0}\t'.format(z_string)
outstring2 += '{0}\t'.format(zerr_string)
outstring1 = outstring1.rstrip() + '\n'
outstring2 = outstring2.rstrip() + '\n'
Fout1 = open(outfn1,'w')
Fout2 = open(outfn2,'w')
Fout1.write(outstring1.expandtabs(4))
Fout2.write(outstring2.expandtabs(4))
Fout1.close()
Fout2.close()
return outfn1,outfn2
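# Illustrative call (hypothetical EDI file name):
#   z_file, zvar_file = generate_input_file('station01.edi', outputdir='/tmp/bayes1d')
# writes <edi base>_bayesian1d_z.in (impedances) and <edi base>_bayesian1d_zvar.in
# (variances) into the output directory and returns both paths.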
| gpl-3.0 | 2,121,404,373,755,960,000 | 20.049505 | 69 | 0.648636 | false |
di/vladiate | vladiate/inputs.py | 1 | 2379 | import io
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from vladiate.exceptions import MissingExtraException
class VladInput(object):
""" A generic input class """
def __init__(self):
raise NotImplementedError
def open(self):
raise NotImplementedError
def __repr__(self):
raise NotImplementedError
class LocalFile(VladInput):
""" Read from a local file path """
def __init__(self, filename):
self.filename = filename
def open(self):
with open(self.filename, "r") as f:
return f.readlines()
def __repr__(self):
return "{}('{}')".format(self.__class__.__name__, self.filename)
class S3File(VladInput):
""" Read from a file in S3 """
def __init__(self, path=None, bucket=None, key=None):
try:
import boto # noqa
self.boto = boto
except ImportError:
# 2.7 workaround, should just be `raise Exception() from None`
exc = MissingExtraException()
exc.__context__ = None
raise exc
if path and not any((bucket, key)):
self.path = path
parse_result = urlparse(path)
self.bucket = parse_result.netloc
self.key = parse_result.path
elif all((bucket, key)):
self.bucket = bucket
self.key = key
self.path = "s3://{}{}"
else:
raise ValueError(
"Either 'path' argument or 'bucket' and 'key' argument must " "be set."
)
def open(self):
s3 = self.boto.connect_s3()
bucket = s3.get_bucket(self.bucket)
key = bucket.new_key(self.key)
contents = key.get_contents_as_string()
ret = io.BytesIO(bytes(contents))
return ret
def __repr__(self):
return "{}('{}')".format(self.__class__.__name__, self.path)
class String(VladInput):
""" Read a file from a string """
def __init__(self, string_input=None, string_io=None):
self.string_io = string_io if string_io else StringIO(string_input)
def open(self):
return self.string_io
def __repr__(self):
return "{}('{}')".format(self.__class__.__name__, "...")
| mit | 376,685,929,278,033,800 | 24.858696 | 87 | 0.565784 | false |
dagon666/napi | tests/integration_tests/napi/scpmocker.py | 1 | 1963 | import os
import subprocess
class ScpMocker(object):
"""
This class interfaces to scpmocker - a programmable command mock.
"""
def __init__(self, scpMockerPath, sandboxPath):
self.scpMockerPath = scpMockerPath
self.sandboxPath = sandboxPath
self.binPath = os.path.join(self.sandboxPath, 'bin')
self.dbPath = os.path.join(self.sandboxPath, 'db')
def __enter__(self):
os.mkdir(self.binPath)
os.mkdir(self.dbPath)
self.envOrig = os.environ.copy()
os.environ["PATH"] = ':'.join((self.binPath, os.environ["PATH"]))
os.environ["SCPMOCKER_BIN_PATH"] = self.binPath
os.environ["SCPMOCKER_DB_PATH"] = self.dbPath
return self
def __exit__(self, *args):
os.environ.clear()
os.environ.update(self.envOrig)
def getPath(self, cmd):
return os.path.join(self.binPath, cmd)
def patchCmd(self, cmd):
cmdPath = self.getPath(cmd)
os.symlink(self.scpMockerPath, cmdPath)
def getCallCount(self, cmd):
inv = [ self.scpMockerPath, '-c', cmd, 'status', '-C' ]
output = subprocess.check_output(inv).strip()
return int(output.strip())
def getCallArgs(self, cmd, n):
inv = [ self.scpMockerPath, '-c', cmd, 'status', '-A', str(n) ]
output = subprocess.check_output(inv).strip()
return output
def program(self, cmd, stdoutStr = "", exitStatus = 0, n = 0):
inv = [ self.scpMockerPath, '-c', cmd, 'program',
'-e', str(exitStatus),
'-s', stdoutStr,
]
if n == 0:
inv.append('-a')
subprocess.call(inv)
else:
for _ in xrange(n):
subprocess.call(inv)
def unPatchCmd(self, cmd):
cmdPath = self.getPath(cmd)
try:
os.unlink(cmdPath)
except OSError as e:
# TODO add logging?
pass
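# Illustrative usage sketch (paths are hypothetical):
#   with ScpMocker('/opt/scpmocker/bin/scpmocker', '/tmp/sandbox') as scpm:
#       scpm.patchCmd('git')
#       scpm.program('git', stdoutStr='mocked', exitStatus=0, n=1)
#       subprocess.call(['git'])  # resolves to the mock via the patched PATH
#       assert scpm.getCallCount('git') == 1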
| gpl-3.0 | -5,285,952,767,629,290,000 | 27.449275 | 73 | 0.55731 | false |
drewcsillag/skunkweb | pylibs/vfs/__init__.py | 1 | 1223 | # $Id$
# Time-stamp: <02/05/25 15:05:54 smulloni>
########################################################################
#
# Copyright (C) 2001 Jacob Smullyan <[email protected]>
#
# You may distribute under the terms of either the GNU General
# Public License or the SkunkWeb License, as specified in the
# README file.
########################################################################
from rosio import RO_StringIO
from vfs import MST_SIZE, MST_ATIME, MST_MTIME, MST_CTIME, \
VFSException, FS, LocalFS, PathPropertyStore, MultiFS, \
VFSRegistry, registerFS, FileNotFoundException, NotWriteableException
from zipfs import ZipFS
from shelfProps import ShelfPathPropertyStore
import importer
# and now, try to import stuff with optional dependencies
try:
from tarfs import TarFS
except ImportError:
pass
try:
from zodbProps import ZODBPathPropertyStore
except ImportError:
pass
# I'd like to do this, but it leads to a circular dependency with AE:
##try:
## from aeProps import AEPathPropertyStore
##except ImportError:
## pass
# SkunkWeb would die on startup with an AttributeError (AE has no
# attribute "Cache") were I to uncomment the above.
| gpl-2.0 | -6,009,414,850,838,430,000 | 28.119048 | 74 | 0.649223 | false |
esrf-bliss/Lima-camera-andor3 | tango/Andor3.py | 1 | 13277 | ############################################################################
# This file is part of LImA, a Library for Image Acquisition
#
# Copyright (C) : 2009-2014
# European Synchrotron Radiation Facility
# BP 220, Grenoble 38043
# FRANCE
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
############################################################################
#=============================================================================
#
# file : Andor3.py
#
# description : Python source for the Andor3 and its commands.
# The class is derived from Device. It represents the
# CORBA servant object which will be accessed from the
# network. All commands which can be executed on the
#                Andor3 are implemented in this file.
#
# project : TANGO Device Server
#
# copyleft : European Synchrotron Radiation Facility
# BP 220, Grenoble 38043
# FRANCE
#
#=============================================================================
# (c) - Bliss - ESRF
#=============================================================================
#
import PyTango
import sys, types, os, time
from Lima import Core
from Lima import Andor3 as Andor3Module
# import some useful helpers to create direct mapping between tango attributes
# and Lima interfaces.
from Lima.Server import AttrHelper
#==================================================================
# Andor3 Class Description:
#
#
#==================================================================
class Andor3(PyTango.Device_4Impl):
#--------- Add your global variables here -------------------------
Core.DEB_CLASS(Core.DebModApplication, 'LimaCCDs')
#------------------------------------------------------------------
# Device constructor
#------------------------------------------------------------------
@Core.DEB_MEMBER_FUNCT
def __init__(self,cl, name):
PyTango.Device_4Impl.__init__(self,cl,name)
        # dictionaries to be used with AttrHelper.get_attr_4u
self.__AdcGain = {'B11_HI_GAIN': _Andor3Camera.b11_hi_gain,
'B11_LOW_GAIN': _Andor3Camera.b11_low_gain,
'B16_LH_GAIN': _Andor3Camera.b16_lh_gain,
}
self.__AdcRate = {'MHZ10': _Andor3Camera.MHz10,
'MHZ100': _Andor3Camera.MHz100,
'MHZ200': _Andor3Camera.MHz200,
'MHZ280': _Andor3Camera.MHz280,
}
self.__Cooler = {'ON': True,
'OFF': False}
self.__FanSpeed = {'OFF': _Andor3Camera.Off,
'LOW': _Andor3Camera.Low,
'HIGH': _Andor3Camera.On,
}
self.__ElectronicShutterMode = {'ROLLING': _Andor3Camera.Rolling,
'GLOBAL': _Andor3Camera.Global,
}
self.__Overlap = {'ON': True,
'OFF': False}
self.__SpuriousNoiseFilter = {'ON': True,
'OFF': False}
self.__Attribute2FunctionBase = {'adc_gain': 'SimpleGain',
'adc_rate': 'AdcRate',
'temperature': 'Temperature',
'temperature_sp': 'TemperatureSP',
'cooler': 'Cooler',
'cooling_status': 'CoolingStatus',
'fan_speed': 'FanSpeed',
'electronic_shutter_mode': 'ElectronicShutterMode',
'frame_rate': 'FrameRate',
'max_frame_rate_transfer': 'MaxFrameRateTransfer',
'readout_time': 'ReadoutTime',
'overlap': 'Overlap',
'spurious_noise_filter': 'SpuriousNoiseFilter',
}
self.init_device()
#------------------------------------------------------------------
# Device destructor
#------------------------------------------------------------------
def delete_device(self):
pass
#------------------------------------------------------------------
# Device initialization
#------------------------------------------------------------------
@Core.DEB_MEMBER_FUNCT
def init_device(self):
self.set_state(PyTango.DevState.ON)
# Load the properties
self.get_device_properties(self.get_device_class())
# Apply properties if any
if self.adc_gain:
_Andor3Interface.setAdcGain(self.__AdcGain[self.adc_gain])
if self.adc_rate:
_Andor3Interface.setAdcRate(self.__AdcRate[self.adc_rate])
if self.temperature_sp:
_Andor3Camera.setTemperatureSP(self.temperature_sp)
if self.cooler:
_Andor3Camera.setCooler(self.__Cooler[self.cooler])
#==================================================================
#
# Andor3 read/write attribute methods
#
#==================================================================
def __getattr__(self,name) :
try:
return AttrHelper.get_attr_4u(self, name, _Andor3Interface)
except:
return AttrHelper.get_attr_4u(self, name, _Andor3Camera)
#==================================================================
#
# Andor3 command methods
#
#==================================================================
#------------------------------------------------------------------
# getAttrStringValueList command:
#
# Description: return a list of authorized values if any
# argout: DevVarStringArray
#------------------------------------------------------------------
@Core.DEB_MEMBER_FUNCT
def getAttrStringValueList(self, attr_name):
return AttrHelper.get_attr_string_value_list(self, attr_name)
#==================================================================
#
# Andor3 class definition
#
#==================================================================
class Andor3Class(PyTango.DeviceClass):
# Class Properties
class_property_list = {
}
# Device Properties
device_property_list = {
'config_path':
[PyTango.DevString,
'configuration path directory', []],
'camera_number':
[PyTango.DevShort,
'Camera number', []],
'adc_gain':
[PyTango.DevString,
'Adc Gain', []],
'adc_rate':
[PyTango.DevString,
'Adc readout rate', []],
'temperature_sp':
[PyTango.DevShort,
'Temperature set point in Celsius', []],
'cooler':
[PyTango.DevString,
'Start or stop the cooler ("ON"/"OFF")', []],
}
# Command definitions
cmd_list = {
'getAttrStringValueList':
[[PyTango.DevString, "Attribute name"],
[PyTango.DevVarStringArray, "Authorized String value list"]]
}
# Attribute definitions
attr_list = {
'temperature_sp':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':'Set/get the temperature set-point',
'unit': 'C',
'format': '%f',
'description': 'in Celsius',
}],
'temperature':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ],
{
'label':'get the current temperature sensor',
'unit': 'C',
'format': '%f',
'description': 'in Celsius',
}],
'cooler':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':'Start/stop the cooler',
'unit': 'N/A',
'format': '',
'description': 'OFF or ON',
}],
'cooling_status':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ],
{
'label':'Fast trigger mode, see manual for usage',
'unit': 'N/A',
'format': '',
'description': '0-OFF / 1-ON',
}],
'adc_gain':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':'ADC Gain',
'unit': 'N/A',
'format': '',
'description': 'ADC Gain which can be apply to the preamplifier',
}],
'adc_rate':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label': 'ADC Rate',
'unit': 'N/A',
'format': '',
'description': 'ADC Readout Rate',
}],
'electronic_shutter_mode':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':'Electronic Shutter Mode',
'unit': 'N/A',
'format': '',
'description': 'Electronic shutter mode, Rolling or Global',
}],
'fan_speed':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':'Fan speed',
'unit': 'N/A',
'format': '',
'description': 'Fan speed, off, low or High',
}],
'frame_rate':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ],
{
'label':'Frame rate',
'unit': 'Hz',
'format': '%f',
'description': 'the rate at which frames are delivered to the use',
}],
'max_frame_rate_transfer':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ],
{
'label':'Maximum frame rate transfer',
'unit': 'byte per sec.',
'format': '%f',
'description': 'Returns the maximum sustainable transfer rate of the interface for the current shutter mode and ROI',
}],
'readout_time':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ],
{
'label':'Readout time',
'unit': 'sec',
'format': '%f',
'description': 'return the time to readout data from the sensor',
}],
'overlap':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':' Enable/Disable overlap mode',
'unit': 'N/A',
'format': '',
'description': 'OFF or ON',
}],
'spurious_noise_filter':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':'Enable/Disable spurious noise filter',
'unit': 'N/A',
'format': '',
'description': 'OFF or ON',
}],
}
#------------------------------------------------------------------
# Andor3Class Constructor
#------------------------------------------------------------------
def __init__(self, name):
PyTango.DeviceClass.__init__(self, name)
self.set_type(name)
#----------------------------------------------------------------------------
# Plugins
#----------------------------------------------------------------------------
from Lima import Andor3 as Andor3Acq
_Andor3Camera = None
_Andor3Interface = None
def get_control(config_path='/users/blissadm/local/Andor3/andor/bitflow', camera_number = '0', **keys) :
#properties are passed here as string
global _Andor3Camera
global _Andor3Interface
if _Andor3Camera is None:
print ('\n\nStarting and configuring the Andor3 camera ...')
_Andor3Camera = Andor3Acq.Camera(config_path, int(camera_number))
_Andor3Interface = Andor3Acq.Interface(_Andor3Camera)
print ('\n\nAndor3 Camera #%s (%s:%s) is started'%(camera_number,_Andor3Camera.getDetectorType(),_Andor3Camera.getDetectorModel()))
return Core.CtControl(_Andor3Interface)
def get_tango_specific_class_n_device():
return Andor3Class,Andor3
| gpl-3.0 | -8,108,087,734,343,641,000 | 34.031662 | 139 | 0.442269 | false |
ronghanghu/vqa-mcb | train/v4_glove/visualize_tools.py | 1 | 10507 | import numpy as np
import matplotlib.pyplot as plt
import os
import sys
import json
import re
import shutil
from PIL import Image
from PIL import ImageFont, ImageDraw
import caffe
from caffe import layers as L
from caffe import params as P
from vqa_data_provider_layer import VQADataProvider
from vqa_data_provider_layer import VQADataProviderLayer
import config
sys.path.append(config.VQA_TOOLS_PATH)
sys.path.append(config.VQA_EVAL_TOOLS_PATH)
from vqaTools.vqa import VQA
from vqaEvaluation.vqaEval import VQAEval
def visualize_failures(stat_list,mode):
def save_qtype(qtype_list, save_filename, mode):
if mode == 'val':
savepath = os.path.join('./eval', save_filename)
# TODO
img_pre = '/home/dhpseth/vqa/02_tools/VQA/Images/val2014'
elif mode == 'test-dev':
savepath = os.path.join('./test-dev', save_filename)
# TODO
img_pre = '/home/dhpseth/vqa/02_tools/VQA/Images/test2015'
elif mode == 'test':
savepath = os.path.join('./test', save_filename)
# TODO
img_pre = '/home/dhpseth/vqa/02_tools/VQA/Images/test2015'
else:
raise Exception('Unsupported mode')
if os.path.exists(savepath): shutil.rmtree(savepath)
if not os.path.exists(savepath): os.makedirs(savepath)
for qt in qtype_list:
count = 0
for t_question in stat_list:
#print count, t_question
if count < 40/len(qtype_list):
t_question_list = t_question['q_list']
saveflag = False
#print 'debug****************************'
#print qt
#print t_question_list
#print t_question_list[0] == qt[0]
#print t_question_list[1] == qt[1]
if t_question_list[0] == qt[0] and t_question_list[1] == qt[1]:
saveflag = True
else:
saveflag = False
if saveflag == True:
t_iid = t_question['iid']
if mode == 'val':
t_img = Image.open(os.path.join(img_pre, \
'COCO_val2014_' + str(t_iid).zfill(12) + '.jpg'))
                    elif mode in ('test-dev', 'test'):
t_img = Image.open(os.path.join(img_pre, \
'COCO_test2015_' + str(t_iid).zfill(12) + '.jpg'))
# for caption
#print t_iid
#annIds = caps.getAnnIds(t_iid)
#anns = caps.loadAnns(annIds)
#cap_list = [ann['caption'] for ann in anns]
ans_list = t_question['ans_list']
draw = ImageDraw.Draw(t_img)
for i in range(len(ans_list)):
try:
draw.text((10,10*i), str(ans_list[i]))
except:
pass
ans = t_question['answer']
pred = t_question['pred']
if ans == -1:
pre = ''
elif ans == pred:
pre = 'correct '
else:
pre = 'failure '
#print ' aaa ', ans, pred
ans = re.sub( '/', ' ', str(ans))
pred = re.sub( '/', ' ', str(pred))
img_title = pre + str(' '.join(t_question_list)) + '. a_' + \
str(ans) + ' p_' + str(pred) + '.png'
count += 1
print((os.path.join(savepath,img_title)))
t_img.save(os.path.join(savepath,img_title))
    print('saving colors')
qt_color_list = [['what','color']]
save_qtype(qt_color_list, 'colors', mode)
print('saving whatis')
qt_whatis_list = [['what','is'],['what','kind'],['what','are']]
save_qtype(qt_whatis_list, 'whatis', mode)
print('saving is')
qt_is_list = [['is','the'], ['is','this'],['is','there']]
save_qtype(qt_is_list, 'is', mode)
print('saving how many')
qt_howmany_list =[['how','many']]
save_qtype(qt_howmany_list, 'howmany', mode)
def exec_validation(device_id, mode, it='', visualize=False):
caffe.set_device(device_id)
caffe.set_mode_gpu()
net = caffe.Net('./result/proto_test.prototxt',\
'./result/tmp.caffemodel',\
caffe.TEST)
dp = VQADataProvider(mode=mode,batchsize=64)
total_questions = len(dp.getQuesIds())
epoch = 0
pred_list = []
testloss_list = []
stat_list = []
while epoch == 0:
t_word, t_cont, t_img_feature, t_answer, t_glove_matrix, t_qid_list, t_iid_list, epoch = dp.get_batch_vec()
net.blobs['data'].data[...] = np.transpose(t_word,(1,0))
net.blobs['cont'].data[...] = np.transpose(t_cont,(1,0))
net.blobs['img_feature'].data[...] = t_img_feature
net.blobs['label'].data[...] = t_answer
net.blobs['glove'].data[...] = np.transpose(t_glove_matrix, (1,0,2))
net.forward()
t_pred_list = net.blobs['prediction'].data.argmax(axis=1)
t_pred_str = [dp.vec_to_answer(pred_symbol) for pred_symbol in t_pred_list]
testloss_list.append(net.blobs['loss'].data)
for qid, iid, ans, pred in zip(t_qid_list, t_iid_list, t_answer.tolist(), t_pred_str):
pred_list.append({'answer':pred, 'question_id': int(dp.getStrippedQuesId(qid))})
if visualize:
q_list = dp.seq_to_list(dp.getQuesStr(qid))
                if mode in ('test-dev', 'test'):
ans_str = ''
ans_list = ['']*10
else:
ans_str = dp.vec_to_answer(ans)
ans_list = [ dp.getAnsObj(qid)[i]['answer'] for i in range(10)]
stat_list.append({\
'qid' : qid,
'q_list' : q_list,
'iid' : iid,
'answer': ans_str,
'ans_list': ans_list,
'pred' : pred })
percent = 100 * float(len(pred_list)) / total_questions
sys.stdout.write('\r' + ('%.2f' % percent) + '%')
sys.stdout.flush()
mean_testloss = np.array(testloss_list).mean()
if mode == 'val':
valFile = './result/val2015_resfile'
with open(valFile, 'w') as f:
json.dump(pred_list, f)
if visualize:
visualize_failures(stat_list,mode)
annFile = config.DATA_PATHS['val']['ans_file']
quesFile = config.DATA_PATHS['val']['ques_file']
vqa = VQA(annFile, quesFile)
vqaRes = vqa.loadRes(valFile, quesFile)
vqaEval = VQAEval(vqa, vqaRes, n=2)
vqaEval.evaluate()
acc_overall = vqaEval.accuracy['overall']
acc_perQuestionType = vqaEval.accuracy['perQuestionType']
acc_perAnswerType = vqaEval.accuracy['perAnswerType']
return mean_testloss, acc_overall, acc_perQuestionType, acc_perAnswerType
elif mode == 'test-dev':
filename = './result/vqa_OpenEnded_mscoco_test-dev2015_v3t'+str(it).zfill(8)+'_results'
with open(filename+'.json', 'w') as f:
json.dump(pred_list, f)
if visualize:
visualize_failures(stat_list,mode)
elif mode == 'test':
filename = './result/vqa_OpenEnded_mscoco_test2015_v3c'+str(it).zfill(8)+'_results'
with open(filename+'.json', 'w') as f:
json.dump(pred_list, f)
if visualize:
visualize_failures(stat_list,mode)
def drawgraph(results, save_question_type_graphs=False):
# 0:it
# 1:trainloss
# 2:testloss
# 3:oa_acc
# 4:qt_acc
# 5:at_acc
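    # e.g. one entry (illustrative values):
    #   [10000, 2.31, 2.05, 48.2, {'what color': 33.1, ...}, {'yes/no': 70.2, ...}]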
# training curve
it = np.array([l[0] for l in results])
loss = np.array([l[1] for l in results])
valloss = np.array([l[2] for l in results])
valacc = np.array([l[3] for l in results])
fig = plt.figure()
ax1 = fig.add_subplot(111)
ax2 = ax1.twinx()
ax1.plot(it,loss, color='blue', label='train loss')
ax1.plot(it,valloss, '--', color='blue', label='test loss')
ax2.plot(it,valacc, color='red', label='acc on val')
plt.legend(loc='lower left')
ax1.set_xlabel('Iterations')
ax1.set_ylabel('Loss Value')
ax2.set_ylabel('Accuracy on Val [%]')
plt.savefig('./learning_curve max_%2.2f.png'%valacc.max())
plt.clf()
plt.close("all")
# question type
it = np.array([l[0] for l in results])
oa_acc = np.array([l[3] for l in results])
qt_dic_list = [l[4] for l in results]
def draw_qt_acc(target_key_list, figname):
fig = plt.figure()
for k in target_key_list:
print((k,type(k)))
t_val = np.array([ qt_dic[k] for qt_dic in qt_dic_list])
plt.plot(it,t_val,label=str(k))
plt.legend(fontsize='small')
plt.ylim(0,100.)
#plt.legend(prop={'size':6})
plt.xlabel('Iterations')
plt.ylabel('Accuracy on Val [%]')
plt.savefig(figname,dpi=200)
plt.clf()
plt.close("all")
if save_question_type_graphs:
s_keys = sorted(qt_dic_list[0].keys())
draw_qt_acc(s_keys[ 0:13]+[s_keys[31],], './ind_qt_are.png')
draw_qt_acc(s_keys[13:17]+s_keys[49:], './ind_qt_how_where_who_why.png')
draw_qt_acc(s_keys[17:31]+[s_keys[32],], './ind_qt_is.png')
draw_qt_acc(s_keys[33:49], './ind_qt_what.png')
draw_qt_acc(['what color is the','what color are the','what color is',\
'what color','what is the color of the'],'./qt_color.png')
draw_qt_acc(['how many','how','how many people are',\
'how many people are in'],'./qt_number.png')
draw_qt_acc(['who is','why','why is the','where is the','where are the',\
'which'],'./qt_who_why_where_which.png')
draw_qt_acc(['what is the man','is the man','are they','is he',\
'is the woman','is this person','what is the woman','is the person',\
'what is the person'],'./qt_human.png')
| bsd-2-clause | 5,928,542,490,714,291,000 | 38.205224 | 115 | 0.504901 | false |
gadeleon/chromatic_circle | questions.py | 1 | 1751 | '''
Question generation functions
'''
import random
def degree(note, scale, degree):
'''
What is the <Number> of <Note> <Scale>?
'''
try:
answer = raw_input('What is the {} of {} {}: '.format(str(degree + 1), note, scale.capitalize()))
return answer, degree
except KeyboardInterrupt:
print '\nQUITTER!'
raise SystemExit
def grade_degree(key, note, scale):
deg = random.randint(0, 6)
answer = key[deg]
correct = False
while not correct:
my_answer, my_degree = degree(note, scale, deg)
if my_answer == answer:
print 'You Done got it Right!'
correct = True
else:
continue
def triad(note, scale):
'''
What are the notes in a <NOTE> <Scale> triad?
'''
try:
answer = raw_input('What notes are in a {} {} triad: '.format(note, scale.capitalize()))
return answer
except KeyboardInterrupt:
print '\nQUITTER!'
raise SystemExit
def grade_triad(key, note, scale):
correct = False
answer_triad = [key[0], key[2], key[4]]
my_triad = []
while not correct:
answer = triad(note, scale)
if ',' in answer:
my_triad = answer.split(', ')
print my_triad
if len(my_triad) != 3:
my_triad = answer.split(',')
else:
my_triad = answer.split(' ')
if len(my_triad) != 3:
print 'Answer with commas or spaces between notes'
raise SystemExit
validation = [i for i, x in zip(answer_triad, my_triad) if i == x]
if len(validation) == 3:
print 'You Done got it Right! '
correct = True
else:
continue
| mit | -1,124,495,998,507,797,200 | 25.530303 | 105 | 0.537978 | false |
benkonrath/django-guardian | guardian/testapp/tests/test_decorators.py | 1 | 13291 | from __future__ import unicode_literals
from django.conf import settings, global_settings
from django.contrib.auth.models import Group, AnonymousUser
from django.core.exceptions import PermissionDenied
from django.db.models.base import ModelBase
from django.http import HttpRequest
from django.http import HttpResponse
from django.http import HttpResponseForbidden
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.template import TemplateDoesNotExist
from django.test import TestCase
from guardian.compat import get_user_model
from guardian.compat import get_user_model_path
from guardian.compat import get_user_permission_full_codename
import mock
from guardian.decorators import permission_required, permission_required_or_403
from guardian.exceptions import GuardianError
from guardian.exceptions import WrongAppError
from guardian.shortcuts import assign_perm
from guardian.testapp.tests.conf import TEST_SETTINGS
from guardian.testapp.tests.conf import TestDataMixin
from guardian.testapp.tests.conf import override_settings
from guardian.testapp.tests.conf import skipUnlessTestApp
from django import get_version as django_get_version
User = get_user_model()
user_model_path = get_user_model_path()
@override_settings(**TEST_SETTINGS)
@skipUnlessTestApp
class PermissionRequiredTest(TestDataMixin, TestCase):
def setUp(self):
super(PermissionRequiredTest, self).setUp()
self.anon = AnonymousUser()
self.user = User.objects.get_or_create(username='jack')[0]
self.group = Group.objects.get_or_create(name='jackGroup')[0]
def _get_request(self, user=None):
if user is None:
user = AnonymousUser()
request = HttpRequest()
request.user = user
return request
def test_no_args(self):
try:
@permission_required
def dummy_view(request):
return HttpResponse('dummy_view')
except GuardianError:
pass
else:
self.fail("Trying to decorate using permission_required without "
"permission as first argument should raise exception")
def test_RENDER_403_is_false(self):
request = self._get_request(self.anon)
@permission_required_or_403('not_installed_app.change_user')
def dummy_view(request):
return HttpResponse('dummy_view')
with mock.patch('guardian.conf.settings.RENDER_403', False):
response = dummy_view(request)
self.assertEqual(response.content, b'')
self.assertTrue(isinstance(response, HttpResponseForbidden))
@mock.patch('guardian.conf.settings.RENDER_403', True)
def test_TEMPLATE_403_setting(self):
request = self._get_request(self.anon)
@permission_required_or_403('not_installed_app.change_user')
def dummy_view(request):
return HttpResponse('dummy_view')
with mock.patch('guardian.conf.settings.TEMPLATE_403', 'dummy403.html'):
response = dummy_view(request)
self.assertEqual(response.content, b'foobar403\n')
@mock.patch('guardian.conf.settings.RENDER_403', True)
def test_403_response_raises_error(self):
request = self._get_request(self.anon)
@permission_required_or_403('not_installed_app.change_user')
def dummy_view(request):
return HttpResponse('dummy_view')
with mock.patch('guardian.conf.settings.TEMPLATE_403',
'_non-exisitng-403.html'):
self.assertRaises(TemplateDoesNotExist, dummy_view, request)
@mock.patch('guardian.conf.settings.RENDER_403', False)
@mock.patch('guardian.conf.settings.RAISE_403', True)
def test_RAISE_403_setting_is_true(self):
request = self._get_request(self.anon)
@permission_required_or_403('not_installed_app.change_user')
def dummy_view(request):
return HttpResponse('dummy_view')
self.assertRaises(PermissionDenied, dummy_view, request)
def test_anonymous_user_wrong_app(self):
request = self._get_request(self.anon)
@permission_required_or_403('not_installed_app.change_user')
def dummy_view(request):
return HttpResponse('dummy_view')
self.assertEqual(dummy_view(request).status_code, 403)
def test_anonymous_user_wrong_codename(self):
request = self._get_request()
@permission_required_or_403('auth.wrong_codename')
def dummy_view(request):
return HttpResponse('dummy_view')
self.assertEqual(dummy_view(request).status_code, 403)
def test_anonymous_user(self):
request = self._get_request()
@permission_required_or_403('auth.change_user')
def dummy_view(request):
return HttpResponse('dummy_view')
self.assertEqual(dummy_view(request).status_code, 403)
def test_wrong_lookup_variables_number(self):
request = self._get_request()
try:
@permission_required_or_403('auth.change_user', (User, 'username'))
def dummy_view(request, username):
pass
dummy_view(request, username='jack')
except GuardianError:
pass
else:
self.fail("If lookup variables are passed they must be tuple of: "
"(ModelClass/app_label.ModelClass/queryset, "
"<pair of lookup_string and view_arg>)\n"
"Otherwise GuardianError should be raised")
def test_wrong_lookup_variables(self):
request = self._get_request()
args = (
(2010, 'username', 'username'),
('User', 'username', 'username'),
(User, 'username', 'no_arg'),
)
for tup in args:
try:
@permission_required_or_403('auth.change_user', tup)
def show_user(request, username):
user = get_object_or_404(User, username=username)
return HttpResponse("It's %s here!" % user.username)
show_user(request, 'jack')
except GuardianError:
pass
else:
self.fail("Wrong arguments given but GuardianError not raised")
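    # For reference, a well-formed lookup tuple (hypothetical view) looks like:
    #   @permission_required_or_403('auth.change_user',
    #                               (User, 'username', 'username'))
    #   def show_user(request, username): ...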
def test_user_has_no_access(self):
request = self._get_request()
@permission_required_or_403('auth.change_user')
def dummy_view(request):
return HttpResponse('dummy_view')
self.assertEqual(dummy_view(request).status_code, 403)
def test_user_has_access(self):
perm = get_user_permission_full_codename('change')
joe, created = User.objects.get_or_create(username='joe')
assign_perm(perm, self.user, obj=joe)
request = self._get_request(self.user)
@permission_required_or_403(perm, (
user_model_path, 'username', 'username'))
def dummy_view(request, username):
return HttpResponse('dummy_view')
response = dummy_view(request, username='joe')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'dummy_view')
def test_user_has_access_on_model_with_metaclass(self):
"""
        Test for the fix of comparisons made via type() in the
        decorator. For a `Model` implementing a custom metaclass,
        the decorator used to fail because type() does not return
        `ModelBase`.
"""
perm = get_user_permission_full_codename('change')
class TestMeta(ModelBase):
pass
class ProxyUser(User):
class Meta:
proxy = True
app_label = User._meta.app_label
__metaclass__ = TestMeta
joe, created = ProxyUser.objects.get_or_create(username='joe')
assign_perm(perm, self.user, obj=joe)
request = self._get_request(self.user)
@permission_required_or_403(perm, (
ProxyUser, 'username', 'username'))
def dummy_view(request, username):
return HttpResponse('dummy_view')
response = dummy_view(request, username='joe')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'dummy_view')
def test_user_has_obj_access_even_if_we_also_check_for_global(self):
perm = get_user_permission_full_codename('change')
joe, created = User.objects.get_or_create(username='joe')
assign_perm(perm, self.user, obj=joe)
request = self._get_request(self.user)
@permission_required_or_403(perm, (
user_model_path, 'username', 'username'), accept_global_perms=True)
def dummy_view(request, username):
return HttpResponse('dummy_view')
response = dummy_view(request, username='joe')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'dummy_view')
def test_user_has_no_obj_perm_access(self):
perm = get_user_permission_full_codename('change')
joe, created = User.objects.get_or_create(username='joe')
request = self._get_request(self.user)
@permission_required_or_403(perm, (
user_model_path, 'username', 'username'))
def dummy_view(request, username):
return HttpResponse('dummy_view')
response = dummy_view(request, username='joe')
self.assertEqual(response.status_code, 403)
def test_user_has_global_perm_access_but_flag_not_set(self):
perm = get_user_permission_full_codename('change')
joe, created = User.objects.get_or_create(username='joe')
assign_perm(perm, self.user)
request = self._get_request(self.user)
@permission_required_or_403(perm, (
user_model_path, 'username', 'username'))
def dummy_view(request, username):
return HttpResponse('dummy_view')
response = dummy_view(request, username='joe')
self.assertEqual(response.status_code, 403)
def test_user_has_global_perm_access(self):
perm = get_user_permission_full_codename('change')
joe, created = User.objects.get_or_create(username='joe')
assign_perm(perm, self.user)
request = self._get_request(self.user)
@permission_required_or_403(perm, (
user_model_path, 'username', 'username'), accept_global_perms=True)
def dummy_view(request, username):
return HttpResponse('dummy_view')
response = dummy_view(request, username='joe')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'dummy_view')
def test_model_lookup(self):
request = self._get_request(self.user)
perm = get_user_permission_full_codename('change')
joe, created = User.objects.get_or_create(username='joe')
assign_perm(perm, self.user, obj=joe)
models = (
user_model_path,
User,
User.objects.filter(is_active=True),
)
for model in models:
@permission_required_or_403(perm, (model, 'username', 'username'))
def dummy_view(request, username):
get_object_or_404(User, username=username)
return HttpResponse('hello')
response = dummy_view(request, username=joe.username)
self.assertEqual(response.content, b'hello')
def test_redirection_raises_wrong_app_error(self):
from guardian.testapp.models import Project
request = self._get_request(self.user)
User.objects.create(username='foo')
Project.objects.create(name='foobar')
@permission_required('auth.change_group',
(Project, 'name', 'group_name'),
login_url='/foobar/')
def dummy_view(request, project_name):
pass
        # 'auth.change_group' is the wrong permission codename (it should be
        # one related to the User model).
self.assertRaises(WrongAppError, dummy_view,
request, group_name='foobar')
def test_redirection(self):
from guardian.testapp.models import Project
request = self._get_request(self.user)
User.objects.create(username='foo')
Project.objects.create(name='foobar')
@permission_required('testapp.change_project',
(Project, 'name', 'project_name'),
login_url='/foobar/')
def dummy_view(request, project_name):
pass
response = dummy_view(request, project_name='foobar')
self.assertTrue(isinstance(response, HttpResponseRedirect))
self.assertTrue(response._headers['location'][1].startswith(
'/foobar/'))
@override_settings(LOGIN_URL='django.contrib.auth.views.login')
def test_redirection_class(self):
view_url = '/permission_required/'
if django_get_version() < "1.5":
# skip this test for django versions < 1.5
return
response = self.client.get(view_url)
# this should be '/account/login'
self.assertRedirects(
response, global_settings.LOGIN_URL + "?next=" + view_url)
| bsd-2-clause | -5,353,922,090,276,023,000 | 36.229692 | 80 | 0.630351 | false |
jupyter-widgets/ipywidgets | ipywidgets/widgets/widget_media.py | 1 | 7783 | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import mimetypes
from .widget_core import CoreWidget
from .domwidget import DOMWidget
from .valuewidget import ValueWidget
from .widget import register
from traitlets import Unicode, CUnicode, Bool
from .trait_types import CByteMemoryView
@register
class _Media(DOMWidget, ValueWidget, CoreWidget):
"""Base class for Image, Audio and Video widgets.
The `value` of this widget accepts a byte string. The byte string is the
raw data that you want the browser to display.
If you pass `"url"` to the `"format"` trait, `value` will be interpreted
as a URL as bytes encoded in UTF-8.
"""
# Define the custom state properties to sync with the front-end
value = CByteMemoryView(help="The media data as a memory view of bytes.").tag(sync=True)
@classmethod
def _from_file(cls, tag, filename, **kwargs):
"""
        Create a :class:`Media` from a local file.
Parameters
----------
filename: str
The location of a file to read into the value from disk.
**kwargs:
The keyword arguments for `Media`
        Returns a `Media` with the value set from the filename.
"""
value = cls._load_file_value(filename)
if 'format' not in kwargs:
format = cls._guess_format(tag, filename)
if format is not None:
kwargs['format'] = format
return cls(value=value, **kwargs)
@classmethod
def from_url(cls, url, **kwargs):
"""
        Create a :class:`Media` from a URL.
:code:`Media.from_url(url)` is equivalent to:
        .. code-block:: python
med = Media(value=url, format='url')
But both unicode and bytes arguments are allowed for ``url``.
Parameters
----------
url: [str, bytes]
The location of a URL to load.
"""
if isinstance(url, str):
# If str, it needs to be encoded to bytes
url = url.encode('utf-8')
return cls(value=url, format='url', **kwargs)
def set_value_from_file(self, filename):
"""
Convenience method for reading a file into `value`.
Parameters
----------
filename: str
The location of a file to read into value from disk.
"""
value = self._load_file_value(filename)
self.value = value
@classmethod
def _load_file_value(cls, filename):
if getattr(filename, 'read', None) is not None:
return filename.read()
else:
with open(filename, 'rb') as f:
return f.read()
@classmethod
def _guess_format(cls, tag, filename):
        # file objects may have a .name attribute
name = getattr(filename, 'name', None)
name = name or filename
try:
mtype, _ = mimetypes.guess_type(name)
if not mtype.startswith('{}/'.format(tag)):
return None
return mtype[len('{}/'.format(tag)):]
except Exception:
return None
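    # For example (illustrative): _guess_format('image', 'photo.jpeg')
    # returns 'jpeg', while _guess_format('image', 'clip.mp4') returns
    # None because the guessed MIME type is not under 'image/'.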
def _get_repr(self, cls):
# Truncate the value in the repr, since it will
# typically be very, very large.
class_name = self.__class__.__name__
# Return value first like a ValueWidget
signature = []
sig_value = 'value={!r}'.format(self.value[:40].tobytes())
if self.value.nbytes > 40:
sig_value = sig_value[:-1]+"..."+sig_value[-1]
signature.append(sig_value)
for key in super(cls, self)._repr_keys():
if key == 'value':
continue
value = str(getattr(self, key))
signature.append('{}={!r}'.format(key, value))
signature = ', '.join(signature)
return '{}({})'.format(class_name, signature)
@register
class Image(_Media):
"""Displays an image as a widget.
The `value` of this widget accepts a byte string. The byte string is the
raw image data that you want the browser to display. You can explicitly
define the format of the byte string using the `format` trait (which
defaults to "png").
If you pass `"url"` to the `"format"` trait, `value` will be interpreted
as a URL as bytes encoded in UTF-8.
"""
_view_name = Unicode('ImageView').tag(sync=True)
_model_name = Unicode('ImageModel').tag(sync=True)
# Define the custom state properties to sync with the front-end
format = Unicode('png', help="The format of the image.").tag(sync=True)
width = CUnicode(help="Width of the image in pixels. Use layout.width "
"for styling the widget.").tag(sync=True)
height = CUnicode(help="Height of the image in pixels. Use layout.height "
"for styling the widget.").tag(sync=True)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
return cls._from_file('image', filename, **kwargs)
def __repr__(self):
return self._get_repr(Image)
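# Usage sketch (illustrative; 'logo.png' is a hypothetical local file):
#     img = Image.from_file('logo.png', width=300)
#     img.set_value_from_file('logo.png')  # replace the displayed bytes
#     img2 = Image.from_url('https://example.org/logo.png')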
@register
class Video(_Media):
"""Displays a video as a widget.
The `value` of this widget accepts a byte string. The byte string is the
raw video data that you want the browser to display. You can explicitly
define the format of the byte string using the `format` trait (which
defaults to "mp4").
If you pass `"url"` to the `"format"` trait, `value` will be interpreted
as a URL as bytes encoded in UTF-8.
"""
_view_name = Unicode('VideoView').tag(sync=True)
_model_name = Unicode('VideoModel').tag(sync=True)
# Define the custom state properties to sync with the front-end
format = Unicode('mp4', help="The format of the video.").tag(sync=True)
width = CUnicode(help="Width of the video in pixels.").tag(sync=True)
height = CUnicode(help="Height of the video in pixels.").tag(sync=True)
autoplay = Bool(True, help="When true, the video starts when it's displayed").tag(sync=True)
loop = Bool(True, help="When true, the video will start from the beginning after finishing").tag(sync=True)
controls = Bool(True, help="Specifies that video controls should be displayed (such as a play/pause button etc)").tag(sync=True)
@classmethod
def from_file(cls, filename, **kwargs):
return cls._from_file('video', filename, **kwargs)
def __repr__(self):
return self._get_repr(Video)
@register
class Audio(_Media):
    """Displays audio as a widget.
The `value` of this widget accepts a byte string. The byte string is the
raw audio data that you want the browser to display. You can explicitly
define the format of the byte string using the `format` trait (which
defaults to "mp3").
If you pass `"url"` to the `"format"` trait, `value` will be interpreted
as a URL as bytes encoded in UTF-8.
"""
_view_name = Unicode('AudioView').tag(sync=True)
_model_name = Unicode('AudioModel').tag(sync=True)
# Define the custom state properties to sync with the front-end
format = Unicode('mp3', help="The format of the audio.").tag(sync=True)
autoplay = Bool(True, help="When true, the audio starts when it's displayed").tag(sync=True)
loop = Bool(True, help="When true, the audio will start from the beginning after finishing").tag(sync=True)
controls = Bool(True, help="Specifies that audio controls should be displayed (such as a play/pause button etc)").tag(sync=True)
@classmethod
def from_file(cls, filename, **kwargs):
return cls._from_file('audio', filename, **kwargs)
def __repr__(self):
return self._get_repr(Audio)
| bsd-3-clause | -1,439,636,317,653,839,400 | 33.745536 | 132 | 0.620326 | false |
BorgERP/borg-erp-6of3 | l10n_hr/l10n_hr_account/account_invoice.py | 1 | 8374 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012 Slobodni programi d.o.o. (<http://www.slobodni-programi.com>).
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from tools.translate import _
import poziv_na_broj as pnbr
from tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT
import time
class account_invoice(osv.Model):
_inherit = "account.invoice"
    def _get_reference_type(self, cursor, user, context=None):
        """Function used by the function field reference_type in order to initialise available Reference Types"""
res = super(account_invoice, self)._get_reference_type(cursor, user, context=context)
res.append(('pnbr', 'Poziv na br.(HR)'))
return res
def _get_default_reference_type(self, cr, uid, context=None):
if context is None:
context = {}
type_inv = context.get('type', 'out_invoice')
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        if user.company_id.country_id and user.company_id.country_id.code in ('HR',):
            if type_inv in ('out_invoice',):
return 'pnbr'
return 'none'
def _convert_ref(self, cr, uid, ref):
ref = super(account_invoice, self)._convert_ref(cr, uid, ref)
res = ''
for ch in ref:
res = res + (ch.isdigit() and ch or '')
return res
_columns = {
'reference_type': fields.selection(_get_reference_type,
'Reference Type',
required=True, readonly=True,
states={'draft': [('readonly', False)]}),
'date_delivery': fields.date('Delivery Date', readonly=True,
states={'draft': [('readonly', False)]},
select=True,
help="Keep empty to use the current date"),
'supplier_number': fields.char('Supplier ref', size=32, select=True, readonly=True,
states={'draft': [('readonly', False)]}),
}
_defaults = {
'reference_type': _get_default_reference_type,
}
def copy(self, cr, uid, id, default=None, context=None):
default = default or {}
if 'date_delivery' not in default:
default.update({'date_delivery': False})
return super(account_invoice, self).copy(cr, uid, id, default, context)
def pnbr_get(self, cr, uid, inv_id, context=None):
invoice = self.browse(cr, uid, inv_id, context=context)
res = invoice.reference or ''
def getP1_P4data(what):
res = ""
if what == 'partner_code':
res = invoice.partner_id.code or invoice.partner_id.id
if what == 'partner_id':
res = str(invoice.partner_id.id)
if what == 'invoice_no':
res = invoice.number
if what == 'invoice_ym':
res = invoice.date_invoice[2:4] + invoice.date_invoice[5:7]
if what == 'delivery_ym':
res = invoice.date_delivery[2:4] + invoice.date_delivery[5:7]
return self._convert_ref(cr, uid, res)
if invoice.journal_id.model_pnbr and invoice.journal_id.model_pnbr > 'HR':
model = invoice.journal_id.model_pnbr
P1 = getP1_P4data(invoice.journal_id.P1_pnbr or '')
P2 = getP1_P4data(invoice.journal_id.P2_pnbr or '')
P3 = getP1_P4data(invoice.journal_id.P3_pnbr or '')
P4 = getP1_P4data(invoice.journal_id.P4_pnbr or '')
res = pnbr.reference_number_get(model, P1, P2, P3, P4)
return model + ' ' + res
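    # Illustrative note: pnbr.reference_number_get() builds the digit string
    # (with its check digit) from P1..P4, and the journal's model code is
    # prepended above, yielding e.g. 'HR01 <digits>' (model value hypothetical).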
# KGB Copy
def action_number(self, cr, uid, ids, context=None):
if context is None:
context = {}
        #TODO: not the correct fix, but fresh values are required before reading.
self.write(cr, uid, ids, {})
for obj_inv in self.browse(cr, uid, ids, context=context):
id = obj_inv.id
invtype = obj_inv.type
number = obj_inv.number
move_id = obj_inv.move_id and obj_inv.move_id.id or False
reference = obj_inv.reference or ''
self.write(cr, uid, id, {'internal_number': number}) # kgb ids?
if invtype in ('in_invoice', 'in_refund'):
if not reference:
ref = self._convert_ref(cr, uid, number)
else:
ref = self._convert_ref(cr, uid, number)
#ref = reference
else:
ref = self._convert_ref(cr, uid, number)
#KGB - start
if not obj_inv.date_invoice:
self.write(cr, uid, [id],
{'date_invoice': time.strftime(DEFAULT_SERVER_DATE_FORMAT)},
context=context)
# TODO: need to? self.action_date_assign( cr, uid, [id])
if not obj_inv.date_delivery: # mandatory in Croatia for services
self.write(cr, uid, [id], {'date_delivery': obj_inv.date_invoice}, context=context)
ref = self.pnbr_get(cr, uid, id, context)
self.write(cr, uid, id, {'reference': ref})
#KGB - end
cr.execute('UPDATE account_move SET ref=%s ' \
'WHERE id=%s -- AND (ref is null OR ref = \'\')',
(ref, move_id))
cr.execute('UPDATE account_move_line SET ref=%s ' \
'WHERE move_id=%s -- AND (ref is null OR ref = \'\')',
(ref, move_id))
cr.execute('UPDATE account_analytic_line SET ref=%s ' \
'FROM account_move_line ' \
'WHERE account_move_line.move_id = %s ' \
'AND account_analytic_line.move_id = account_move_line.id',
(ref, move_id))
for inv_id, name in self.name_get(cr, uid, [id]):
ctx = context.copy()
if obj_inv.type in ('out_invoice', 'out_refund'):
ctx = self.get_log_context(cr, uid, context=ctx)
message = _("Invoice '%s' is validated.") % name
self.log(cr, uid, inv_id, message, context=ctx)
return True
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
types = {
'out_invoice': 'IR: ', # KGB CI
'in_invoice': 'UR: ', # KGB SI
'out_refund': 'IO: ', # KGB OR
'in_refund': 'UO: ', # KGB SR
}
return [(r['id'], (r['number']) or types[r['type']] + (r['name'] or ''))
for r in self.read(cr, uid, ids, ['type', 'number', 'name'], context, load='_classic_write')]
def button_change_fiscal_position(self, cr, uid, ids, context=None):
if context is None:
context = {}
fpos_obj = self.pool.get('account.fiscal.position')
inv_line_obj = self.pool.get('account.invoice.line')
for inv in self.browse(cr, uid, ids):
for line in inv.invoice_line:
new_taxes = fpos_obj.map_tax(cr, uid, inv.fiscal_position, line.product_id.taxes_id)
inv_line_obj.write(cr, uid, [line.id], {'invoice_line_tax_id': [(6, 0, new_taxes)]})
return True
| agpl-3.0 | -3,190,680,389,484,607,500 | 44.759563 | 112 | 0.524242 | false |
jmpews/torweb | tests/test_blog_load_from_md.py | 1 | 2367 | # coding:utf-8
import sys, os
import os.path
sys.path.append(os.path.dirname(sys.path[0]))
from settings.config import config
from peewee import Model, MySQLDatabase
mysqldb = MySQLDatabase('',
user=config.BACKEND_MYSQL['user'],
password=config.BACKEND_MYSQL['password'],
host=config.BACKEND_MYSQL['host'],
port=config.BACKEND_MYSQL['port'])
from db.mysql_model.blog import BlogPostCategory, BlogPostLabel, BlogPost
md_path = './docs/articles'
def check_md_format(file_path):
fd = open(file_path)
md_info = {}
while True:
line = fd.readline().strip()
if len(line) == 0:
break
try:
i = line.index(':')
k = line[:i]
v = line[i+1:]
except:
fd.close()
return None
md_info[k.strip().lower()] = v.strip()
    # Check that the required fields exist
# Necessary Args: title, tags
# Optional Args: date, category, auth, slug
keys = md_info.keys()
if 'title' in keys and 'tags' in keys and 'slug' in keys:
md_info['content'] = fd.read(-1)
fd.close()
return md_info
else:
fd.close()
return None
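# A file accepted by check_md_format() starts with a header block like this
# hypothetical example, terminated by a blank line before the Markdown body:
#   Title: Hello World
#   Tags: python, blog
#   Slug: hello-world
#   Category: Notes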
def convert_md_2_post(md_info):
category = md_info.get('category')
if not category:
category = 'UnClassified'
cate = BlogPostCategory.get_by_name(category)
post = BlogPost.create(title=md_info['title'],
category=cate,
slug=md_info['slug'],
content=md_info['content'])
BlogPostLabel.add_post_label(md_info['tags'], post)
def get_files(root_path):
files = os.listdir(root_path)
print(files)
for file_name in files:
_, suffix = os.path.splitext(file_name)
if suffix == '.md':
md_file_path = os.path.join(root_path, file_name)
md_info = check_md_format(md_file_path)
if md_info:
print(md_info['title'])
convert_md_2_post(md_info)
if __name__ == '__main__':
mysqldb.create_tables([BlogPostLabel, BlogPost, BlogPostCategory], safe=True)
t = BlogPostLabel.delete()
t.execute()
t = BlogPost.delete()
t.execute()
t = BlogPostCategory.delete()
t.execute()
get_files(md_path)
| mit | 728,075,483,062,490,800 | 27.670732 | 81 | 0.553382 | false |
ageron/tensorflow | tensorflow/python/ops/custom_gradient.py | 1 | 12977 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Decorator to overrides the gradient for a function."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import tape as tape_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
from tensorflow.python.util.tf_export import tf_export
def copy_handle_data(source_t, target_t):
"""Copies HandleData for variant and resource type tensors if available.
The CppShapeInferenceResult::HandleData proto contains information about the
shapes and types of the element tensors of resource/variant type tensors.
We need to copy this across function boundaries, i.e., when capturing a
placeholder or when returning a function tensor as output. If we don't do this
the element tensors will have unknown shapes, e.g., if a TensorList variant
tensor is captured as a placeholder, elements popped from that list would have
unknown shape.
Args:
source_t: The tensor to copy HandleData from.
target_t: The tensor to copy HandleData to.
"""
if (target_t.dtype == dtypes.resource or
target_t.dtype == dtypes.variant):
if isinstance(source_t, ops.EagerTensor):
handle_data = source_t._handle_data # pylint: disable=protected-access
else:
handle_data = resource_variable_ops.get_resource_handle_data(source_t)
if (handle_data is not None
and handle_data.is_set
and handle_data.shape_and_type):
# pylint: disable=protected-access
pywrap_tensorflow.SetHandleShapeAndType(target_t.graph._c_graph,
target_t._as_tf_output(),
handle_data.SerializeToString())
# pylint: enable=protected-access
# Ensure that shapes and dtypes are propagated.
shapes, types = zip(*[(pair.shape, pair.dtype)
for pair in handle_data.shape_and_type])
ranks = [len(s.dim) if not s.unknown_rank else -1 for s in shapes]
shapes = [[d.size for d in s.dim]
if not s.unknown_rank else None for s in shapes]
pywrap_tensorflow.TF_GraphSetOutputHandleShapesAndTypes_wrapper(
target_t._op._graph._c_graph, # pylint: disable=protected-access
target_t._as_tf_output(), # pylint: disable=protected-access
shapes, ranks, types)
@tf_export("custom_gradient")
def custom_gradient(f):
"""Decorator to define a function with a custom gradient.
This decorator allows fine grained control over the gradients of a sequence
  of operations. This may be useful for multiple reasons, including providing
a more efficient or numerically stable gradient for a sequence of operations.
For example, consider the following function that commonly occurs in the
computation of cross entropy and log likelihoods:
```python
def log1pexp(x):
return tf.log(1 + tf.exp(x))
```
  Due to numerical instability, the gradient of this function evaluated at x=100 is
NaN. For example:
```python
x = tf.constant(100.)
y = log1pexp(x)
dy = tf.gradients(y, x) # Will be NaN when evaluated.
```
The gradient expression can be analytically simplified to provide numerical
stability:
```python
@tf.custom_gradient
def log1pexp(x):
e = tf.exp(x)
def grad(dy):
return dy * (1 - 1 / (1 + e))
return tf.log(1 + e), grad
```
With this definition, the gradient at x=100 will be correctly evaluated as
1.0.
See also `tf.RegisterGradient` which registers a gradient function for a
primitive TensorFlow operation. `tf.custom_gradient` on the other hand allows
for fine grained control over the gradient computation of a sequence of
operations.
Note that if the decorated function uses `Variable`s, the enclosing variable
scope must be using `ResourceVariable`s.
Args:
f: function `f(*x)` that returns a tuple `(y, grad_fn)` where:
- `x` is a sequence of `Tensor` inputs to the function.
- `y` is a `Tensor` or sequence of `Tensor` outputs of applying
TensorFlow operations in `f` to `x`.
- `grad_fn` is a function with the signature `g(*grad_ys)` which returns
a list of `Tensor`s - the derivatives of `Tensor`s in `y` with respect
to the `Tensor`s in `x`. `grad_ys` is a `Tensor` or sequence of
`Tensor`s the same size as `y` holding the initial value gradients for
each `Tensor` in `y`. In a pure mathematical sense, a vector-argument
vector-valued function `f`'s derivatives should be its Jacobian matrix
`J`. Here we are expressing the Jacobian `J` as a function `grad_fn`
which defines how `J` will transform a vector `grad_ys` when
left-multiplied with it (`grad_ys * J`). This functional representation
of a matrix is convenient to use for chain-rule calculation
(in e.g. the back-propagation algorithm).
If `f` uses `Variable`s (that are not part of the
inputs), i.e. through `get_variable`, then `grad_fn` should have
signature `g(*grad_ys, variables=None)`, where `variables` is a list of
the `Variable`s, and return a 2-tuple `(grad_xs, grad_vars)`, where
`grad_xs` is the same as above, and `grad_vars` is a `list<Tensor>`
with the derivatives of `Tensor`s in `y` with respect to the variables
(that is, grad_vars has one Tensor per variable in variables).
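      For example, a sketch (illustrative, not from the original docs):
      ```python
      @tf.custom_gradient
      def scale(x):
        v = tf.get_variable('v', shape=[], use_resource=True)
        def grad(dy, variables=None):
          return dy * v, [dy * x]  # (grad_xs, grad_vars)
        return x * v, grad
      ```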
Returns:
A function `h(x)` which returns the same value as `f(x)[0]` and whose
gradient (as calculated by `tf.gradients`) is determined by `f(x)[1]`.
"""
def decorated(*args, **kwargs):
"""Decorated function with custom gradient."""
if context.executing_eagerly():
return _eager_mode_decorator(f, *args, **kwargs)
else:
return _graph_mode_decorator(f, *args, **kwargs)
return tf_decorator.make_decorator(f, decorated)
def _graph_mode_decorator(f, *args, **kwargs):
"""Implement custom gradient decorator for graph mode."""
# TODO(rsepassi): Add support for kwargs
if kwargs:
raise ValueError(
"The custom_gradient decorator currently supports keywords "
"arguments only when eager execution is enabled.")
name = "CustomGradient-%s" % ops.uid()
args = [ops.convert_to_tensor(x) for x in args]
# Checking global and local variables attempts to ensure that no non-resource
# Variables are added to the graph.
current_var_scope = variable_scope.get_variable_scope()
before_vars = set(current_var_scope.global_variables() +
current_var_scope.local_variables())
with backprop.GradientTape() as tape:
result, grad_fn = f(*args)
after_vars = set(current_var_scope.global_variables() +
current_var_scope.local_variables())
new_vars = after_vars - before_vars
for v in new_vars:
if not resource_variable_ops.is_resource_variable(v):
raise TypeError(
"All variables used by a function wrapped with @custom_gradient must "
"be `ResourceVariable`s. Ensure that no `variable_scope` is created "
"with `use_resource=False`.")
# The variables that grad_fn needs to return gradients for are the set of
# variables used that are *not* part of the inputs.
variables = list(set(tape.watched_variables()) - set(args))
grad_argspec = tf_inspect.getfullargspec(grad_fn)
variables_in_signature = ("variables" in grad_argspec.args or
grad_argspec.varkw)
if variables and not variables_in_signature:
raise TypeError("If using @custom_gradient with a function that "
"uses variables, then grad_fn must accept a keyword "
"argument 'variables'.")
if variables_in_signature and not variables:
# User seems to intend to use variables but none were captured.
if not variable_scope.get_variable_scope().use_resource:
raise TypeError("If using @custom_gradient with a function that "
"uses variables, the enclosing variable scope must "
"have use_resource=True.")
else:
logging.warn("@custom_gradient grad_fn has 'variables' in signature, but "
"no ResourceVariables were used on the forward pass.")
flat_result = nest.flatten(result)
all_tensors = flat_result + args + variables
def tape_grad_fn(*result_grads):
"""Custom grad fn wrapper."""
result_grads = result_grads[:len(flat_result)]
if variables:
input_grads, variable_grads = grad_fn(*result_grads, variables=variables)
if len(variable_grads) != len(variables):
raise ValueError("Must return gradient for each variable from "
"@custom_gradient grad_fn.")
else:
input_grads = grad_fn(*result_grads)
variable_grads = []
# Need to return one value per input to the IdentityN, so pad the
# gradients of the inputs of the custom_gradient function with the
# gradients of the outputs as well.
input_grads = nest.flatten(input_grads)
return ([None] * len(flat_result)) + input_grads + variable_grads
@ops.RegisterGradient(name)
def internal_grad_fn(unused_op, *result_grads): # pylint: disable=unused-variable
"""Custom grad fn wrapper."""
return tape_grad_fn(*result_grads)
original_tensors = all_tensors
with ops.get_default_graph().gradient_override_map({"IdentityN": name}):
all_tensors = array_ops.identity_n(all_tensors)
# Propagate handle data for happier shape inference for resource variables.
for i, t in enumerate(original_tensors):
if t.dtype == dtypes.resource and hasattr(t, "_handle_data"):
all_tensors[i]._handle_data = t._handle_data # pylint: disable=protected-access
tape_lib.record_operation(
f.__name__, all_tensors, original_tensors, tape_grad_fn)
for ot, t in zip(original_tensors, all_tensors):
copy_handle_data(ot, t)
return nest.pack_sequence_as(
structure=result, flat_sequence=all_tensors[:len(flat_result)])
def _eager_mode_decorator(f, *args, **kwargs):
"""Implement custom gradient decorator for eager mode."""
with backprop.GradientTape() as tape:
result, grad_fn = f(*args, **kwargs)
all_inputs = list(args) + list(kwargs.values())
# The variables that grad_fn needs to return gradients for are the set of
# variables used that are *not* part of the inputs.
variables = [v for v in set(tape.watched_variables()) if v not in all_inputs]
grad_argspec = tf_inspect.getfullargspec(grad_fn)
if (variables and ("variables" not in grad_argspec.args) and
not grad_argspec.varkw):
raise TypeError("If using @custom_gradient with a function that "
"uses variables, then grad_fn must accept a keyword "
"argument 'variables'.")
flat_result = nest.flatten(result)
# TODO(apassos) consider removing the identity below.
flat_result = [gen_array_ops.identity(x) for x in flat_result]
def actual_grad_fn(*result_grads):
"""Custom grad fn wrapper."""
if variables:
input_grads, variable_grads = grad_fn(*result_grads, variables=variables)
if len(variable_grads) != len(variables):
raise ValueError("Must return gradient for each variable from "
"@custom_gradient grad_fn.")
else:
input_grads = grad_fn(*result_grads)
variable_grads = []
return nest.flatten(input_grads) + variable_grads
input_tensors = [ops.convert_to_tensor(x) for x
in list(args) + list(variables)]
tape_lib.record_operation(f.__name__, flat_result, input_tensors,
actual_grad_fn)
flat_result = list(flat_result)
return nest.pack_sequence_as(result, flat_result)
| apache-2.0 | -7,386,745,741,267,146,000 | 44.059028 | 86 | 0.681282 | false |
dmilos/nucleotide | src/nucleotide/component/windows/cygwingcc/atom/blank.py | 1 | 1439 | #!/usr/bin/env python2
# Copyright 2015 Dejan D. M. Milosavljevic
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import nucleotide
import nucleotide.component
import nucleotide.component.function
component_windows_cygwingcc_atom_blank = {
'platform' : {
'host' : 'Windows',
'guest' : 'cygwin'
},
'cc' : {
'vendor' : 'FSF',
'name' : 'cygwingcc',
'version': 'X'
},
'name' :'blank',
'config' : {
'CPPDEFINES' : [ ],
'CPPFLAGS' : [],
'LIBS' : [ ]
},
'class': [ 'blank', 'cygwingcc:blank' ]
}
class Blank:
def __init__(self):
pass
@staticmethod
def extend(P_option):
nucleotide.component.function.extend( P_option, 'cygwingcc:blank', component_windows_cygwingcc_atom_blank )
@staticmethod
    def check():
        pass
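# Usage sketch (hypothetical option dict; exact merge semantics are defined
# by nucleotide.component.function.extend):
#     options = {}
#     Blank.extend(options)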
| apache-2.0 | 7,428,636,978,328,667,000 | 25.188679 | 115 | 0.601112 | false |
alexsalo/genenetwork2 | wqflask/base/webqtlConfig.py | 1 | 3470 | #########################################'
# Environment Variables - public
#########################################
#Debug Level
#1 for debug, mod python will reload import each time
DEBUG = 1
#USER privilege
USERDICT = {'guest':1,'user':2, 'admin':3, 'root':4}
#minimum number of informative strains
KMININFORMATIVE = 5
#maximum number of traits for interval mapping
MULTIPLEMAPPINGLIMIT = 11
#maximum number of traits for correlation
MAXCORR = 100
#Daily download limit from one IP
DAILYMAXIMUM = 1000
#maximum LRS value
MAXLRS = 460.0
#temporary data life span
MAXLIFE = 86400
#MINIMUM Database public value
PUBLICTHRESH = 0
#NCBI address
NCBI_LOCUSID = "http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=gene&cmd=Retrieve&dopt=Graphics&list_uids=%s"
UCSC_REFSEQ = "http://genome.cse.ucsc.edu/cgi-bin/hgGene?db=%s&hgg_gene=%s&hgg_chrom=chr%s&hgg_start=%s&hgg_end=%s"
GENBANK_ID = "http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=Nucleotide&cmd=search&doptcmdl=DocSum&term=%s"
OMIM_ID = "http://www.ncbi.nlm.nih.gov/omim/%s"
UNIGEN_ID = "http://www.ncbi.nlm.nih.gov/UniGene/clust.cgi?ORG=%s&CID=%s";
HOMOLOGENE_ID = "http://www.ncbi.nlm.nih.gov/sites/entrez?Db=homologene&Cmd=DetailsSearch&Term=%s"
PUBMEDLINK_URL = "http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&db=PubMed&list_uids=%s&dopt=Abstract"
UCSC_POS = "http://genome.ucsc.edu/cgi-bin/hgTracks?clade=mammal&org=%s&db=%s&position=chr%s:%s-%s&pix=800&Submit=submit"
UCSC_BLAT = 'http://genome.ucsc.edu/cgi-bin/hgBlat?org=%s&db=%s&type=0&sort=0&output=0&userSeq=%s'
UTHSC_BLAT = 'http://ucscbrowser.genenetwork.org/cgi-bin/hgBlat?org=%s&db=%s&type=0&sort=0&output=0&userSeq=%s'
UCSC_GENOME = "http://genome.ucsc.edu/cgi-bin/hgTracks?db=%s&position=chr%s:%d-%d&hgt.customText=http://web2qtl.utmem.edu:88/snp/chr%s"
ENSEMBLE_BLAT = 'http://www.ensembl.org/Mus_musculus/featureview?type=AffyProbe&id=%s'
DBSNP = 'http://www.ncbi.nlm.nih.gov/SNP/snp_ref.cgi?type=rs&rs=%s'
UCSC_RUDI_TRACK_URL = " http://genome.cse.ucsc.edu/cgi-bin/hgTracks?org=%s&db=%s&hgt.customText=http://gbic.biol.rug.nl/~ralberts/tracks/%s/%s"
GENOMEBROWSER_URL="http://ucscbrowser.genenetwork.org/cgi-bin/hgTracks?clade=mammal&org=Mouse&db=mm9&position=%s&hgt.suggest=&pix=800&Submit=submit"
ENSEMBLETRANSCRIPT_URL="http://useast.ensembl.org/Mus_musculus/Lucene/Details?species=Mus_musculus;idx=Transcript;end=1;q=%s"
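# Example (illustrative): NCBI_LOCUSID % '14281' fills the single %s slot to
# produce the Entrez Gene URL; the other link templates are filled the same way.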
GNROOT = "/home/zas1024/gene/" # Will remove this and dependent items later
SECUREDIR = GNROOT + 'secure/'
COMMON_LIB = GNROOT + 'support/admin'
HTMLPATH = GNROOT + 'genotype_files/'
PYLMM_PATH = '/home/zas1024/plink_gemma/'
SNP_PATH = '/home/zas1024/snps/'
IMGDIR = GNROOT + '/wqflask/wqflask/images/'
IMAGESPATH = HTMLPATH + 'images/'
UPLOADPATH = IMAGESPATH + 'upload/'
TMPDIR = '/home/zas1024/tmp/' # Will remove this and dependent items later
GENODIR = HTMLPATH + 'genotypes/'
NEWGENODIR = HTMLPATH + 'new_genotypes/'
GENO_ARCHIVE_DIR = GENODIR + 'archive/'
TEXTDIR = HTMLPATH + 'ProbeSetFreeze_DataMatrix/'
CMDLINEDIR = HTMLPATH + 'webqtl/cmdLine/'
ChangableHtmlPath = GNROOT + 'web/'
SITENAME = 'GN'
PORTADDR = "http://50.16.251.170"
BASEHREF = '<base href="http://50.16.251.170/">'
INFOPAGEHREF = '/dbdoc/%s.html'
GLOSSARYFILE = "/glossary.html"
CGIDIR = '/webqtl/' #XZ: The variable name 'CGIDIR' should be changed to 'PYTHONDIR'
SCRIPTFILE = 'main.py'
REFRESHSTR = '<meta http-equiv="refresh" content="5;url=%s' + SCRIPTFILE +'?sid=%s">'
REFRESHDIR = '%s' + SCRIPTFILE +'?sid=%s'
| agpl-3.0 | -4,796,549,076,374,986,000 | 44.064935 | 148 | 0.711816 | false |
MisanthropicBit/pygments-sisal | setup.py | 1 | 1494 | """pygments-sisal module setup script for distribution."""
from __future__ import with_statement
import os
import setuptools
def get_version(filename):
with open(filename) as fh:
for line in fh:
if line.startswith('__version__'):
return line.split('=')[-1].strip()[1:-1]
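# For example (hypothetical file contents): a line such as
#   __version__ = '1.0.0'
# in pygments_sisal/__init__.py makes get_version() return '1.0.0'.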
setuptools.setup(
name='pygments-sisal',
version=get_version(os.path.join('pygments_sisal', '__init__.py')),
author='Alexander Asp Bock',
author_email='[email protected]',
platforms='All',
description=('A pygments lexer for SISAL'),
install_requires=['Pygments>=2.0'],
license='MIT',
keywords='pygments, lexer, sisal',
url='https://github.com/MisanthropicBit/pygments-sisal',
packages=setuptools.find_packages(),
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
# Pygments entry point
entry_points="[pygments.lexers]\n"
"sisal=pygments_sisal:SisalLexer"
)
| mit | 8,996,431,354,112,685,000 | 32.2 | 71 | 0.613119 | false |
lmazuel/azure-sdk-for-python | azure-mgmt-compute/azure/mgmt/compute/v2015_06_15/models/virtual_machine_scale_set_instance_view_py3.py | 1 | 1931 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VirtualMachineScaleSetInstanceView(Model):
"""The instance view of a virtual machine scale set.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar virtual_machine: The instance view status summary for the virtual
machine scale set.
:vartype virtual_machine:
~azure.mgmt.compute.v2015_06_15.models.VirtualMachineScaleSetInstanceViewStatusesSummary
:ivar extensions: The extensions information.
:vartype extensions:
list[~azure.mgmt.compute.v2015_06_15.models.VirtualMachineScaleSetVMExtensionsSummary]
:param statuses: The resource status information.
:type statuses:
list[~azure.mgmt.compute.v2015_06_15.models.InstanceViewStatus]
"""
_validation = {
'virtual_machine': {'readonly': True},
'extensions': {'readonly': True},
}
_attribute_map = {
'virtual_machine': {'key': 'virtualMachine', 'type': 'VirtualMachineScaleSetInstanceViewStatusesSummary'},
'extensions': {'key': 'extensions', 'type': '[VirtualMachineScaleSetVMExtensionsSummary]'},
'statuses': {'key': 'statuses', 'type': '[InstanceViewStatus]'},
}
def __init__(self, *, statuses=None, **kwargs) -> None:
super(VirtualMachineScaleSetInstanceView, self).__init__(**kwargs)
self.virtual_machine = None
self.extensions = None
self.statuses = statuses
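# Construction sketch (illustrative; only 'statuses' is writable, the other
# attributes are populated by the service):
#     view = VirtualMachineScaleSetInstanceView(
#         statuses=[InstanceViewStatus(code='ProvisioningState/succeeded')])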
| mit | -237,297,689,070,112,480 | 39.229167 | 114 | 0.65044 | false |
pallets/jinja | tests/test_security.py | 1 | 6176 | import pytest
from markupsafe import escape
from jinja2 import Environment
from jinja2.exceptions import SecurityError
from jinja2.exceptions import TemplateRuntimeError
from jinja2.exceptions import TemplateSyntaxError
from jinja2.nodes import EvalContext
from jinja2.sandbox import ImmutableSandboxedEnvironment
from jinja2.sandbox import SandboxedEnvironment
from jinja2.sandbox import unsafe
class PrivateStuff:
def bar(self):
return 23
@unsafe
def foo(self):
return 42
def __repr__(self):
return "PrivateStuff"
class PublicStuff:
def bar(self):
return 23
def _foo(self):
return 42
def __repr__(self):
return "PublicStuff"
class TestSandbox:
def test_unsafe(self, env):
env = SandboxedEnvironment()
pytest.raises(
SecurityError, env.from_string("{{ foo.foo() }}").render, foo=PrivateStuff()
)
assert env.from_string("{{ foo.bar() }}").render(foo=PrivateStuff()) == "23"
pytest.raises(
SecurityError, env.from_string("{{ foo._foo() }}").render, foo=PublicStuff()
)
assert env.from_string("{{ foo.bar() }}").render(foo=PublicStuff()) == "23"
assert env.from_string("{{ foo.__class__ }}").render(foo=42) == ""
assert env.from_string("{{ foo.func_code }}").render(foo=lambda: None) == ""
# security error comes from __class__ already.
pytest.raises(
SecurityError,
env.from_string("{{ foo.__class__.__subclasses__() }}").render,
foo=42,
)
def test_immutable_environment(self, env):
env = ImmutableSandboxedEnvironment()
pytest.raises(SecurityError, env.from_string("{{ [].append(23) }}").render)
pytest.raises(SecurityError, env.from_string("{{ {1:2}.clear() }}").render)
def test_restricted(self, env):
env = SandboxedEnvironment()
pytest.raises(
TemplateSyntaxError,
env.from_string,
"{% for item.attribute in seq %}...{% endfor %}",
)
pytest.raises(
TemplateSyntaxError,
env.from_string,
"{% for foo, bar.baz in seq %}...{% endfor %}",
)
def test_template_data(self, env):
env = Environment(autoescape=True)
t = env.from_string(
"{% macro say_hello(name) %}"
"<p>Hello {{ name }}!</p>{% endmacro %}"
'{{ say_hello("<blink>foo</blink>") }}'
)
escaped_out = "<p>Hello <blink>foo</blink>!</p>"
assert t.render() == escaped_out
assert str(t.module) == escaped_out
assert escape(t.module) == escaped_out
assert t.module.say_hello("<blink>foo</blink>") == escaped_out
assert (
escape(t.module.say_hello(EvalContext(env), "<blink>foo</blink>"))
== escaped_out
)
assert escape(t.module.say_hello("<blink>foo</blink>")) == escaped_out
def test_attr_filter(self, env):
env = SandboxedEnvironment()
tmpl = env.from_string('{{ cls|attr("__subclasses__")() }}')
pytest.raises(SecurityError, tmpl.render, cls=int)
def test_binary_operator_intercepting(self, env):
def disable_op(left, right):
raise TemplateRuntimeError("that operator so does not work")
for expr, ctx, rv in ("1 + 2", {}, "3"), ("a + 2", {"a": 2}, "4"):
env = SandboxedEnvironment()
env.binop_table["+"] = disable_op
t = env.from_string(f"{{{{ {expr} }}}}")
assert t.render(ctx) == rv
env.intercepted_binops = frozenset(["+"])
t = env.from_string(f"{{{{ {expr} }}}}")
with pytest.raises(TemplateRuntimeError):
t.render(ctx)
def test_unary_operator_intercepting(self, env):
def disable_op(arg):
raise TemplateRuntimeError("that operator so does not work")
for expr, ctx, rv in ("-1", {}, "-1"), ("-a", {"a": 2}, "-2"):
env = SandboxedEnvironment()
env.unop_table["-"] = disable_op
t = env.from_string(f"{{{{ {expr} }}}}")
assert t.render(ctx) == rv
env.intercepted_unops = frozenset(["-"])
t = env.from_string(f"{{{{ {expr} }}}}")
with pytest.raises(TemplateRuntimeError):
t.render(ctx)
class TestStringFormat:
def test_basic_format_safety(self):
env = SandboxedEnvironment()
t = env.from_string('{{ "a{0.__class__}b".format(42) }}')
assert t.render() == "ab"
def test_basic_format_all_okay(self):
env = SandboxedEnvironment()
t = env.from_string('{{ "a{0.foo}b".format({"foo": 42}) }}')
assert t.render() == "a42b"
def test_safe_format_safety(self):
env = SandboxedEnvironment()
t = env.from_string('{{ ("a{0.__class__}b{1}"|safe).format(42, "<foo>") }}')
assert t.render() == "ab<foo>"
def test_safe_format_all_okay(self):
env = SandboxedEnvironment()
t = env.from_string('{{ ("a{0.foo}b{1}"|safe).format({"foo": 42}, "<foo>") }}')
assert t.render() == "a42b<foo>"
def test_empty_braces_format(self):
env = SandboxedEnvironment()
t1 = env.from_string('{{ ("a{}b{}").format("foo", "42")}}')
t2 = env.from_string('{{ ("a{}b{}"|safe).format(42, "<foo>") }}')
assert t1.render() == "afoob42"
assert t2.render() == "a42b<foo>"
class TestStringFormatMap:
def test_basic_format_safety(self):
env = SandboxedEnvironment()
t = env.from_string('{{ "a{x.__class__}b".format_map({"x":42}) }}')
assert t.render() == "ab"
def test_basic_format_all_okay(self):
env = SandboxedEnvironment()
t = env.from_string('{{ "a{x.foo}b".format_map({"x":{"foo": 42}}) }}')
assert t.render() == "a42b"
def test_safe_format_all_okay(self):
env = SandboxedEnvironment()
t = env.from_string(
'{{ ("a{x.foo}b{y}"|safe).format_map({"x":{"foo": 42}, "y":"<foo>"}) }}'
)
assert t.render() == "a42b<foo>"
| bsd-3-clause | -7,340,597,471,083,471,000 | 34.699422 | 88 | 0.553271 | false |
xbmcmegapack/plugin.video.megapack.dev | resources/lib/menus/home_countries_cambodia.py | 1 | 1113 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This file is part of XBMC Mega Pack Addon.
Copyright (C) 2014 Wolverine ([email protected])
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/gpl-3.0.html
"""
class Countries_Cambodia():
'''Class that manages this specific menu context.'''
def open(self, plugin, menu):
menu.add_xplugins(plugin.get_xplugins(dictionaries=["Channels",
"Events", "Live", "Movies", "Sports", "TVShows"],
            countries=["Cambodia"]))
| gpl-3.0 | -2,878,269,767,698,330,000 | 37.344828 | 76 | 0.69577 | false |