repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---|
msmbuilder/mdentropy | mdentropy/cli/main.py | 1 | 1372 | from __future__ import print_function, absolute_import, division
import sys
import argparse
from .. import __version__
from . import dmutinf
from . import dtent
def main():
help = ('MDEntropy is a python library that allows users to perform '
'information-theoretic analyses on molecular dynamics (MD) '
'trajectories.')
p = argparse.ArgumentParser(description=help)
p.add_argument(
'-V', '--version',
action='version',
version='mdentropy %s' % __version__,
)
sub_parsers = p.add_subparsers(
metavar='command',
dest='cmd',
)
dmutinf.configure_parser(sub_parsers)
dtent.configure_parser(sub_parsers)
if len(sys.argv) == 1:
sys.argv.append('-h')
args = p.parse_args()
args_func(args, p)
def args_func(args, p):
try:
args.func(args, p)
except RuntimeError as e:
sys.exit("Error: %s" % e)
except Exception as e:
if e.__class__.__name__ not in ('ScannerError', 'ParserError'):
message = """\
An unexpected error has occurred with mdentropy (version %s), please
consider sending the following traceback to the mdentropy GitHub issue tracker at:
https://github.com/msmbuilder/mdentropy/issues
"""
print(message % __version__, file=sys.stderr)
raise # as if we did not catch it
| mit | 5,335,409,560,070,120,000 | 27 | 82 | 0.620991 | false |
thorkd1t/lurkbot | query.py | 1 | 1330 | import sqlite3
import time
import joinlist
# the tables look like this:
# db name 'imaqtpie.db' (each channel in joinlist has its own .db)
# Table name chat (each db has one table named 'chat')
#___________________________________________________________________
# usr | mesg | id | flags | channel | date/time |
#===================================================================
# bob | hi | 1 | @badges= | imaqtpie |2017-05-01 12:00:00 |
#-------------------------------------------------------------------
# jim | Kappa | 2 | @badges= | imaqtpie |2017-05-01 12:00:01 |
#-------------------------------------------------------------------
target = "imaqtpie" #channel name
conn = sqlite3.connect(target + '.db')
c = conn.cursor()
#====================================================================
#example queries:
#------------------
#c.execute('select usr from chat')
#c.execute('select * from chat')
#c.execute('select * from chat where usr == ""')
#c.execute('select mesg from chat where usr == "moobot" order by id desc limit 5')
c.execute('select * from chat order by id desc limit 50')
bla = c.fetchall()
if bla is not None:
for i in bla:
print i[0] + ": " + i[1]
else:
print "nothing to print"
conn.close()
time.sleep(100)
| mit | -8,376,591,812,289,806,000 | 32.102564 | 82 | 0.437594 | false |
NicoVarg99/daf-recipes | ckan/ckan/ckan/ckan/tests/helpers.py | 1 | 16313 | # encoding: utf-8
'''This is a collection of helper functions for use in tests.
We want to avoid sharing test helper functions between test modules as
much as possible, and we definitely don't want to share test fixtures between
test modules, or to introduce a complex hierarchy of test class subclasses,
etc.
We want to reduce the amount of "travel" that a reader needs to undertake to
understand a test method -- reducing the number of other files they need to go
and read to understand what the test code does. And we want to avoid tightly
coupling test modules to each other by having them share code.
But some test helper functions just increase the readability of tests so much
and make writing tests so much easier, that it's worth having them despite the
potential drawbacks.
This module is reserved for these very useful functions.
'''
import webtest
import nose.tools
from nose.tools import assert_in, assert_not_in
import mock
from ckan.common import config
import ckan.lib.search as search
import ckan.config.middleware
import ckan.model as model
import ckan.logic as logic
def reset_db():
'''Reset CKAN's database.
If a test class uses the database, then it should call this function in its
``setup()`` method to make sure that it has a clean database to start with
(nothing left over from other test classes or from previous test runs).
If a test class doesn't use the database (and most test classes shouldn't
need to) then it doesn't need to call this function.
:returns: ``None``
'''
# Close any database connections that have been left open.
# This prevents CKAN from hanging waiting for some unclosed connection.
model.Session.close_all()
model.repo.rebuild_db()
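# Illustrative usage sketch (added note, not part of the original helpers):
# a test class that touches the database would typically call reset_db() from
# its setup() method, e.g.
#
#     class TestMyAction(object):
#         def setup(self):
#             reset_db()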
def call_action(action_name, context=None, **kwargs):
'''Call the named ``ckan.logic.action`` function and return the result.
This is just a nicer way for user code to call action functions, nicer than
either calling the action function directly or via
:py:func:`ckan.logic.get_action`.
For example::
user_dict = call_action('user_create', name='seanh',
email='[email protected]', password='pass')
Any keyword arguments given will be wrapped in a dict and passed to the
action function as its ``data_dict`` argument.
Note: this skips authorization! It passes 'ignore_auth': True to action
functions in their ``context`` dicts, so the corresponding authorization
functions will not be run.
This is because ckan.tests.logic.action tests only the actions, the
authorization functions are tested separately in
ckan.tests.logic.auth.
See the :doc:`testing guidelines </contributing/testing>` for more info.
This function should eventually be moved to
:py:func:`ckan.logic.call_action` and the current
:py:func:`ckan.logic.get_action` function should be
deprecated. The tests may still need their own wrapper function for
:py:func:`ckan.logic.call_action`, e.g. to insert ``'ignore_auth': True``
into the ``context`` dict.
:param action_name: the name of the action function to call, e.g.
``'user_update'``
:type action_name: string
:param context: the context dict to pass to the action function
(optional, if no context is given a default one will be supplied)
:type context: dict
:returns: the dict or other value that the action function returns
'''
if context is None:
context = {}
context.setdefault('user', '127.0.0.1')
context.setdefault('ignore_auth', True)
return logic.get_action(action_name)(context=context, data_dict=kwargs)
def call_auth(auth_name, context, **kwargs):
'''Call the named ``ckan.logic.auth`` function and return the result.
This is just a convenience function for tests in
:py:mod:`ckan.tests.logic.auth` to use.
Usage::
result = helpers.call_auth('user_update', context=context,
id='some_user_id',
name='updated_user_name')
:param auth_name: the name of the auth function to call, e.g.
``'user_update'``
:type auth_name: string
:param context: the context dict to pass to the auth function, must
contain ``'user'`` and ``'model'`` keys,
e.g. ``{'user': 'fred', 'model': my_mock_model_object}``
:type context: dict
:returns: the dict that the auth function returns, e.g.
``{'success': True}`` or ``{'success': False, msg: '...'}``
or just ``{'success': False}``
:rtype: dict
'''
assert 'user' in context, ('Test methods must put a user name in the '
'context dict')
assert 'model' in context, ('Test methods must put a model in the '
'context dict')
return logic.check_access(auth_name, context, data_dict=kwargs)
class CKANTestApp(webtest.TestApp):
'''A wrapper around webtest.TestApp
It adds some convenience methods for CKAN
'''
_flask_app = None
@property
def flask_app(self):
if not self._flask_app:
self._flask_app = self.app.apps['flask_app']._wsgi_app
return self._flask_app
def _get_test_app():
'''Return a webtest.TestApp for CKAN, with legacy templates disabled.
For functional tests that need to request CKAN pages or post to the API.
Unit tests shouldn't need this.
'''
config['ckan.legacy_templates'] = False
app = ckan.config.middleware.make_app(config['global_conf'], **config)
app = CKANTestApp(app)
return app
class FunctionalTestBase(object):
'''A base class for functional test classes to inherit from.
Allows configuration changes by overriding _apply_config_changes and
resetting the CKAN config after your test class has run. It creates a
webtest.TestApp at self.app for your class to use to make HTTP requests
to the CKAN web UI or API.
If you're overriding methods that this class provides, like setup_class()
and teardown_class(), make sure to use super() to call this class's methods
at the top of yours!
'''
@classmethod
def _get_test_app(cls): # leading _ because nose is terrible
# FIXME: remove this method and switch to using helpers.get_test_app
# in each test once the old functional tests are fixed or removed
if not hasattr(cls, '_test_app'):
cls._test_app = _get_test_app()
return cls._test_app
@classmethod
def setup_class(cls):
# Make a copy of the Pylons config, so we can restore it in teardown.
cls._original_config = dict(config)
cls._apply_config_changes(config)
cls._get_test_app()
@classmethod
def _apply_config_changes(cls, cfg):
pass
def setup(self):
'''Reset the database and clear the search indexes.'''
reset_db()
if hasattr(self, '_test_app'):
self._test_app.reset()
search.clear_all()
@classmethod
def teardown_class(cls):
# Restore the Pylons config to its original values, in case any tests
# changed any config settings.
config.clear()
config.update(cls._original_config)
def submit_and_follow(app, form, extra_environ=None, name=None,
value=None, **args):
'''
Call webtest_submit with name/value passed expecting a redirect
and return the response from following that redirect.
'''
response = webtest_submit(form, name, value=value, status=302,
extra_environ=extra_environ, **args)
return app.get(url=response.headers['Location'],
extra_environ=extra_environ)
## FIXME: remove webtest_* functions below when we upgrade webtest
def webtest_submit(form, name=None, index=None, value=None, **args):
'''
backported version of webtest.Form.submit that actually works
for submitting with different submit buttons.
We're stuck on an old version of webtest because we're stuck
on an old version of webob because we're stuck on an old version
of Pylons. This prolongs our suffering, but on the bright side
it lets us have functional tests that work.
'''
fields = webtest_submit_fields(form, name, index=index, submit_value=value)
if form.method.upper() != "GET":
args.setdefault("content_type", form.enctype)
return form.response.goto(form.action, method=form.method,
params=fields, **args)
def webtest_submit_fields(form, name=None, index=None, submit_value=None):
'''
backported version of webtest.Form.submit_fields that actually works
for submitting with different submit buttons.
'''
from webtest.app import File
submit = []
# Use another name here so we can keep function param the same for BWC.
submit_name = name
if index is not None and submit_value is not None:
raise ValueError("Can't specify both submit_value and index.")
# If no particular button was selected, use the first one
if index is None and submit_value is None:
index = 0
# This counts all fields with the submit name not just submit fields.
current_index = 0
for name, field in form.field_order:
if name is None: # pragma: no cover
continue
if submit_name is not None and name == submit_name:
if index is not None and current_index == index:
submit.append((name, field.value_if_submitted()))
if submit_value is not None and \
field.value_if_submitted() == submit_value:
submit.append((name, field.value_if_submitted()))
current_index += 1
else:
value = field.value
if value is None:
continue
if isinstance(field, File):
submit.append((name, field))
continue
if isinstance(value, list):
for item in value:
submit.append((name, item))
else:
submit.append((name, value))
return submit
def webtest_maybe_follow(response, **kw):
"""
Follow all redirects. If this response is not a redirect, do nothing.
Returns another response object.
(backported from WebTest 2.0.1)
"""
remaining_redirects = 100 # infinite loops protection
while 300 <= response.status_int < 400 and remaining_redirects:
response = response.follow(**kw)
remaining_redirects -= 1
assert remaining_redirects > 0, "redirects chain looks infinite"
return response
def change_config(key, value):
'''Decorator to temporarily change CKAN's config to a new value
This allows you to easily create tests that need specific config values to
be set, making sure it'll be reverted to what it was originally, after your
test is run.
Usage::
@helpers.change_config('ckan.site_title', 'My Test CKAN')
def test_ckan_site_title(self):
assert config['ckan.site_title'] == 'My Test CKAN'
:param key: the config key to be changed, e.g. ``'ckan.site_title'``
:type key: string
:param value: the new config key's value, e.g. ``'My Test CKAN'``
:type value: string
'''
def decorator(func):
def wrapper(*args, **kwargs):
_original_config = config.copy()
config[key] = value
try:
return_value = func(*args, **kwargs)
finally:
config.clear()
config.update(_original_config)
return return_value
return nose.tools.make_decorator(func)(wrapper)
return decorator
def mock_auth(auth_function_path):
'''
Decorator to easily mock a CKAN auth method in the context of a test
function
It adds a mock object for the provided auth_function_path as a parameter to
the test function.
Essentially it makes sure that `ckan.authz.clear_auth_functions_cache` is
called before and after to make sure that the auth functions pick up
the newly changed values.
Usage::
@helpers.mock_auth('ckan.logic.auth.create.package_create')
def test_mock_package_create(self, mock_package_create):
from ckan import logic
mock_package_create.return_value = {'success': True}
# package_create is mocked
eq_(logic.check_access('package_create', {}), True)
assert mock_package_create.called
    :param auth_function_path: the full path to the auth function to be mocked,
        e.g. ``ckan.logic.auth.create.package_create``
    :type auth_function_path: string
'''
from ckan.authz import clear_auth_functions_cache
def decorator(func):
def wrapper(*args, **kwargs):
try:
with mock.patch(auth_function_path) as mocked_auth:
clear_auth_functions_cache()
new_args = args + tuple([mocked_auth])
return_value = func(*new_args, **kwargs)
finally:
clear_auth_functions_cache()
return return_value
return nose.tools.make_decorator(func)(wrapper)
return decorator
def mock_action(action_name):
'''
Decorator to easily mock a CKAN action in the context of a test function
It adds a mock object for the provided action as a parameter to the test
function. The mock is discarded at the end of the function, even if there
is an exception raised.
Note that this mocks the action both when it's called directly via
``ckan.logic.get_action`` and via ``ckan.plugins.toolkit.get_action``.
Usage::
@mock_action('user_list')
def test_mock_user_list(self, mock_user_list):
mock_user_list.return_value = 'hi'
# user_list is mocked
eq_(helpers.call_action('user_list', {}), 'hi')
assert mock_user_list.called
:param action_name: the name of the action to be mocked,
e.g. ``package_create``
:type action_name: string
'''
def decorator(func):
def wrapper(*args, **kwargs):
mock_action = mock.MagicMock()
from ckan.logic import get_action as original_get_action
def side_effect(called_action_name):
if called_action_name == action_name:
return mock_action
else:
return original_get_action(called_action_name)
try:
with mock.patch('ckan.logic.get_action') as mock_get_action, \
mock.patch('ckan.plugins.toolkit.get_action') \
as mock_get_action_toolkit:
mock_get_action.side_effect = side_effect
mock_get_action_toolkit.side_effect = side_effect
new_args = args + tuple([mock_action])
return_value = func(*new_args, **kwargs)
finally:
# Make sure to stop the mock, even with an exception
mock_action.stop()
return return_value
return nose.tools.make_decorator(func)(wrapper)
return decorator
def set_extra_environ(key, value):
    '''Decorator to temporarily change a single request environment value
    Create a new test app and use the side effect of making a request
to set an extra_environ value. Reset the value to '' after the test.
Usage::
        @helpers.set_extra_environ('SCRIPT_NAME', '/myscript')
def test_ckan_thing_affected_by_script_name(self):
# ...
:param key: the extra_environ key to be changed, e.g. ``'SCRIPT_NAME'``
:type key: string
:param value: the new extra_environ key's value, e.g. ``'/myscript'``
:type value: string
'''
def decorator(func):
def wrapper(*args, **kwargs):
app = _get_test_app()
app.get('/', extra_environ={key: value})
try:
return_value = func(*args, **kwargs)
finally:
app.get('/', extra_environ={key: ''})
return return_value
return nose.tools.make_decorator(func)(wrapper)
return decorator
| gpl-3.0 | -2,641,112,794,684,869,000 | 33.708511 | 79 | 0.634647 | false |
umeboshi2/vignewton | vignewton/resources.py | 1 | 1855 | from pyramid.security import Allow, Everyone, Authenticated
from fanstatic import Library, Resource
from js.lightbox import lightbox
from haberdashery.resources import jqueryui, fc_css, deform_css
#from trumpet.resources import jqueryui
from trumpet.resources import StaticResources as TrumpetResources
library = Library('vignewton_lib', 'static')
css = Library('vignewton_css', 'static/css')
js = Library('vignewton_js', 'static/js')
favicon = Resource(library, 'favicon.ico')
main_screen = Resource(css, 'mainscreen.css', depends=[deform_css])
admin_screen = Resource(css, 'adminscreen.css', depends=[deform_css])
post_to_url = Resource(js, 'post2url.js', depends=[jqueryui])
main_calendar_view = Resource(js, 'main-calendar-view.js', depends=[fc_css])
main_betgames_view = Resource(js, 'main-betgames-view.js', depends=[jqueryui])
main_betgames_confirm_bet = Resource(js, 'main-betgames-confirm-bet.js',
depends=[jqueryui])
from vignewton.security import authn_policy
class StaticResources(TrumpetResources):
main_screen = main_screen
admin_screen = admin_screen
# override trumpet favicon
favicon = favicon
main_calendar_view = main_calendar_view
main_betgames_view = main_betgames_view
main_betgames_confirm_bet = main_betgames_confirm_bet
post_to_url = post_to_url
lightbox = lightbox
# the acl entries are allow/deny, group, permission
class RootGroupFactory(object):
__name__ = ""
__acl__ = [
(Allow, Everyone, 'public'),
(Allow, Authenticated, 'user'),
(Allow, 'manager', 'manage'),
(Allow, 'editor', ('wiki_add', 'wiki_edit')),
(Allow, 'admin', ('admin', 'manage')),
]
authn_policy = authn_policy
def __init__(self, request):
# comment
pass
| unlicense | 3,318,480,387,530,409,500 | 27.538462 | 78 | 0.667925 | false |
mikeboers/PyAV | tests/test_codec_context.py | 1 | 10799 | from fractions import Fraction
from unittest import SkipTest
import os
from av import AudioResampler, Codec, Packet
from av.codec.codec import UnknownCodecError
import av
from .common import TestCase, fate_suite
def iter_frames(container, stream):
for packet in container.demux(stream):
for frame in packet.decode():
yield frame
def iter_raw_frames(path, packet_sizes, ctx):
with open(path, 'rb') as f:
for i, size in enumerate(packet_sizes):
packet = Packet(size)
read_size = f.readinto(packet)
assert size
assert read_size == size
if not read_size:
break
for frame in ctx.decode(packet):
yield frame
while True:
try:
frames = ctx.decode(None)
except EOFError:
break
for frame in frames:
yield frame
if not frames:
break
class TestCodecContext(TestCase):
def test_skip_frame_default(self):
ctx = Codec('png', 'w').create()
self.assertEqual(ctx.skip_frame.name, 'DEFAULT')
def test_parse(self):
# This one parses into a single packet.
self._assert_parse('mpeg4', fate_suite('h264/interlaced_crop.mp4'))
# This one parses into many small packets.
self._assert_parse('mpeg2video', fate_suite('mpeg2/mpeg2_field_encoding.ts'))
def _assert_parse(self, codec_name, path):
fh = av.open(path)
packets = []
for packet in fh.demux(video=0):
packets.append(packet)
full_source = b''.join(p.to_bytes() for p in packets)
for size in 1024, 8192, 65535:
ctx = Codec(codec_name).create()
packets = []
for i in range(0, len(full_source), size):
block = full_source[i:i + size]
packets.extend(ctx.parse(block))
packets.extend(ctx.parse())
parsed_source = b''.join(p.to_bytes() for p in packets)
self.assertEqual(len(parsed_source), len(full_source))
self.assertEqual(full_source, parsed_source)
class TestEncoding(TestCase):
def test_encoding_png(self):
self.image_sequence_encode('png')
def test_encoding_mjpeg(self):
self.image_sequence_encode('mjpeg')
def test_encoding_tiff(self):
self.image_sequence_encode('tiff')
def image_sequence_encode(self, codec_name):
try:
codec = Codec(codec_name, 'w')
except UnknownCodecError:
raise SkipTest()
container = av.open(fate_suite('h264/interlaced_crop.mp4'))
video_stream = container.streams.video[0]
width = 640
height = 480
ctx = codec.create()
pix_fmt = ctx.codec.video_formats[0].name
ctx.width = width
ctx.height = height
ctx.time_base = video_stream.codec_context.time_base
ctx.pix_fmt = pix_fmt
ctx.open()
frame_count = 1
path_list = []
for frame in iter_frames(container, video_stream):
new_frame = frame.reformat(width, height, pix_fmt)
new_packets = ctx.encode(new_frame)
self.assertEqual(len(new_packets), 1)
new_packet = new_packets[0]
path = self.sandboxed('%s/encoder.%04d.%s' % (
codec_name,
frame_count,
codec_name if codec_name != 'mjpeg' else 'jpg',
))
path_list.append(path)
with open(path, 'wb') as f:
f.write(new_packet)
frame_count += 1
if frame_count > 5:
break
ctx = av.Codec(codec_name, 'r').create()
for path in path_list:
with open(path, 'rb') as f:
size = os.fstat(f.fileno()).st_size
packet = Packet(size)
size = f.readinto(packet)
frame = ctx.decode(packet)[0]
self.assertEqual(frame.width, width)
self.assertEqual(frame.height, height)
self.assertEqual(frame.format.name, pix_fmt)
def test_encoding_h264(self):
self.video_encoding('libx264', {'crf': '19'})
def test_encoding_mpeg4(self):
self.video_encoding('mpeg4')
def test_encoding_mpeg1video(self):
self.video_encoding('mpeg1video')
def test_encoding_dvvideo(self):
options = {'pix_fmt': 'yuv411p',
'width': 720,
'height': 480}
self.video_encoding('dvvideo', options)
def test_encoding_dnxhd(self):
options = {'b': '90M', # bitrate
'pix_fmt': 'yuv422p',
'width': 1920,
'height': 1080,
'time_base': '1001/30000',
'max_frames': 5}
self.video_encoding('dnxhd', options)
def video_encoding(self, codec_name, options={}):
try:
codec = Codec(codec_name, 'w')
except UnknownCodecError:
raise SkipTest()
container = av.open(fate_suite('h264/interlaced_crop.mp4'))
video_stream = container.streams.video[0]
pix_fmt = options.pop('pix_fmt', 'yuv420p')
width = options.pop('width', 640)
height = options.pop('height', 480)
max_frames = options.pop('max_frames', 50)
time_base = options.pop('time_base', video_stream.codec_context.time_base)
ctx = codec.create()
ctx.width = width
ctx.height = height
ctx.time_base = time_base
ctx.framerate = 1 / ctx.time_base
ctx.pix_fmt = pix_fmt
ctx.options = options # TODO
ctx.open()
path = self.sandboxed('encoder.%s' % codec_name)
packet_sizes = []
frame_count = 0
with open(path, 'wb') as f:
for frame in iter_frames(container, video_stream):
"""
bad_frame = frame.reformat(width, 100, pix_fmt)
with self.assertRaises(ValueError):
ctx.encode(bad_frame)
bad_frame = frame.reformat(100, height, pix_fmt)
with self.assertRaises(ValueError):
ctx.encode(bad_frame)
bad_frame = frame.reformat(width, height, "rgb24")
with self.assertRaises(ValueError):
ctx.encode(bad_frame)
"""
if frame:
frame_count += 1
new_frame = frame.reformat(width, height, pix_fmt) if frame else None
for packet in ctx.encode(new_frame):
packet_sizes.append(packet.size)
f.write(packet)
if frame_count >= max_frames:
break
for packet in ctx.encode(None):
packet_sizes.append(packet.size)
f.write(packet)
dec_codec_name = codec_name
if codec_name == 'libx264':
dec_codec_name = 'h264'
ctx = av.Codec(dec_codec_name, 'r').create()
ctx.open()
decoded_frame_count = 0
for frame in iter_raw_frames(path, packet_sizes, ctx):
decoded_frame_count += 1
self.assertEqual(frame.width, width)
self.assertEqual(frame.height, height)
self.assertEqual(frame.format.name, pix_fmt)
self.assertEqual(frame_count, decoded_frame_count)
def test_encoding_pcm_s24le(self):
self.audio_encoding('pcm_s24le')
def test_encoding_aac(self):
self.audio_encoding('aac')
def test_encoding_mp2(self):
self.audio_encoding('mp2')
def audio_encoding(self, codec_name):
try:
codec = Codec(codec_name, 'w')
except UnknownCodecError:
raise SkipTest()
ctx = codec.create()
if ctx.codec.experimental:
raise SkipTest()
sample_fmt = ctx.codec.audio_formats[-1].name
sample_rate = 48000
channel_layout = "stereo"
channels = 2
ctx.time_base = Fraction(1) / sample_rate
ctx.sample_rate = sample_rate
ctx.format = sample_fmt
ctx.layout = channel_layout
ctx.channels = channels
ctx.open()
resampler = AudioResampler(sample_fmt, channel_layout, sample_rate)
container = av.open(fate_suite('audio-reference/chorusnoise_2ch_44kHz_s16.wav'))
audio_stream = container.streams.audio[0]
path = self.sandboxed('encoder.%s' % codec_name)
samples = 0
packet_sizes = []
with open(path, 'wb') as f:
for frame in iter_frames(container, audio_stream):
# We need to let the encoder retime.
frame.pts = None
"""
bad_resampler = AudioResampler(sample_fmt, "mono", sample_rate)
bad_frame = bad_resampler.resample(frame)
with self.assertRaises(ValueError):
next(encoder.encode(bad_frame))
bad_resampler = AudioResampler(sample_fmt, channel_layout, 3000)
bad_frame = bad_resampler.resample(frame)
with self.assertRaises(ValueError):
next(encoder.encode(bad_frame))
bad_resampler = AudioResampler('u8', channel_layout, 3000)
bad_frame = bad_resampler.resample(frame)
with self.assertRaises(ValueError):
next(encoder.encode(bad_frame))
"""
resampled_frame = resampler.resample(frame)
samples += resampled_frame.samples
for packet in ctx.encode(resampled_frame):
                    # bytearray because python can
                    # freak out if the first byte is NULL
f.write(bytearray(packet))
packet_sizes.append(packet.size)
for packet in ctx.encode(None):
packet_sizes.append(packet.size)
f.write(bytearray(packet))
ctx = Codec(codec_name, 'r').create()
ctx.time_base = Fraction(1) / sample_rate
ctx.sample_rate = sample_rate
ctx.format = sample_fmt
ctx.layout = channel_layout
ctx.channels = channels
ctx.open()
result_samples = 0
# should have more asserts but not sure what to check
# libav and ffmpeg give different results
        # so we can't really use checksums
for frame in iter_raw_frames(path, packet_sizes, ctx):
result_samples += frame.samples
self.assertEqual(frame.rate, sample_rate)
self.assertEqual(len(frame.layout.channels), channels)
| bsd-3-clause | 7,327,797,646,894,519,000 | 30.301449 | 88 | 0.548292 | false |
cjmathy/ann_bmi203 | ann/rap1.py | 1 | 6499 | import numpy as np
import re
import random
def prepare_data():
"""This method prepares input positive and negative datasets as bitvectors for the Rap1 binding problem. Output: three lists of bitvectors, one containing positive samples, negative samples that are similar to positive samples, and negative examples that are randomly chosen from the fasta sequences. All bitvectors are 17 bp (34 bits) long"""
# read in all positive data, convert to bitvectors
pos_str = read_positives()
pos_vec = str_to_vec(pos_str)
# read in all negative data. then, remove false negatives from the negative fa sequences and their reverse complements. Call this new set of sequences and their reverse complements "neg_str".
neg_str = read_negatives()
neg_str = remove_falseneg(neg_str, pos_str)
rc_neg_str = reverse_complement(neg_str)
rc_neg_str = remove_falseneg(rc_neg_str, pos_str)
neg_str = reverse_complement(rc_neg_str)
neg_str = neg_str + rc_neg_str
    # cache interesting cases as "neg_sim". interesting cases are those that look similar to the positive sequences (in that they contain cytosines at positions 5, 6, and 10) but are considered negative. also cache randomly chosen sequences, so that the neural net can be trained on sequences that are not similar to positive examples.
neg_sim, neg_rand = cache_cases(neg_str)
neg_sim_vec = str_to_vec(neg_sim)
neg_rand_vec = str_to_vec(neg_rand)
return pos_vec, neg_sim_vec, neg_rand_vec
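# Illustrative usage sketch (added for clarity, not in the original module):
# the three lists returned above feed the training-set builders defined later
# in this file, roughly as
#
#     pos_vec, neg_sim_vec, neg_rand_vec = prepare_data()
#     X, y = build_training_set(pos_vec, neg_sim_vec, neg_rand_vec)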
def read_positives():
"reads in positive samples as strings"
seqs = []
file = '/Users/cjmathy/Documents/courses/bmi203/Final-Project/ann_bmi203/rap1-lieb-positives.txt'
with open(file, 'rb') as f:
for seq in f:
seqs.append(seq.strip())
return seqs
def read_negatives():
"reads in negative samples as strings"
seqs = []
file = '/Users/cjmathy/Documents/courses/bmi203/Final-Project/ann_bmi203/yeast-upstream-1k-negative.fa'
with open(file, 'rb') as f:
sequence = ''
for line in f:
            if line[0] != '>':
sequence += line.strip()
else:
if sequence:
seqs.append(sequence)
sequence = ""
        # keep the final record, which is not followed by another '>' header
        if sequence:
            seqs.append(sequence)
    return seqs
def str_to_vec(sequences):
"""converts nucleotide strings into vectors using a 2-bit encoding scheme."""
vecs = []
nuc2bit = {"A": (0, 0),
"C": (0, 1),
"T": (1, 0),
"G": (1, 1)}
for seq in sequences:
vec = []
for nuc in seq:
vec.append(nuc2bit[nuc][0])
vec.append(nuc2bit[nuc][1])
vecs.append(vec)
return vecs
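# Worked example of the 2-bit encoding above (illustrative only): "ACGT" maps
# to A->(0,0), C->(0,1), G->(1,1), T->(1,0), so
# str_to_vec(["ACGT"]) == [[0, 0, 0, 1, 1, 1, 1, 0]].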
def remove_falseneg(negatives, positives):
"""this method removes any negative fasta sequences that contain one of the positive sample sequences (essentially making them false negatives."""
seqs = []
for n in negatives:
if not any(p in n for p in positives):
seqs.append(n)
return seqs
def reverse_complement(sequences):
"""returns a list of reverse complemented sequences"""
rc = []
complement = {'A': 'T',
'C': 'G',
'G': 'C',
'T': 'A'}
for seq in sequences:
seq = list(seq)
seq = reversed([complement.get(nuc) for nuc in seq])
seq = ''.join(seq)
rc.append(seq)
return rc
def cache_cases(sequences):
"""this method separates the negative data into two sets: those that contain the Rap1 binding signature sequence, and a set that is randomly chosen from the negative data."""
# 1) cache negative cases that are similar to positives
sim_cache = []
for seq in sequences:
matches = re.findall(r'....CC...C.......', seq)
for match in matches:
sim_cache.append(match)
sim_cache = list(set(sim_cache))
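    # For example (illustrative), 'ACGTCCAAACGGGGGGG' is 17 bp with 'CC' at
    # positions 5-6 and 'C' at position 10, so it matches the pattern above
    # and would be cached as a similar-to-positive negative.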
# 2) cache randomly chosen 17 bp negatives. 5 from each fa sequence (including reverse complements). there are about 30000 neg_sim samples, so this will create about 30000 neg_rand samples from the 3000 sequences and their 3000 reverse complements.
bp = 17
rand_cache = []
for seq in sequences:
for _ in xrange(5):
i = random.randint(0, len(seq)-bp)
substr = seq[i:i+bp]
rand_cache.append(substr)
return sim_cache, rand_cache
def build_training_set(pos, neg_sim, neg_rand):
"""Builds a training set using 50% positive data, and 50% negative data. Negative data consists equally of similar-to-positve and random negative sequences"""
# we have 137 positive examples, 30000 special negative examples, and 30000 random negative examples, all 34 bits long. take 69 special negative examples and 68 random negative examples. add them to the positive examples to make our training set.
neg = []
for _ in xrange(69):
i = np.random.randint(0, len(neg_sim))
neg.append(neg_sim[i])
for _ in xrange(68):
i = np.random.randint(0, len(neg_rand))
neg.append(neg_rand[i])
Xp = np.array(pos)
Xn = np.array(neg)
X = np.concatenate((Xp, Xn), axis=0) # nd array, 274 x 34
yp = np.ones((Xp.shape[0],))
yn = np.zeros((Xn.shape[0],))
y = np.concatenate((yp, yn), axis=0) # nd array, 34 x 1
return X, y
def build_training_set_100(pos, neg_sim, neg_rand):
"""same as above, but allowing for some positive and negative samples to be held out as a test set"""
neg = []
for _ in xrange(50):
i = np.random.randint(0, len(neg_sim))
neg.append(neg_sim[i])
for _ in xrange(50):
i = np.random.randint(0, len(neg_rand))
neg.append(neg_rand[i])
Xp = np.array(pos)
Xn = np.array(neg)
X = np.concatenate((Xp, Xn), axis=0)
yp = np.ones((Xp.shape[0],))
yn = np.zeros((Xn.shape[0],))
y = np.concatenate((yp, yn), axis=0)
return X, y
def build_test_set(pos, neg_sim, neg_rand):
"""same as above, but allowing for some positive and negative samples to be held out as a test set"""
neg = []
for _ in xrange(19):
i = np.random.randint(0, len(neg_sim))
neg.append(neg_sim[i])
for _ in xrange(18):
i = np.random.randint(0, len(neg_rand))
neg.append(neg_rand[i])
Xp = np.array(pos)
Xn = np.array(neg)
X = np.concatenate((Xp, Xn), axis=0)
yp = np.ones((Xp.shape[0],))
yn = np.zeros((Xn.shape[0],))
y = np.concatenate((yp, yn), axis=0)
return X, y
| apache-2.0 | 5,019,768,587,822,818,000 | 35.717514 | 347 | 0.624558 | false |
m87/pyEM | stepwise.py | 1 | 3019 | from thirdparty import log_mvnpdf, log_mvnpdf_diag
import numpy as np
from online import *
from scipy.misc import logsumexp
from gaussEM import GaussEM
class Stepwise(OnlineEM):
def __init__(self, param):
super().__init__(param)
self.param = float(param['alpha'])
self.skip = int(param['skip'])
self.mbsize= int(param['mb'])
def prepare(self, dataset):
super().prepare(dataset)
class StepwiseGauss(Stepwise, GaussEM):
def __init__(self, param):
super().__init__(param)
self.cov = param['cov']
self.C = float(param['smoothing'])
self.mvnpdf = {'full': log_mvnpdf, 'diag': log_mvnpdf_diag}
def e(self, X):
lg = self.mvnpdf[self.cov](np.array([X]), self.means, self.COV[self.cov])
#s = np.inner((X - self.means),(X-self.means))
#print(s)
#print(self.means[0])
logResps = lg[0] + np.log(self.weights)
self.histAcc += logsumexp(logResps)
self.hist.append(-self.histAcc/self.N)
#self.hist.append(logsumexp(logResps))
maxLg = np.max(logResps)
logResps -= maxLg
self.resps = np.exp(logResps)
np.clip(self.resps, 10*EPS, np.inf, out=self.resps)
self.resps /= np.sum(self.resps)
self.N += 1
lam = np.power(self.N+2, -float(self.param))
for c in np.arange(self.n):
self.accResps[c]= (1-lam) * self.accResps[c] + lam * self.resps[c]
self.accMeans[c]= (1-lam)* self.accMeans[c] + lam * X * self.resps[c]
tmp = self.accMeans[c] / self.accResps[c]
diff = X - tmp
self.accCovars[c] = (1-lam) * self.accCovars[c] + lam * np.outer(self.resps[c] * diff, diff)
self.accResps /= np.sum(self.accResps)
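        # Note added for clarity (not in the original source): the loop above
        # is the stepwise-EM update of the sufficient statistics. For each
        # component c,
        #     s_c <- (1 - lam) * s_c + lam * s_c(x),  lam = (N + 2)**(-alpha),
        # where s_c gathers the responsibility, the responsibility-weighted
        # sample, and the responsibility-weighted outer product of the
        # centered sample used for the covariance.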
def m(self, X):
if self.N < self.skip: return
if self.N % self.mbsize != 0:
return
for c in np.arange(self.n):
self.weights[c] = self.accResps[c] / (self.N+ 10*EPS ) + EPS
self.means[c] = (self.accMeans[c] + 10* EPS)/ (self.accResps[c] + 10 * EPS )
self.covars[c] = (self.accCovars[c] + 10* EPS * np.identity(self.dim))/ (self.accResps[c] + 10 * EPS ) * self.I[self.cov]
self.diagCovars[c] = np.diag(self.covars[c])
self.weights /= sum(self.weights)
def prepare(self,dataset):
super().prepare(dataset)
self.accResps = np.zeros((self.n,))
self.accMeans = np.zeros((self.n,self.dim))
self.accCovars = np.zeros((self.n,self.dim,self.dim))
self.weights = np.ones((self.n,))
self.weights /= self.n
self.means = np.zeros((self.n,self.dim))
for it,x in enumerate(dataset.I):
self.means[it] = x
self.covars = np.array([np.identity(self.dim) for x in range(self.n)])
self.diagCovars = np.ones((self.n,self.dim))
self.COV = {'full' : self.covars, 'diag' : self.diagCovars}
self.I ={'full': 1.0, 'diag': np.identity(self.dim)}
| mit | 1,769,778,788,891,410,400 | 34.940476 | 133 | 0.56575 | false |
lama7/blogtool | doc/source/conf.py | 1 | 7957 | # -*- coding: utf-8 -*-
#
# blogtool documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 21 11:36:34 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import re
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'blogtool'
copyright = u'2013, Gerry LaMontagne'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
VERSIONFILE = "../../blogtool/__version__.py"
m = re.search("^__version__ = [\"](.*?)[\"]", open(VERSIONFILE, "r").read())
if m:
version = m.group(1)
else:
raise RuntimeError("Could not extract version string.\n")
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'haiku'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'blogtooldoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'blogtool.tex', u'blogtool Documentation',
u'Gerry LaMontagne', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'blogtool', u'blogtool Documentation',
[u'Gerry LaMontagne'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'blogtool', u'blogtool Documentation',
u'Gerry LaMontagne', 'blogtool', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| mit | 1,635,249,381,925,181,000 | 30.955823 | 80 | 0.702149 | false |
gunan/tensorflow | tensorflow/python/kernel_tests/rnn_test.py | 1 | 32669 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for rnn module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
import timeit
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops as ops_lib
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import rnn
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variables as variables_lib
import tensorflow.python.ops.data_flow_grad # pylint: disable=unused-import
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
import tensorflow.python.ops.sparse_grad # pylint: disable=unused-import
import tensorflow.python.ops.tensor_array_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
from tensorflow.python.training import saver
class Plus1RNNCell(rnn_cell_impl.RNNCell):
"""RNN Cell generating (output, new_state) = (input + 1, state + 1)."""
@property
def output_size(self):
return 5
@property
def state_size(self):
return 5
def call(self, input_, state, scope=None):
return (input_ + 1, state + 1)
class ScalarStateRNNCell(rnn_cell_impl.RNNCell):
"""RNN Cell generating (output, new_state) = (input + 1, state + 1)."""
@property
def output_size(self):
return 1
@property
def state_size(self):
return tensor_shape.TensorShape([])
def zero_state(self, batch_size, dtype):
return array_ops.zeros([], dtype=dtypes.int32)
def call(self, input_, state, scope=None):
return (input_, state + 1)
class UnbalancedOutputRNNCell(rnn_cell_impl.RNNCell):
"""RNN Cell generating (output, new_state) = (input + 1, state + 1)."""
@property
def output_size(self):
return tensor_shape.TensorShape(1), tensor_shape.TensorShape((2))
@property
def state_size(self):
return tensor_shape.TensorShape([])
def zero_state(self, batch_size, dtype):
return array_ops.zeros([], dtype=dtypes.int32)
def call(self, input_, state, scope=None):
concatenated = array_ops.concat((input_, input_), axis=-1)
return (input_, concatenated), state + 1
class TensorArrayStateRNNCell(rnn_cell_impl.RNNCell):
"""RNN Cell its state as a TensorArray."""
@property
def output_size(self):
return 1
@property
def state_size(self):
return (tensor_shape.TensorShape([]), ())
def zero_state(self, batch_size, dtype):
return (array_ops.zeros([], dtype=dtypes.int32),
tensor_array_ops.TensorArray(
dtype=dtype, size=0, dynamic_size=True))
def call(self, input_, state, scope=None):
new_array = state[1].write(state[0], input_)
return (input_, (state[0] + 1, new_array))
class RNNTest(test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
@test_util.run_in_graph_and_eager_modes
def testInvalidSequenceLengthShape(self):
cell = Plus1RNNCell()
if context.executing_eagerly():
inputs = [constant_op.constant(np.ones((3, 4)))]
else:
inputs = [array_ops.placeholder(dtypes.float32, shape=(3, 4))]
with self.assertRaisesRegexp(ValueError, "must be a vector"):
rnn.dynamic_rnn(
cell,
array_ops.stack(inputs),
dtype=dtypes.float32,
sequence_length=[[4]])
@test_util.run_in_graph_and_eager_modes
def testInvalidDtype(self):
if context.executing_eagerly():
inputs = np.zeros((3, 4, 5), dtype=np.int32)
else:
inputs = array_ops.placeholder(dtypes.int32, shape=(3, 4, 5))
cells = [
rnn_cell_impl.BasicRNNCell,
rnn_cell_impl.GRUCell,
rnn_cell_impl.BasicLSTMCell,
rnn_cell_impl.LSTMCell,
]
for cell_cls in cells:
with self.cached_session():
with self.assertRaisesRegexp(
ValueError, "RNN cell only supports floating"):
cell = cell_cls(2, dtype=dtypes.int32)
rnn.dynamic_rnn(cell, inputs, dtype=dtypes.int32)
@test_util.run_in_graph_and_eager_modes
def testBatchSizeFromInput(self):
cell = Plus1RNNCell()
in_eager_mode = context.executing_eagerly()
# With static batch size
if in_eager_mode:
inputs = np.zeros((3, 4, 5), dtype=np.float32)
initial_state = np.zeros((3, 5), dtype=np.float32)
else:
inputs = array_ops.placeholder(dtypes.float32, shape=(3, 4, 5))
initial_state = array_ops.placeholder(dtypes.float32, shape=(3, 5))
# - Without initial_state
outputs, state = rnn.dynamic_rnn(cell, inputs, dtype=dtypes.float32)
self.assertEqual(3, outputs.shape[0])
self.assertEqual(3, state.shape[0])
# - With initial_state
outputs, state = rnn.dynamic_rnn(
cell, inputs, initial_state=initial_state)
self.assertEqual(3, outputs.shape[0])
self.assertEqual(3, state.shape[0])
# Without static batch size
# Tensor shapes are fully determined with eager execution enabled,
# so only run this test for graph construction.
if not in_eager_mode:
inputs = array_ops.placeholder(dtypes.float32, shape=(None, 4, 5))
# - Without initial_state
outputs, state = rnn.dynamic_rnn(cell, inputs, dtype=dtypes.float32)
self.assertEqual(None, outputs.shape.dims[0].value)
self.assertEqual(None, state.shape.dims[0].value)
# - With initial_state
outputs, state = rnn.dynamic_rnn(
cell,
inputs,
initial_state=array_ops.placeholder(dtypes.float32, shape=(None, 5)))
self.assertEqual(None, outputs.shape.dims[0].value)
self.assertEqual(None, state.shape.dims[0].value)
@test_util.run_in_graph_and_eager_modes
def testScalarStateIsAccepted(self):
cell = ScalarStateRNNCell()
in_eager_mode = context.executing_eagerly()
if in_eager_mode:
inputs = np.array([[[1], [2], [3], [4]]], dtype=np.float32)
else:
inputs = array_ops.placeholder(dtypes.float32, shape=(1, 4, 1))
with self.cached_session(use_gpu=True) as sess:
outputs, state = rnn.dynamic_rnn(
cell, inputs, dtype=dtypes.float32, sequence_length=[4])
if not in_eager_mode:
outputs, state = sess.run(
[outputs, state], feed_dict={inputs: [[[1], [2], [3], [4]]]})
self.assertAllEqual([[[1], [2], [3], [4]]], outputs)
self.assertAllEqual(4, state)
@test_util.run_in_graph_and_eager_modes
def testUnbalancedOutputIsAccepted(self):
cell = UnbalancedOutputRNNCell()
in_eager_mode = context.executing_eagerly()
if in_eager_mode:
inputs = np.array([[[1], [2], [3], [4]]], dtype=np.float32)
else:
inputs = array_ops.placeholder(dtypes.float32, shape=(1, 4, 1))
with self.cached_session(use_gpu=True) as sess:
outputs, state = rnn.dynamic_rnn(
cell, inputs, dtype=dtypes.float32, sequence_length=[4])
if not in_eager_mode:
outputs, state = sess.run(
[outputs, state], feed_dict={inputs: [[[1], [2], [3], [4]]]})
self.assertIsInstance(outputs, tuple)
self.assertAllEqual([[[1], [2], [3], [4]]], outputs[0])
self.assertAllEqual([[[1, 1], [2, 2], [3, 3], [4, 4]]], outputs[1])
self.assertAllEqual(4, state)
@test_util.assert_no_new_pyobjects_executing_eagerly
def testEagerMemory(self):
with context.eager_mode():
cell = TensorArrayStateRNNCell()
inputs = np.array([[[1], [2], [3], [4]]], dtype=np.float32)
rnn.dynamic_rnn(cell, inputs, dtype=dtypes.float32, sequence_length=[4])
@test_util.run_in_graph_and_eager_modes
@test_util.run_v1_only("b/120545219")
def testTensorArrayStateIsAccepted(self):
cell = TensorArrayStateRNNCell()
in_eager_mode = context.executing_eagerly()
if in_eager_mode:
inputs = np.array([[[1], [2], [3], [4]]], dtype=np.float32)
else:
inputs = array_ops.placeholder(dtypes.float32, shape=(1, 4, 1))
with self.cached_session(use_gpu=True) as sess:
outputs, state = rnn.dynamic_rnn(
cell, inputs, dtype=dtypes.float32, sequence_length=[4])
state = (state[0], state[1].stack())
if not in_eager_mode:
outputs, state = sess.run(
[outputs, state], feed_dict={
inputs: [[[1], [2], [3], [4]]]
})
self.assertAllEqual([[[1], [2], [3], [4]]], outputs)
self.assertAllEqual(4, state[0])
self.assertAllEqual([[[1]], [[2]], [[3]], [[4]]], state[1])
@test_util.run_deprecated_v1
def testCellGetInitialState(self):
cell = rnn_cell_impl.BasicRNNCell(5)
with self.assertRaisesRegexp(
ValueError, "batch_size and dtype cannot be None"):
cell.get_initial_state(None, None, None)
inputs = array_ops.placeholder(dtypes.float32, shape=(None, 4, 1))
with self.assertRaisesRegexp(
ValueError, "batch size from input tensor is different from"):
cell.get_initial_state(inputs=inputs, batch_size=50, dtype=None)
with self.assertRaisesRegexp(
ValueError, "batch size from input tensor is different from"):
cell.get_initial_state(
inputs=inputs, batch_size=constant_op.constant(50), dtype=None)
with self.assertRaisesRegexp(
ValueError, "dtype from input tensor is different from"):
cell.get_initial_state(inputs=inputs, batch_size=None, dtype=dtypes.int16)
initial_state = cell.get_initial_state(
inputs=inputs, batch_size=None, dtype=None)
self.assertEqual(initial_state.shape.as_list(), [None, 5])
self.assertEqual(initial_state.dtype, inputs.dtype)
batch = array_ops.shape(inputs)[0]
dtype = inputs.dtype
initial_state = cell.get_initial_state(None, batch, dtype)
self.assertEqual(initial_state.shape.as_list(), [None, 5])
self.assertEqual(initial_state.dtype, inputs.dtype)
def _assert_cell_builds(self, cell_class, dtype, batch_size, in_size,
out_size):
cell = cell_class(out_size, dtype=dtype)
in_shape = tensor_shape.TensorShape((batch_size, in_size))
cell.build(in_shape)
state_output = cell.get_initial_state(
inputs=None, batch_size=batch_size, dtype=dtype)
cell_output, _ = cell(array_ops.zeros(in_shape, dtype), state_output)
self.assertAllEqual([batch_size, out_size], cell_output.shape.as_list())
@test_util.run_in_graph_and_eager_modes
def testCellsBuild(self):
f32 = dtypes.float32
f64 = dtypes.float64
self._assert_cell_builds(rnn_cell_impl.BasicRNNCell, f32, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.BasicRNNCell, f64, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.BasicLSTMCell, f32, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.BasicLSTMCell, f64, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.GRUCell, f32, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.GRUCell, f64, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.LSTMCell, f32, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.LSTMCell, f64, 5, 7, 3)
@test_util.run_deprecated_v1
def testBasicLSTMCellInterchangeWithLSTMCell(self):
with self.session(graph=ops_lib.Graph()) as sess:
basic_cell = rnn_cell_impl.BasicLSTMCell(1)
basic_cell(array_ops.ones([1, 1]),
state=basic_cell.get_initial_state(inputs=None,
batch_size=1,
dtype=dtypes.float32))
self.evaluate([v.initializer for v in basic_cell.variables])
self.evaluate(basic_cell._bias.assign([10.] * 4))
save = saver.Saver()
prefix = os.path.join(self.get_temp_dir(), "ckpt")
save_path = save.save(sess, prefix)
with self.session(graph=ops_lib.Graph()) as sess:
lstm_cell = rnn_cell_impl.LSTMCell(1, name="basic_lstm_cell")
lstm_cell(array_ops.ones([1, 1]),
state=lstm_cell.get_initial_state(inputs=None,
batch_size=1,
dtype=dtypes.float32))
self.evaluate([v.initializer for v in lstm_cell.variables])
save = saver.Saver()
save.restore(sess, save_path)
self.assertAllEqual([10.] * 4, self.evaluate(lstm_cell._bias))
######### Benchmarking RNN code
def _static_vs_dynamic_rnn_benchmark_static(inputs_list_t, sequence_length):
(_, input_size) = inputs_list_t[0].get_shape().as_list()
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = rnn_cell_impl.LSTMCell(
num_units=input_size,
use_peepholes=True,
initializer=initializer,
state_is_tuple=False)
outputs, final_state = rnn.static_rnn(
cell,
inputs_list_t,
sequence_length=sequence_length,
dtype=dtypes.float32)
trainable_variables = ops_lib.get_collection(
ops_lib.GraphKeys.TRAINABLE_VARIABLES)
gradients = gradients_impl.gradients(outputs + [final_state],
trainable_variables)
return control_flow_ops.group(final_state, *(gradients + outputs))
def _static_vs_dynamic_rnn_benchmark_dynamic(inputs_t, sequence_length):
(unused_0, unused_1, input_size) = inputs_t.get_shape().as_list()
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = rnn_cell_impl.LSTMCell(
num_units=input_size,
use_peepholes=True,
initializer=initializer,
state_is_tuple=False)
outputs, final_state = rnn.dynamic_rnn(
cell, inputs_t, sequence_length=sequence_length, dtype=dtypes.float32)
trainable_variables = ops_lib.get_collection(
ops_lib.GraphKeys.TRAINABLE_VARIABLES)
gradients = gradients_impl.gradients([outputs, final_state],
trainable_variables)
return control_flow_ops.group(final_state, outputs, *gradients)
def graph_creation_static_vs_dynamic_rnn_benchmark(max_time):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
# These parameters don't matter
batch_size = 512
num_units = 512
# Set up sequence lengths
np.random.seed([127])
sequence_length = np.random.randint(0, max_time, size=batch_size)
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)
]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
def _create_static_rnn():
with session.Session(config=config, graph=ops_lib.Graph()):
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
_static_vs_dynamic_rnn_benchmark_static(inputs_list_t, sequence_length)
def _create_dynamic_rnn():
with session.Session(config=config, graph=ops_lib.Graph()):
inputs_t = variables_lib.Variable(inputs, trainable=False).value()
_static_vs_dynamic_rnn_benchmark_dynamic(inputs_t, sequence_length)
delta_static = timeit.timeit(_create_static_rnn, number=5)
delta_dynamic = timeit.timeit(_create_dynamic_rnn, number=5)
print("%d \t %f \t %f \t %f" %
(max_time, delta_static, delta_dynamic, delta_dynamic / delta_static))
return delta_static, delta_dynamic
def _timer(sess, ops):
  # Warm up
for _ in range(2):
sess.run(ops)
# Timing run
runs = 20
start = time.time()
for _ in range(runs):
sess.run(ops)
end = time.time()
return (end - start) / float(runs)
def static_vs_dynamic_rnn_benchmark(batch_size, max_time, num_units, use_gpu):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = np.random.randint(0, max_time, size=batch_size)
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)
]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
# Using rnn()
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
with ops_lib.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
ops = _static_vs_dynamic_rnn_benchmark_static(inputs_list_t,
sequence_length)
variables_lib.global_variables_initializer().run()
delta_static = _timer(sess, ops)
# Using dynamic_rnn()
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
with ops_lib.device("/cpu:0" if not use_gpu else None):
inputs_t = variables_lib.Variable(inputs, trainable=False).value()
ops = _static_vs_dynamic_rnn_benchmark_dynamic(inputs_t, sequence_length)
variables_lib.global_variables_initializer().run()
delta_dynamic = _timer(sess, ops)
print("%d \t %d \t %d \t %s \t %f \t %f \t %f" %
(batch_size, max_time, num_units, use_gpu, delta_static, delta_dynamic,
delta_dynamic / delta_static))
return delta_static, delta_dynamic
def _half_seq_len_vs_unroll_half_rnn_benchmark(inputs_list_t, sequence_length):
(_, input_size) = inputs_list_t[0].get_shape().as_list()
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = rnn_cell_impl.LSTMCell(
num_units=input_size,
use_peepholes=True,
initializer=initializer,
state_is_tuple=False)
outputs, final_state = rnn.static_rnn(
cell,
inputs_list_t,
sequence_length=sequence_length,
dtype=dtypes.float32)
trainable_variables = ops_lib.get_collection(
ops_lib.GraphKeys.TRAINABLE_VARIABLES)
gradients = gradients_impl.gradients(outputs + [final_state],
trainable_variables)
return control_flow_ops.group(final_state, *(gradients + outputs))
def half_seq_len_vs_unroll_half_rnn_benchmark(batch_size, max_time, num_units,
use_gpu):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = max_time * np.ones((batch_size,))
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)
]
# Halve the sequence length, full static unroll
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
with ops_lib.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
ops = _half_seq_len_vs_unroll_half_rnn_benchmark(inputs_list_t,
sequence_length / 2)
variables_lib.global_variables_initializer().run()
delta_half_seq_len = _timer(sess, ops)
# Halve the unroll size, don't use sequence length
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
with ops_lib.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
ops = _half_seq_len_vs_unroll_half_rnn_benchmark(
inputs_list_t[:(max_time // 2)], sequence_length / 2)
variables_lib.global_variables_initializer().run()
delta_unroll_half = _timer(sess, ops)
print("%d \t %d \t\t %d \t %s \t %f \t\t %f \t\t %f" %
(batch_size, max_time, num_units, use_gpu, delta_half_seq_len,
delta_unroll_half, delta_half_seq_len / delta_unroll_half))
return delta_half_seq_len, delta_unroll_half
def _concat_state_vs_tuple_state_rnn_benchmark(inputs_list_t, sequence_length,
state_is_tuple):
(_, input_size) = inputs_list_t[0].get_shape().as_list()
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = rnn_cell_impl.LSTMCell(
num_units=input_size,
use_peepholes=True,
initializer=initializer,
state_is_tuple=state_is_tuple)
outputs, final_state = rnn.static_rnn(
cell,
inputs_list_t,
sequence_length=sequence_length,
dtype=dtypes.float32)
final_state = list(final_state) if state_is_tuple else [final_state]
trainable_variables = ops_lib.get_collection(
ops_lib.GraphKeys.TRAINABLE_VARIABLES)
gradients = gradients_impl.gradients(outputs + final_state,
trainable_variables)
return control_flow_ops.group(*(final_state + gradients + outputs))
def concat_state_vs_tuple_state_rnn_benchmark(batch_size, max_time, num_units,
use_gpu):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = max_time * np.ones((batch_size,))
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)
]
# Run with concatenated states (default)
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
with ops_lib.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
ops = _concat_state_vs_tuple_state_rnn_benchmark(
inputs_list_t, sequence_length, state_is_tuple=False)
variables_lib.global_variables_initializer().run()
delta_concat_state = _timer(sess, ops)
# Run with tuple states (new)
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
with ops_lib.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
ops = _concat_state_vs_tuple_state_rnn_benchmark(
inputs_list_t, sequence_length, state_is_tuple=True)
variables_lib.global_variables_initializer().run()
delta_tuple_state = _timer(sess, ops)
print("%d \t %d \t %d \t %s \t %f \t\t %f \t\t %f" %
(batch_size, max_time, num_units, use_gpu, delta_concat_state,
delta_tuple_state, delta_concat_state / delta_tuple_state))
return delta_concat_state, delta_tuple_state
def _dynamic_rnn_swap_memory_benchmark(inputs_t, sequence_length, swap_memory):
(unused_0, unused_1, input_size) = inputs_t.get_shape().as_list()
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = rnn_cell_impl.LSTMCell(
num_units=input_size,
use_peepholes=True,
initializer=initializer,
state_is_tuple=False)
outputs, final_state = rnn.dynamic_rnn(
cell,
inputs_t,
sequence_length=sequence_length,
swap_memory=swap_memory,
dtype=dtypes.float32)
trainable_variables = ops_lib.get_collection(
ops_lib.GraphKeys.TRAINABLE_VARIABLES)
gradients = gradients_impl.gradients([outputs, final_state],
trainable_variables)
return control_flow_ops.group(final_state, outputs, *gradients)
def dynamic_rnn_swap_memory_benchmark(batch_size, max_time, num_units):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = np.random.randint(0, max_time, size=batch_size)
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)
]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
# No memory swap
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
inputs_t = variables_lib.Variable(inputs, trainable=False).value()
ops = _dynamic_rnn_swap_memory_benchmark(
inputs_t, sequence_length, swap_memory=False)
variables_lib.global_variables_initializer().run()
no_swap = _timer(sess, ops)
# Memory swap
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
inputs_t = variables_lib.Variable(inputs, trainable=False).value()
ops = _dynamic_rnn_swap_memory_benchmark(
inputs_t, sequence_length, swap_memory=True)
variables_lib.global_variables_initializer().run()
swap = _timer(sess, ops)
print("%d \t %d \t %d \t %f \t %f \t %f" %
(batch_size, max_time, num_units, no_swap, swap, swap / no_swap))
return no_swap, swap
def rnn_long_sequence_benchmark(batch_size, seqlen, num_units, dynamic,
swap_memory, nn):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = [seqlen for _ in range(batch_size)]
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(seqlen)
]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
for _ in range(nn):
if dynamic:
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
inputs_t = variables_lib.Variable(inputs, trainable=False).value()
ops = _dynamic_rnn_swap_memory_benchmark(
inputs_t, sequence_length, swap_memory=swap_memory)
variables_lib.global_variables_initializer().run()
elapsed = _timer(sess, ops)
else:
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
ops = _static_vs_dynamic_rnn_benchmark_static(inputs_list_t,
sequence_length)
variables_lib.global_variables_initializer().run()
elapsed = _timer(sess, ops)
print("%d \t %d \t %d \t %s \t %f \t %f" % (batch_size, seqlen, num_units,
dynamic, elapsed,
elapsed / seqlen))
class BenchmarkRNN(test.Benchmark):
def benchmarkGraphCreationStaticVsDynamicLSTM(self):
print("Graph Creation: Static Unroll vs. Dynamic Unroll LSTM")
print("max_t \t dt(static) \t dt(dynamic) \t dt(dynamic)/dt(static)")
for max_time in (1, 25, 50):
s_dt, d_dt = graph_creation_static_vs_dynamic_rnn_benchmark(max_time)
self.report_benchmark(
name="graph_creation_time_static_T%02d" % max_time,
iters=5,
wall_time=s_dt)
self.report_benchmark(
name="graph_creation_time_dynamic_T%02d" % max_time,
iters=5,
wall_time=d_dt)
def benchmarkStaticUnrollVsDynamicFlowLSTM(self):
print("Calculation: Static Unroll with Dynamic Flow LSTM "
"vs. Dynamic Unroll LSTM")
print("batch \t max_t \t units \t gpu \t dt(static) \t dt(dynamic) "
"\t dt(dynamic)/dt(static)")
for batch_size in (256,):
for max_time in (50,):
for num_units in (512, 256, 128):
for use_gpu in (False, True):
s_dt, d_dt = static_vs_dynamic_rnn_benchmark(batch_size, max_time,
num_units, use_gpu)
self.report_benchmark(
name="static_unroll_time_T%02d_B%03d_N%03d_gpu_%s" %
(max_time, batch_size, num_units, use_gpu),
iters=20,
wall_time=s_dt)
self.report_benchmark(
name="dynamic_unroll_time_T%02d_B%03d_N%03d_gpu_%s" %
(max_time, batch_size, num_units, use_gpu),
iters=20,
wall_time=d_dt)
def benchmarkDynamicLSTMNoMemorySwapVsMemorySwap(self):
print("Calculation: Dynamic LSTM No Memory Swap vs. Memory Swap")
print("batch \t max_t \t units \t no_swap \t swap \t swap/no_swap")
for batch_size in (256, 512):
for max_time in (100,):
for num_units in (512, 256, 128):
no_swap, swap = dynamic_rnn_swap_memory_benchmark(batch_size,
max_time, num_units)
self.report_benchmark(
name="dynamic_lstm_no_memory_swap_T%02d_B%03d_N%03d" %
(max_time, batch_size, num_units),
iters=20,
wall_time=no_swap)
self.report_benchmark(
name="dynamic_lstm_with_memory_swap_T%02d_B%03d_N%03d" %
(max_time, batch_size, num_units),
iters=20,
wall_time=swap)
def benchmarkStaticUnrollHalfSequenceLengthVsHalfUnroll(self):
print("Calculation: Static Unroll with Halved Sequence Length "
"vs. Half Static Unroll")
print("batch \t full_t \t units \t gpu \t dt(half_seq_len) "
"\t dt(unroll_half) \t dt(half_seq_len)/dt(unroll_half)")
for batch_size in (128,):
for max_time in (50,):
for num_units in (256,):
for use_gpu in (False, True):
s_dt, d_dt = half_seq_len_vs_unroll_half_rnn_benchmark(batch_size,
max_time,
num_units,
use_gpu)
self.report_benchmark(
name="half_seq_len_time_T%02d_B%03d_N%03d_gpu_%s" %
(max_time, batch_size, num_units, use_gpu),
iters=20,
wall_time=s_dt)
self.report_benchmark(
name="unroll_half_time_T%02d_B%03d_N%03d_gpu_%s" %
(max_time, batch_size, num_units, use_gpu),
iters=20,
wall_time=d_dt)
def benchmarkStaticUnrollStateConcatVsStateTuple(self):
print("Calculation: Static Unroll with Concatenated State "
"vs. Tuple State")
print("batch \t time \t units \t gpu \t dt(concat_state) "
"\t dt(tuple_state) \t dt(concat_state)/dt(tuple_state)")
for batch_size in (
16,
128,):
for max_time in (50,):
for num_units in (
16,
128,):
for use_gpu in (False, True):
c_dt, t_dt = concat_state_vs_tuple_state_rnn_benchmark(batch_size,
max_time,
num_units,
use_gpu)
self.report_benchmark(
name="concat_state_time_T%02d_B%03d_N%03d_gpu_%s" %
(max_time, batch_size, num_units, use_gpu),
iters=20,
wall_time=c_dt)
self.report_benchmark(
name="tuple_state_time_T%02d_B%03d_N%03d_gpu_%s" %
(max_time, batch_size, num_units, use_gpu),
iters=20,
wall_time=t_dt)
def _benchmarkDynamicLSTMMemorySwapLongSeq(self):
"""The memory swapping test for the SOSP submission."""
print("Calculation: Long LSTM Sequence")
print("batch \t len \t units \t dynamic \t elapsed_t \t elapsed_t/len")
batch_size = 512
seqlen = 800
num_units = 512
dynamic = True
swap_memory = True
# Some warming up.
if swap_memory:
rnn_long_sequence_benchmark(batch_size, seqlen, num_units,
dynamic, swap_memory, 2)
# Measure the performance.
for slen in xrange(100, 1100, 100):
rnn_long_sequence_benchmark(batch_size, slen, num_units, dynamic,
swap_memory, 3)
if __name__ == "__main__":
test.main()
| apache-2.0 | -9,210,532,734,080,610,000 | 37.388954 | 80 | 0.625578 | false |
smartboyathome/Wonderland-Engine | Doorknob/DBWrapper.py | 1 | 11354 | '''
Copyright (c) 2012 Alexander Abbott
This file is part of the Cheshire Cyber Defense Scoring Engine (henceforth
referred to as Cheshire).
Cheshire is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
Cheshire is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Cheshire. If not, see <http://www.gnu.org/licenses/>.
'''
import abc
class DBWrapper(object):
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def close(self):
raise NotImplementedError()
@abc.abstractmethod
def get_all_teams(self):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_team(self, team_id):
raise NotImplementedError()
@abc.abstractmethod
def create_team(self, team_name, team_id):
raise NotImplementedError()
@abc.abstractmethod
def modify_team(self, team_id, **data):
raise NotImplementedError()
@abc.abstractmethod
def delete_team(self, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_scores_for_all_teams(self):
raise NotImplementedError()
@abc.abstractmethod
def get_score_for_team(self, team_id):
raise NotImplementedError()
@abc.abstractmethod
def calculate_scores_for_team(self, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_team_configs_for_all_machines(self):
raise NotImplementedError()
@abc.abstractmethod
def get_team_config_for_all_machines(self, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_team_config_for_machine(self, team_id, machine_id):
raise NotImplementedError()
@abc.abstractmethod
def create_team_config_for_machine(self, team_id, machine_id, **data):
raise NotImplementedError()
@abc.abstractmethod
def modify_team_config_for_machine(self, team_id, machine_id, **data):
raise NotImplementedError()
@abc.abstractmethod
def delete_team_config_for_machine(self, team_id, machine_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_machines(self):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_machine(self, machine_id):
raise NotImplementedError()
@abc.abstractmethod
def create_machine(self, machine_id, general_ip):
raise NotImplementedError()
@abc.abstractmethod
def modify_machine(self, machine_id, **data):
raise NotImplementedError()
@abc.abstractmethod
def delete_machine(self, machine_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_users(self):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_user(self, username, password_hash=None):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_user_with_password(self, username):
raise NotImplementedError()
@abc.abstractmethod
def create_user(self, username, password_hash, email, role, **extra_info):
raise NotImplementedError()
@abc.abstractmethod
def modify_user(self, username, **data):
raise NotImplementedError()
@abc.abstractmethod
def delete_user(self, username):
raise NotImplementedError()
@abc.abstractmethod
def get_current_scoring_session(self):
raise NotImplementedError()
@abc.abstractmethod
def start_current_scoring_session(self):
raise NotImplementedError()
@abc.abstractmethod
def stop_current_scoring_session(self):
raise NotImplementedError()
@abc.abstractmethod
def clear_current_scoring_session(self):
raise NotImplementedError()
@abc.abstractmethod
def get_all_archived_scoring_sessions(self):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_archived_scoring_session(self, archive_id):
raise NotImplementedError()
@abc.abstractmethod
def archive_current_scoring_session(self, archive_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_check_classes(self):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_check_class(self, class_name):
raise NotImplementedError()
@abc.abstractmethod
def create_check_class(self, class_name, check_type, module_name):
raise NotImplementedError()
@abc.abstractmethod
def modify_check_class(self, class_name, **data):
raise NotImplementedError()
@abc.abstractmethod
def delete_check_class(self, class_name):
raise NotImplementedError()
@abc.abstractmethod
def get_all_check_scripts(self):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_check_script(self, module_name):
raise NotImplementedError()
@abc.abstractmethod
def create_check_script(self, module_name, path):
raise NotImplementedError()
@abc.abstractmethod
def modify_check_script(self, module_name, **data):
raise NotImplementedError()
@abc.abstractmethod
def delete_check_script(self, module_name):
raise NotImplementedError()
@abc.abstractmethod
def get_all_checks(self):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_check(self, check_id, check_type):
raise NotImplementedError()
@abc.abstractmethod
def delete_specific_check(self, check_id, check_type):
raise NotImplementedError()
@abc.abstractmethod
def get_all_service_checks(self):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_service_check(self, check_id):
raise NotImplementedError()
@abc.abstractmethod
def create_service_check(self, check_id, description, machine, check_class):
raise NotImplementedError()
@abc.abstractmethod
def modify_service_check(self, check_id, **data):
raise NotImplementedError()
@abc.abstractmethod
def complete_service_check(self, check_id, team_id, timestamp, score):
raise NotImplementedError()
@abc.abstractmethod
def delete_service_check(self, check_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_inject_checks(self):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_inject_check(self, check_id):
raise NotImplementedError()
@abc.abstractmethod
def create_inject_check(self, check_id, description, machine, check_class, inject_number, time_to_check):
raise NotImplementedError()
@abc.abstractmethod
def modify_inject_check(self, check_id, **data):
raise NotImplementedError()
@abc.abstractmethod
def complete_inject_check(self, check_id, team_id, timestamp, score):
raise NotImplementedError()
@abc.abstractmethod
def delete_inject_check(self, check_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_manual_checks(self):
raise NotImplementedError()
@abc.abstractmethod
def get_all_manual_checks_for_team(self, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_manual_check(self, check_id):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_manual_check_for_team(self, check_id, team_id):
raise NotImplementedError()
@abc.abstractmethod
def create_manual_check(self, check_id, description, comments, inject_number, team_id, points_awarded, timestamp):
raise NotImplementedError()
@abc.abstractmethod
def modify_manual_check(self, check_id, team_id, **data):
raise NotImplementedError()
@abc.abstractmethod
def delete_manual_check(self, check_id, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_attacker_checks(self):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_attacker_check(self, check_id, team_id):
raise NotImplementedError()
@abc.abstractmethod
def create_attacker_check(self, check_id, description, machine, team_id, check_class):
raise NotImplementedError()
@abc.abstractmethod
def modify_attacker_check(self, check_id, team_id, **data):
raise NotImplementedError()
@abc.abstractmethod
def complete_attacker_check(self, check_id, team_id, timestamp, score):
raise NotImplementedError()
@abc.abstractmethod
def delete_attacker_check(self, check_id, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_attacker_checks_for_team(self, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_completed_checks(self):
raise NotImplementedError()
@abc.abstractmethod
def get_all_completed_service_checks(self):
raise NotImplementedError()
@abc.abstractmethod
def get_all_completed_inject_checks(self):
raise NotImplementedError()
@abc.abstractmethod
def get_all_completed_manual_checks(self):
raise NotImplementedError()
@abc.abstractmethod
def get_all_completed_attacker_checks(self):
raise NotImplementedError()
@abc.abstractmethod
def get_all_completed_checks_for_team(self, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_completed_service_checks_for_team(self, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_completed_inject_checks_for_team(self, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_completed_manual_checks_for_team(self, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_completed_attacker_checks_for_team(self, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_all_completed_checks_for_team_since(self, team_id, timestamp):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_completed_service_check_for_team(self, check_id, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_completed_inject_check_for_team(self, check_id, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_completed_manual_check_for_team(self, check_id, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_completed_attacker_check_for_team(self, check_id, team_id):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_completed_service_check_for_team_at_time(self, check_id, team_id, timestamp):
raise NotImplementedError()
@abc.abstractmethod
def get_specific_completed_attacker_check_for_team_at_time(self, check_id, team_id, timestamp):
raise NotImplementedError() | agpl-3.0 | 8,022,469,306,059,580,000 | 28.881579 | 118 | 0.692795 | false |
frankrousseau/weboob | weboob/capabilities/shop.py | 1 | 3939 | # -*- coding: utf-8 -*-
# Copyright(C) 2014 Oleg Plakhotniuk
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from .base import BaseObject, StringField, DecimalField, UserError
from .date import DateField
from .collection import CapCollection
__all__ = ['OrderNotFound', 'Order', 'Payment', 'Item', 'CapShop']
class OrderNotFound(UserError):
"""
Raised when an order is not found.
"""
def __init__(self, msg='Order not found'):
UserError.__init__(self, msg)
class Order(BaseObject):
"""
Purchase order.
"""
date = DateField('Date when the order was placed')
shipping = DecimalField('Shipping price')
discount = DecimalField('Discounts')
tax = DecimalField('Tax')
def __repr__(self):
return u"<Order id=%r date=%r>" % (self.id, self.date)
class Payment(BaseObject):
"""
Payment for an order.
"""
date = DateField('The date when payment was applied')
method = StringField('Payment method; e.g. "VISA 1234"')
amount = DecimalField('Payment amount')
def __repr__(self):
return u"<Payment date=%r method=%r amount=%r>" % \
(self.date, self.method, self.amount)
class Item(BaseObject):
"""
Purchased item within an order.
"""
label = StringField('Item label')
url = StringField('URL with item description')
price = DecimalField('Item price')
def __repr__(self):
return u"<Item label=%r price=%r>" % (self.label, self.price)
class CapShop(CapCollection):
"""
Capability of online shops to see orders history.
"""
def iter_resources(self, objs, split_path):
"""
Iter resources.
        The default implementation of this method is to return, at the
        top level, all orders (by calling :func:`iter_orders`).
:param objs: type of objects to get
:type objs: tuple[:class:`BaseObject`]
:param split_path: path to discover
:type split_path: :class:`list`
:rtype: iter[:class:`BaseObject`]
"""
if Order in objs:
self._restrict_level(split_path)
return self.iter_orders()
def get_currency(self):
"""
Get the currency this shop uses.
:rtype: :class:`str`
"""
raise NotImplementedError()
def iter_orders(self):
"""
Iter history of orders.
:rtype: iter[:class:`Order`]
"""
raise NotImplementedError()
def get_order(self, id):
"""
Get an order from its ID.
:param id: ID of the order
:type id: :class:`str`
:rtype: :class:`Order`
:raises: :class:`OrderNotFound`
"""
raise NotImplementedError()
def iter_payments(self, order):
"""
Iter payments of a specific order.
:param order: order to get payments
:type order: :class:`Order`
:rtype: iter[:class:`Payment`]
:raises: :class:`OrderNotFound`
"""
raise NotImplementedError()
def iter_items(self, order):
"""
Iter items of a specific order.
:param order: order to get items
:type order: :class:`Order`
:rtype: iter[:class:`Item`]
:raises: :class:`OrderNotFound`
"""
raise NotImplementedError()
| agpl-3.0 | -8,459,858,471,684,363,000 | 26.165517 | 77 | 0.611323 | false |
gigglearrows/anniesbot | alembic/versions/3841cd597e_added_a_table_for_duel_stats.py | 1 | 1033 | """Added a table for duel stats
Revision ID: 3841cd597e
Revises: d5f1b8bd68
Create Date: 2015-12-02 00:12:07.548855
"""
# revision identifiers, used by Alembic.
revision = '3841cd597e'
down_revision = 'd5f1b8bd68'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tb_user_duel_stats',
sa.Column('user_id', sa.Integer(), autoincrement=False, nullable=False),
sa.Column('duels_won', sa.Integer(), nullable=False),
sa.Column('duels_total', sa.Integer(), nullable=False),
sa.Column('points_won', sa.Integer(), nullable=False),
sa.Column('points_lost', sa.Integer(), nullable=False),
sa.Column('last_duel', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('user_id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('tb_user_duel_stats')
### end Alembic commands ###
| mit | -8,916,108,574,692,642,000 | 27.694444 | 76 | 0.680542 | false |
marscher/PyEMMA | pyemma/_ext/variational/estimators/tests/benchmark_moments.py | 1 | 5907 | from __future__ import absolute_import
from __future__ import print_function
__author__ = 'noe'
import time
import numpy as np
from .. import moments
def genS(N):
""" Generates sparsities given N (number of cols) """
S = [10, 90, 100, 500, 900, 1000, 2000, 5000, 7500, 9000, 10000, 20000, 50000, 75000, 90000] # non-zero
return [s for s in S if s <= N]
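# Example (illustrative only): genS(1000) -> [10, 90, 100, 500, 900, 1000],
# i.e. the non-zero column counts that fit into a 1000-column data matrix.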
def genX(L, N, n_var=None, const=False):
X = np.random.rand(L, N) # random data
if n_var is not None:
if const:
Xsparse = np.ones((L, N))
else:
Xsparse = np.zeros((L, N))
Xsparse[:, :n_var] = X[:, :n_var]
X = Xsparse
return X
def genY(L, N, n_var=None, const=False):
X = np.random.rand(L, N) # random data
if n_var is not None:
if const:
Xsparse = -np.ones((L, N))
else:
Xsparse = np.zeros((L, N))
Xsparse[:, :n_var] = X[:, :n_var]
X = Xsparse
return X
def reftime_momentsXX(X, remove_mean=False, nrep=3):
# time for reference calculation
t1 = time.time()
for r in range(nrep):
s_ref = X.sum(axis=0) # computation of mean
if remove_mean:
X = X - s_ref/float(X.shape[0])
C_XX_ref = np.dot(X.T, X) # covariance matrix
t2 = time.time()
# return mean time
return (t2-t1)/float(nrep)
def mytime_momentsXX(X, remove_mean=False, nrep=3):
# time for reference calculation
t1 = time.time()
for r in range(nrep):
w, s, C_XX = moments.moments_XX(X, remove_mean=remove_mean)
t2 = time.time()
# return mean time
return (t2-t1)/float(nrep)
def reftime_momentsXXXY(X, Y, remove_mean=False, symmetrize=False, nrep=3):
# time for reference calculation
t1 = time.time()
for r in range(nrep):
sx = X.sum(axis=0) # computation of mean
sy = Y.sum(axis=0) # computation of mean
if symmetrize:
sx = 0.5*(sx + sy)
sy = sx
if remove_mean:
X = X - sx/float(X.shape[0])
Y = Y - sy/float(Y.shape[0])
if symmetrize:
C_XX_ref = np.dot(X.T, X) + np.dot(Y.T, Y)
C_XY = np.dot(X.T, Y)
C_XY_ref = C_XY + C_XY.T
else:
C_XX_ref = np.dot(X.T, X)
C_XY_ref = np.dot(X.T, Y)
t2 = time.time()
# return mean time
return (t2-t1)/float(nrep)
def mytime_momentsXXXY(X, Y, remove_mean=False, symmetrize=False, nrep=3):
# time for reference calculation
t1 = time.time()
for r in range(nrep):
w, sx, sy, C_XX, C_XY = moments.moments_XXXY(X, Y, remove_mean=remove_mean, symmetrize=symmetrize)
t2 = time.time()
# return mean time
return (t2-t1)/float(nrep)
def benchmark_moments(L=10000, N=10000, nrep=5, xy=False, remove_mean=False, symmetrize=False, const=False):
#S = [10, 100, 1000]
S = genS(N)
# time for reference calculation
X = genX(L, N)
if xy:
Y = genY(L, N)
reftime = reftime_momentsXXXY(X, Y, remove_mean=remove_mean, symmetrize=symmetrize, nrep=nrep)
else:
reftime = reftime_momentsXX(X, remove_mean=remove_mean, nrep=nrep)
# my time
times = np.zeros(len(S))
for k, s in enumerate(S):
X = genX(L, N, n_var=s, const=const)
if xy:
Y = genY(L, N, n_var=s, const=const)
times[k] = mytime_momentsXXXY(X, Y, remove_mean=remove_mean, symmetrize=symmetrize, nrep=nrep)
else:
times[k] = mytime_momentsXX(X, remove_mean=remove_mean, nrep=nrep)
# assemble report
rows = ['L, data points', 'N, dimensions', 'S, nonzeros', 'time trivial', 'time moments_XX', 'speed-up']
table = np.zeros((6, len(S)))
table[0, :] = L
table[1, :] = N
table[2, :] = S
table[3, :] = reftime
table[4, :] = times
table[5, :] = reftime / times
# print table
if xy:
fname = 'moments_XXXY'
else:
fname = 'moments_XX'
print(fname + '\tremove_mean = ' + str(remove_mean) + '\tsym = ' + str(symmetrize) + '\tconst = ' + str(const))
print(rows[0] + ('\t%i' * table.shape[1])%tuple(table[0]))
print(rows[1] + ('\t%i' * table.shape[1])%tuple(table[1]))
print(rows[2] + ('\t%i' * table.shape[1])%tuple(table[2]))
print(rows[3] + ('\t%.3f' * table.shape[1])%tuple(table[3]))
print(rows[4] + ('\t%.3f' * table.shape[1])%tuple(table[4]))
print(rows[5] + ('\t%.3f' * table.shape[1])%tuple(table[5]))
print()
def main():
LNs = [(100000, 100, 10), (10000, 1000, 7), (1000, 2000, 5), (250, 5000, 5), (100, 10000, 5)]
for L, N, nrep in LNs:
benchmark_moments(L=L, N=N, nrep=nrep, xy=False, remove_mean=False, symmetrize=False, const=False)
benchmark_moments(L=L, N=N, nrep=nrep, xy=False, remove_mean=False, symmetrize=False, const=True)
benchmark_moments(L=L, N=N, nrep=nrep, xy=False, remove_mean=True, symmetrize=False, const=False)
benchmark_moments(L=L, N=N, nrep=nrep, xy=False, remove_mean=True, symmetrize=False, const=True)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=False, symmetrize=False, const=False)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=False, symmetrize=False, const=True)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=False, symmetrize=True, const=False)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=False, symmetrize=True, const=True)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=True, symmetrize=False, const=False)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=True, symmetrize=False, const=True)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=True, symmetrize=True, const=False)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=True, symmetrize=True, const=True)
if __name__ == "__main__":
main() | lgpl-3.0 | -6,502,714,212,863,599,000 | 35.695652 | 115 | 0.583206 | false |
09zwcbupt/undergrad_thesis | ext/poxdesk/qx/tool/pylib/ecmascript/frontend/Comment_2.py | 1 | 27369 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# qooxdoo - the new era of web development
#
# http://qooxdoo.org
#
# Copyright:
# 2006-2012 1&1 Internet AG, Germany, http://www.1und1.de
#
# License:
# LGPL: http://www.gnu.org/licenses/lgpl.html
# EPL: http://www.eclipse.org/org/documents/epl-v10.php
# See the LICENSE file in the project's top-level directory for details.
#
# Authors:
# * Sebastian Werner (wpbasti)
# * Fabian Jakobs (fjakobs)
# * Thomas Herchenroeder (thron7)
#
################################################################################
import sys, string, re
from ecmascript.frontend import tree
from generator import Context as context
from textile import textile
##
# Many Regexp's
S_INLINE_COMMENT = "//.*"
R_INLINE_COMMENT = re.compile("^" + S_INLINE_COMMENT + "$")
R_INLINE_COMMENT_TIGHT = re.compile("^//\S+")
R_INLINE_COMMENT_PURE = re.compile("^//")
S_BLOCK_COMMENT = "/\*(?:[^*]|[\n]|(?:\*+(?:[^*/]|[\n])))*\*+/"
R_BLOCK_COMMENT = re.compile("^" + S_BLOCK_COMMENT + "$")
R_BLOCK_COMMENT_JAVADOC = re.compile("^/\*\*")
R_BLOCK_COMMENT_QTDOC = re.compile("^/\*!")
R_BLOCK_COMMENT_AREA = re.compile("^/\*\n\s*\*\*\*\*\*")
R_BLOCK_COMMENT_DIVIDER = re.compile("^/\*\n\s*----")
R_BLOCK_COMMENT_HEADER = re.compile("^/\* \*\*\*\*")
R_BLOCK_COMMENT_TIGHT_START = re.compile("^/\*\S+")
R_BLOCK_COMMENT_TIGHT_END = re.compile("\S+\*/$")
R_BLOCK_COMMENT_PURE_START = re.compile("^/\*")
R_BLOCK_COMMENT_PURE_END = re.compile("\*/$")
R_ATTRIBUTE = re.compile('[^{]@(\w+)\s*')
R_JAVADOC_STARS = re.compile(r'^\s*\*')
R_NAMED_TYPE = re.compile(r'^\s*([a-zA-Z0-9_\.#-]+)\s*({([^}]+)})?')
R_SIMPLE_TYPE = re.compile(r'^\s*({([^}]+)})?')
VARPREFIXES = {
"a" : "Array",
"b" : "Boolean",
"d" : "Date",
"f" : "Function",
"i" : "Integer",
"h" : "Map",
"m" : "Map",
"n" : "Number",
"o" : "Object",
"r" : "RegExp",
"s" : "String",
"v" : "var",
"w" : "Widget"
}
VARNAMES = {
"a" : "Array",
"arr" : "Array",
"doc" : "Document",
"e" : "Event",
"ev" : "Event",
"evt" : "Event",
"el" : "Element",
"elem" : "Element",
"elm" : "Element",
"ex" : "Exception",
"exc" : "Exception",
"flag" : "Boolean",
"force" : "Boolean",
"f" : "Function",
"func" : "Function",
"h" : "Map",
"hash" : "Map",
"map" : "Map",
"node" : "Node",
"n" : "Number",
"num" : "Number",
"o" : "Object",
"obj" : "Object",
"reg" : "RegExp",
"s" : "String",
"str" : "String",
"win" : "Window"
}
VARDESC = {
"propValue" : "Current value",
"propOldValue" : "Previous value",
"propData" : "Property configuration map"
}
def nameToType(name):
typ = "var"
# Evaluate type from name
if name in VARNAMES:
typ = VARNAMES[name]
elif len(name) > 1:
if name[1].isupper():
if name[0] in VARPREFIXES:
typ = VARPREFIXES[name[0]]
return typ
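# Illustrative examples of the heuristic above (verifiable against the
# VARNAMES/VARPREFIXES tables):
#   nameToType("elem")     -> "Element"   (full-name lookup)
#   nameToType("bEnabled") -> "Boolean"   (prefix "b" + upper-case second char)
#   nameToType("foo")      -> "var"       (no rule matches)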
def nameToDescription(name):
desc = "TODOC"
if name in VARDESC:
desc = VARDESC[name]
return desc
##
# Parsed comments are represented as lists of "attributes". This is a schematic:
# [{
# 'category' : 'description'|'param'|'throws'|'return'| ... (prob. all '@' tags),
# 'text' : <descriptive string>,
# 'name' : <name e.g. param name>,
# 'defaultValue' : <param default value>,
# 'type' : [{ (array for alternatives, e.g. "{Map|null}")
# 'type': 'Map'|'String'|..., (from e.g. "{String[]}")
# 'dimensions': <int> (0 = scalar, 1 = array, ...)
# }]
# }]
#
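# A minimal illustration of the structure above (the comment text and values
# are hypothetical, for demonstration only):
#
#   attribs = Comment("/** Opens the given URL.\n"
#                     " * @param url {String} the address\n"
#                     " * @return {Boolean} true on success\n"
#                     " */").parse(False)
#
#   # attribs would then resemble:
#   # [{'category': 'description', 'text': 'Opens the given URL.'},
#   #  {'category': 'param', 'name': 'url',
#   #   'type': [{'type': 'String', 'dimensions': 0}], 'text': 'the address'},
#   #  {'category': 'return',
#   #   'type': [{'type': 'Boolean', 'dimensions': 0}], 'text': 'true on success'}]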
def getAttrib(attribList, category):
for attrib in attribList:
if attrib["category"] == category:
return attrib
def getParam(attribList, name):
for attrib in attribList:
if attrib["category"] == "param":
if "name" in attrib and attrib["name"] == name:
return attrib
def attribHas(attrib, key):
if attrib != None and key in attrib and not attrib[key] in ["", None]:
return True
return False
##
# Holds a string representing a JS comment
#
class Comment(object):
def __init__(self, s):
self.string = s
def correctInline(self):
if R_INLINE_COMMENT_TIGHT.match(self.string):
return R_INLINE_COMMENT_PURE.sub("// ", self.string)
return self.string
def correctBlock(self):
source = self.string
if not self.getFormat() in ["javadoc", "qtdoc"]:
if R_BLOCK_COMMENT_TIGHT_START.search(self.string):
source = R_BLOCK_COMMENT_PURE_START.sub("/* ", self.string)
if R_BLOCK_COMMENT_TIGHT_END.search(source):
source = R_BLOCK_COMMENT_PURE_END.sub(" */", self.string)
return source
def correct(self):
if self.string[:2] == "//":
return self.correctInline()
else:
return self.correctBlock()
def isMultiLine(self):
return self.string.find("\n") != -1
def getFormat(self):
if R_BLOCK_COMMENT_JAVADOC.search(self.string):
return "javadoc"
elif R_BLOCK_COMMENT_QTDOC.search(self.string):
return "qtdoc"
elif R_BLOCK_COMMENT_AREA.search(self.string):
return "area"
elif R_BLOCK_COMMENT_DIVIDER.search(self.string):
return "divider"
elif R_BLOCK_COMMENT_HEADER.search(self.string):
return "header"
return "block"
def qt2javadoc(self):
attribList = self.parse(False)
res = "/**"
desc = self.getAttrib(attribList, "description")
if "text" in desc:
desc = desc["text"]
else:
desc = ""
if "\n" in desc:
res += "\n"
for line in desc.split("\n"):
res += " * %s\n" % line
res += " "
else:
res += " %s " % desc
res += "*/"
return res
def parse(self, format=True):
# print "Parse: " + intext
# Strip "/**", "/*!" and "*/"
intext = self.string[3:-2]
# Strip leading stars in every line
text = ""
for line in intext.split("\n"):
text += R_JAVADOC_STARS.sub("", line) + "\n"
# Autodent
text = Text(text).autoOutdent()
# Search for attributes
desc = { "category" : "description", "text" : "" }
attribs = [desc]
pos = 0
while True:
# this is necessary to match ^ at the beginning of a line
if pos > 0 and text[pos-1] == "\n": pos -= 1
match = R_ATTRIBUTE.search(text, pos)
if match == None:
prevText = text[pos:].rstrip()
if len(attribs) == 0:
desc["text"] = prevText
else:
attribs[-1]["text"] = prevText
break
prevText = text[pos:match.start(0)].rstrip()
pos = match.end(0)
if len(attribs) == 0:
desc["text"] = prevText
else:
attribs[-1]["text"] = prevText
attribs.append({ "category" : match.group(1), "text" : "" })
# parse details
for attrib in attribs:
self.parseDetail(attrib, format)
return attribs
def parseDetail(self, attrib, format=True):
text = attrib["text"]
if attrib["category"] in ["param", "event", "see", "state", "appearance", "childControl"]:
match = R_NAMED_TYPE.search(text)
else:
match = R_SIMPLE_TYPE.search(text)
if match:
text = text[match.end(0):]
if attrib["category"] in ["param", "event", "see", "state", "appearance", "childControl"]:
attrib["name"] = match.group(1)
#print ">>> NAME: %s" % match.group(1)
remain = match.group(3)
else:
remain = match.group(2)
if remain != None:
defIndex = remain.rfind("?")
if defIndex != -1:
attrib["defaultValue"] = remain[defIndex+1:].strip()
remain = remain[0:defIndex].strip()
#print ">>> DEFAULT: %s" % attrib["defaultValue"]
typValues = []
for typ in remain.split("|"):
typValue = typ.strip()
arrayIndex = typValue.find("[")
if arrayIndex != -1:
arrayValue = (len(typValue) - arrayIndex) / 2
typValue = typValue[0:arrayIndex]
else:
arrayValue = 0
typValues.append({ "type" : typValue, "dimensions" : arrayValue })
if len(typValues) > 0:
attrib["type"] = typValues
#print ">>> TYPE: %s" % attrib["type"]
if format:
attrib["text"] = self.formatText(text)
else:
attrib["text"] = self.cleanupText(text)
if attrib["text"] == "":
del attrib["text"]
def cleanupText(self, text):
#print "============= INTEXT ========================="
#print text
text = text.replace("<p>", "\n")
text = text.replace("<br/>", "\n")
text = text.replace("<br>", "\n")
text = text.replace("</p>", " ")
# on single lines strip the content
if not "\n" in text:
text = text.strip()
else:
newline = False
lines = text.split("\n")
text = u""
for line in lines:
if line == "":
if not newline:
newline = True
else:
if text != "":
text += "\n"
if newline:
text += "\n"
newline = False
text += line
#print "============= OUTTEXT ========================="
#print text
# Process TODOC the same as no text
if text == "TODOC":
return ""
return text
##
# JSDoc can contain macros, which are expanded here.
#
def expandMacros(self, text):
_mmap = {
"qxversion" : (context.jobconf.get("let/QOOXDOO_VERSION", "!!TODO!!") if
hasattr(context,'jobconf') else "[undef]" ) # ecmalint.py doesn't know jobs
}
templ = string.Template(text)
text = templ.safe_substitute(_mmap)
return text
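    # Rough usage sketch (the substituted version string is an assumption and
    # depends entirely on the job configuration):
    #   Comment("").expandMacros("Available since qooxdoo ${qxversion}.")
    #   # -> e.g. "Available since qooxdoo 1.6."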
def formatText(self, text):
text = self.cleanupText(text)
#if "\n" in text:
# print
# print "------------- ORIGINAL ----------------"
# print text
text = text.replace("<pre", "\n\n<pre").replace("</pre>", "</pre>\n\n")
text = self.expandMacros(text)
        # Encoding to ascii translates umlauts into their XML entity codes.
text = unicode(textile.textile(text.encode("utf-8"), output="ascii"))
#if "\n" in text:
# print "------------- TEXTILED ----------------"
# print text
return text
def splitText(self, attrib=True):
res = ""
first = True
for line in self.string.split("\n"):
if attrib:
if first:
res += " %s\n" % line
else:
res += " * %s\n" % line
else:
res += " * %s\n" % line
first = False
if not res.endswith("\n"):
res += "\n"
return res
@staticmethod
def parseType(vtype):
typeText = ""
firstType = True
for entry in vtype:
if not firstType:
typeText += " | "
typeText += entry["type"]
if "dimensions" in entry and entry["dimensions"] > 0:
typeText += "[]" * entry["dimensions"]
firstType = False
return typeText
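    # Illustrative example (hypothetical input):
    #   Comment.parseType([{'type': 'String', 'dimensions': 1},
    #                      {'type': 'null', 'dimensions': 0}])
    #   # -> "String[] | null"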
##
# Helper class for text-level operations
#
class Text(object):
def __init__(self, s):
self.string = s
##
# Remove a fixed number of spaces from the beginning of each line
# in text.
#
# @param indent {Int} number of spaces to remove
#
def outdent(self, indent):
return re.compile("\n\s{%s}" % indent).sub("\n", self.string)
#def indent(self, source, indent):
# return re.compile("\n").sub("\n" + (" " * indent), source)
##
# Insert <indent> at the beginning of each line in text
#
# @param indent {String} string to insert
#
def indent(self, indent):
return re.compile("\n").sub("\n" + indent, self.string)
def autoOutdent(self):
text = self.string
lines = text.split("\n")
if len(lines) <= 1:
return text.strip()
for line in lines:
if len(line) > 0 and line[0] != " ":
return text
result = ""
for line in lines:
if len(line) >= 0:
result += line[1:]
result += "\n"
return result
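    # Usage sketch (illustrative only):
    #   Text("x\n    y").outdent(4)   # -> "x\ny"
    #   Text("a\nb").indent("    ")   # -> "a\n    b"  (the first line is not indented)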
# -- Helper functions working on tree nodes ------------------------------------
def hasThrows(node):
if node.type == "throw":
return True
if node.hasChildren():
for child in node.children:
if hasThrows(child):
return True
return False
def getReturns(node, found):
if node.type == "function":
pass
elif node.type == "return":
if node.getChildrenLength(True) > 0:
val = "var"
else:
val = "void"
if node.hasChild("expression"):
expr = node.getChild("expression")
if expr.hasChild("variable"):
var = expr.getChild("variable")
if var.getChildrenLength(True) == 1 and var.hasChild("identifier"):
val = nameToType(var.getChild("identifier").get("name"))
else:
val = "var"
elif expr.hasChild("constant"):
val = expr.getChild("constant").get("constantType")
if val == "number":
val = expr.getChild("constant").get("detail")
elif expr.hasChild("array"):
val = "Array"
elif expr.hasChild("map"):
val = "Map"
elif expr.hasChild("function"):
val = "Function"
elif expr.hasChild("call"):
val = "var"
if not val in found:
found.append(val)
elif node.hasChildren():
for child in node.children:
getReturns(child, found)
return found
def findComment(node):
def findCommentBefore(node):
while node:
if node.hasChild("commentsBefore"):
for comment in node.getChild("commentsBefore").children:
if comment.get("detail") in ["javadoc", "qtdoc"]:
comments = parseNode(node)
return comments
if node.hasParent():
node = node.parent
else:
return None
def findCommentAfter(node):
while node:
if node.hasChild("commentsBefore"):
for comment in node.getChild("commentsBefore").children:
if comment.get("detail") in ["javadoc", "qtdoc"]:
comments = parseNode(node)
return comments
if node.hasChildren():
node = node.children[0]
else:
return None
if node.type == "file":
return findCommentAfter(node)
else:
return findCommentBefore(node)
def parseNode(node):
"""Takes the last doc comment from the commentsBefore child, parses it and
returns a Node representing the doc comment"""
# Find the last doc comment
commentsBefore = node.getChild("commentsBefore", False)
if commentsBefore and commentsBefore.hasChildren():
for child in commentsBefore.children:
if child.type == "comment" and child.get("detail") in ["javadoc", "qtdoc"]:
return Comment(child.get("text")).parse()
return []
##
# fill(node) -- look for function definitions in the tree represented by <node>,
# look for their corresponding comment and amend it, or create it in the first
# place
#
def fill(node):
if node.type in ["comment", "commentsBefore", "commentsAfter"]:
return
if node.hasParent():
target = node
if node.type == "function":
name = node.get("name", False)
else:
name = ""
alternative = False
assignType = None
if name != None:
assignType = "function"
# move to hook operation
while target.parent.type in ["first", "second", "third"] and target.parent.parent.type == "operation" and target.parent.parent.get("operator") == "HOOK":
alternative = True
target = target.parent.parent
# move comment to assignment
while target.parent.type == "right" and target.parent.parent.type == "assignment":
target = target.parent.parent
if target.hasChild("left"):
left = target.getChild("left")
if left and left.hasChild("variable"):
var = left.getChild("variable")
last = var.getLastChild(False, True)
if last and last.type == "identifier":
name = last.get("name")
assignType = "object"
for child in var.children:
if child.type == "identifier":
if child.get("name") in ["prototype", "Proto"]:
assignType = "member"
elif child.get("name") in ["class", "base", "Class"]:
assignType = "static"
elif target.parent.type == "definition":
name = target.parent.get("identifier")
assignType = "definition"
# move to definition
if target.parent.type == "assignment" and target.parent.parent.type == "definition" and target.parent.parent.parent.getChildrenLength(True) == 1:
target = target.parent.parent.parent
assignType = "function"
# move comment to keyvalue
if target.parent.type == "value" and target.parent.parent.type == "keyvalue":
target = target.parent.parent
name = target.get("key")
assignType = "map"
if name == "construct":
assignType = "constructor"
if target.parent.type == "map" and target.parent.parent.type == "value" and target.parent.parent.parent.type == "keyvalue":
paname = target.parent.parent.parent.get("key")
if paname == "members":
assignType = "member"
elif paname == "statics":
assignType = "static"
# filter stuff, only add comments to member and static values and to all functions
if assignType in ["member", "static"] and node.type == "function":
if not hasattr(target, "documentationAdded") and target.parent.type != "params":
old = []
commentNode = None
# create commentsBefore
if target.hasChild("commentsBefore"):
commentsBefore = target.getChild("commentsBefore")
if commentsBefore.hasChild("comment"):
for child in commentsBefore.children:
if child.get("detail") in ["javadoc", "qtdoc"]:
old = Comment(child.get("text")).parse(False)
commentNode = child
commentNodeIndex = commentsBefore.children.index(child)
break
else:
commentsBefore = tree.Node("commentsBefore")
target.addChild(commentsBefore)
# create comment node
if commentNode == None:
commentNodeIndex = None
commentNode = tree.Node("comment")
commentNode.set("detail", "javadoc")
#if node.type == "function":
# commentNode.set("text", fromFunction(node, assignType, name, alternative, old))
#else:
# commentNode.set("text", fromNode(node, assignType, name, alternative, old))
commentNode.set("text", fromFunction(node, assignType, name, alternative, old))
commentNode.set("multiline", True)
commentsBefore.addChild(commentNode,commentNodeIndex)
# in case of alternative methods, use the first one, ignore the others
target.documentationAdded = True
if node.hasChildren():
for child in node.children:
fill(child)
def fromNode(node, assignType, name, alternative, old=[]):
#
# description
##############################################################
oldDesc = getAttrib(old, "description")
if attribHas(oldDesc, "text"):
newText = oldDesc["text"]
else:
newText = "{var} TODOC"
if "\n" in newText:
s = "/**\n%s\n-*/" % Comment(newText).splitText(False)
else:
s = "/** %s */" % newText
s = s.replace("/** ", "/** ").replace(" */", " */")
#
# other @attributes
##############################################################
for attrib in old:
cat = attrib["category"]
if cat != "description":
print " * Found unallowed attribute %s in comment for %s (node)" % (cat, name)
return s
def fromFunction(func, assignType, name, alternative, old=[]):
#
# open comment
##############################################################
s = "/**\n"
#
# description
##############################################################
oldDesc = getAttrib(old, "description")
if attribHas(oldDesc, "text"):
newText = oldDesc["text"]
else:
newText = "TODOC"
s += Comment(newText).splitText(False)
s += " *\n"
#
# add @type
##############################################################
# TODO: Remove the @type annotation as it conflicts with JSdoc
# if assignType != None:
# s += " * @type %s\n" % assignType
# else:
# s += " * @type unknown TODOC\n"
#
# add @abstract
##############################################################
oldAbstract = getAttrib(old, "abstract")
first = func.getChild("body").getChild("block").getFirstChild(False, True)
abstract = first and first.type == "throw"
if abstract:
if attribHas(oldAbstract, "text"):
newText = oldDesc["text"]
else:
newText = ""
s += " * @abstract%s" % Comment(newText).splitText()
if not s.endswith("\n"):
s += "\n"
elif oldAbstract:
print " * Removing old @abstract for %s" % name
#
# add @param
##############################################################
params = func.getChild("params")
if params.hasChildren():
for child in params.children:
if child.type == "variable":
newName = child.getChild("identifier").get("name")
newType = newTypeText = nameToType(newName)
newDefault = ""
newText = nameToDescription(newName)
oldParam = getParam(old, newName)
# Get type and text from old content
if oldParam:
if attribHas(oldParam, "type"):
newTypeText = Comment.parseType(oldParam["type"])
if attribHas(oldParam, "defaultValue"):
newDefault = " ? %s" % oldParam["defaultValue"]
if attribHas(oldParam, "text"):
newText = oldParam["text"].strip()
s += " * @param %s {%s%s}%s" % (newName, newTypeText, newDefault, Comment(newText).splitText())
if not s.endswith("\n"):
s += "\n"
#
# add @return
##############################################################
if name != "construct":
oldReturn = getAttrib(old, "return")
newType = "void"
newText = ""
# Get type and text from old content
if oldReturn:
if attribHas(oldReturn, "type"):
newType = Comment.parseType(oldReturn["type"])
if attribHas(oldReturn, "text"):
newText = oldReturn["text"].strip()
# Try to autodetect the type
if newType == "void":
returns = getReturns(func.getChild("body"), [])
if len(returns) > 0:
newType = " | ".join(returns)
            elif name != None and name[:2] == "is" and name[2].isupper():
newType = "Boolean"
# Add documentation hint in non void cases
if newType != "void":
if newText == "":
newText = "TODOC"
s += " * @return {%s}%s" % (newType, Comment(newText).splitText())
if not s.endswith("\n"):
s += "\n"
#
# add @throws
##############################################################
oldThrows = getAttrib(old, "throws")
if hasThrows(func):
if oldThrows and attribHas(oldThrows, "text"):
newText = oldThrows["text"]
elif abstract:
newText = "the abstract function warning."
else:
newText = "TODOC"
s += " * @throws%s" % Comment(newText).splitText()
if not s.endswith("\n"):
s += "\n"
elif oldThrows:
print " * Removing old @throw attribute in comment for %s" % name
#
# other @attributes
##############################################################
for attrib in old:
cat = attrib["category"]
if cat in ["see", "author", "deprecated", "exception", "since", "version", "abstract", "overridden", "lint"]:
s += " * @%s" % cat
if cat == "see":
if attribHas(attrib, "name"):
s += Comment(attrib["name"]).splitText()
elif attribHas(attrib, "text"):
s += Comment(attrib["text"]).splitText()
if not s.endswith("\n"):
s += "\n"
elif not cat in ["description", "type", "abstract", "param", "return", "throws", "link", "internal", "signature"]:
print " * Found unallowed attribute %s in comment for %s (function)" % (cat, name)
#
# close comment
##############################################################
s += " */"
return s
| gpl-3.0 | 1,810,750,978,431,360,500 | 27.27376 | 161 | 0.483796 | false |
TPopovich/mongo-connector | mongo_connector/oplog_manager.py | 1 | 31232 | # Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tails the oplog of a shard and returns entries
"""
import bson
import logging
try:
import Queue as queue
except ImportError:
import queue
import pymongo
import sys
import time
import threading
import traceback
from mongo_connector import errors, util
from mongo_connector.constants import DEFAULT_BATCH_SIZE
from mongo_connector.util import retry_until_ok
from pymongo import MongoClient
class OplogThread(threading.Thread):
"""OplogThread gathers the updates for a single oplog.
"""
def __init__(self, primary_conn, main_address, oplog_coll, is_sharded,
doc_manager, oplog_progress_dict, namespace_set, auth_key,
auth_username, repl_set=None, collection_dump=True,
batch_size=DEFAULT_BATCH_SIZE, fields=None,
dest_mapping={},
namespace_set_to_skip=[],
db_to_skip=[]): # additional db to skip, as we always skip "config" and "local"
"""Initialize the oplog thread.
"""
super(OplogThread, self).__init__()
self.batch_size = batch_size
#The connection to the primary for this replicaSet.
self.primary_connection = primary_conn
#Boolean chooses whether to dump the entire collection if no timestamp
# is present in the config file
self.collection_dump = collection_dump
#The mongos for sharded setups
#Otherwise the same as primary_connection.
#The value is set later on.
self.main_connection = None
#The connection to the oplog collection
self.oplog = oplog_coll
#Boolean describing whether the cluster is sharded or not
self.is_sharded = is_sharded
#A document manager for each target system.
#These are the same for all threads.
if type(doc_manager) == list:
self.doc_managers = doc_manager
else:
self.doc_managers = [doc_manager]
#Boolean describing whether or not the thread is running.
self.running = True
#Stores the timestamp of the last oplog entry read.
self.checkpoint = None
#A dictionary that stores OplogThread/timestamp pairs.
#Represents the last checkpoint for a OplogThread.
self.oplog_progress = oplog_progress_dict
#The set of namespaces to process from the mongo cluster.
self.namespace_set = namespace_set
#The set of namespaces to not process from the mongo cluster.
self.namespace_set_to_skip = namespace_set_to_skip
self.db_to_skip = set(db_to_skip) if db_to_skip else set([])
self.db_to_skip.add("config") # always skip "config" and "local"
self.db_to_skip.add("local")
#The dict of source namespaces to destination namespaces
self.dest_mapping = dest_mapping
#If authentication is used, this is an admin password.
self.auth_key = auth_key
#This is the username used for authentication.
self.auth_username = auth_username
# Set of fields to export
self._fields = set(fields) if fields else None
logging.info('OplogThread: Initializing oplog thread')
if is_sharded:
self.main_connection = MongoClient(main_address)
else:
self.main_connection = MongoClient(main_address,
replicaSet=repl_set)
self.oplog = self.main_connection['local']['oplog.rs']
if auth_key is not None:
#Authenticate for the whole system
self.primary_connection['admin'].authenticate(
auth_username, auth_key)
self.main_connection['admin'].authenticate(
auth_username, auth_key)
if not self.oplog.find_one():
err_msg = 'OplogThread: No oplog for thread:'
logging.warning('%s %s' % (err_msg, self.primary_connection))
@property
def fields(self):
return self._fields
@fields.setter
def fields(self, value):
if value:
self._fields = set(value)
# Always include _id field
self._fields.add('_id')
else:
self._fields = None
def run(self):
"""Start the oplog worker.
"""
logging.debug("OplogThread: Run thread started")
while self.running is True:
logging.debug("OplogThread: Getting cursor")
cursor = self.init_cursor()
logging.debug("OplogThread: Got the cursor, go go go!")
# we've fallen too far behind
if cursor is None and self.checkpoint is not None:
err_msg = "OplogThread: Last entry no longer in oplog"
effect = "cannot recover!"
logging.error('%s %s %s' % (err_msg, effect, self.oplog))
self.running = False
continue
#The only entry is the last one we processed
if cursor is None or util.retry_until_ok(cursor.count) == 1:
logging.debug("OplogThread: Last entry is the one we "
"already processed. Up to date. Sleeping.")
time.sleep(1)
continue
last_ts = None
err = False
            remove_inc = 0
            upsert_inc = 0
            update_inc = 0
try:
logging.debug("OplogThread: about to process new oplog "
"entries")
while cursor.alive and self.running:
logging.debug("OplogThread: Cursor is still"
" alive and thread is still running.")
for n, entry in enumerate(cursor):
logging.debug("OplogThread: Iterating through cursor,"
" document number in this cursor is %d"
% n)
# Break out if this thread should stop
if not self.running:
break
# Don't replicate entries resulting from chunk moves
if entry.get("fromMigrate"):
continue
# Take fields out of the oplog entry that
# shouldn't be replicated. This may nullify
# the document if there's nothing to do.
if not self.filter_oplog_entry(entry):
continue
#sync the current oplog operation
operation = entry['op']
ns = entry['ns']
# use namespace mapping if one exists
ns = self.dest_mapping.get(entry['ns'], ns)
for docman in self.doc_managers:
try:
logging.debug("OplogThread: Operation for this "
"entry is %s" % str(operation))
# Remove
if operation == 'd':
entry['_id'] = entry['o']['_id']
docman.remove(entry)
remove_inc += 1
# Insert
elif operation == 'i': # Insert
# Retrieve inserted document from
# 'o' field in oplog record
doc = entry.get('o')
# Extract timestamp and namespace
doc['_ts'] = util.bson_ts_to_long(
entry['ts'])
doc['ns'] = ns
docman.upsert(doc)
upsert_inc += 1
# Update
elif operation == 'u':
doc = {"_id": entry['o2']['_id'],
"_ts": util.bson_ts_to_long(
entry['ts']),
"ns": ns}
# 'o' field contains the update spec
docman.update(doc, entry.get('o', {}))
update_inc += 1
except errors.OperationFailed:
logging.exception(
"Unable to process oplog document %r"
% entry)
except errors.ConnectionFailed:
logging.exception(
"Connection failed while processing oplog "
"document %r" % entry)
if (remove_inc + upsert_inc + update_inc) % 1000 == 0:
logging.debug(
"OplogThread: Documents removed: %d, "
"inserted: %d, updated: %d so far" % (
remove_inc, upsert_inc, update_inc))
logging.debug("OplogThread: Doc is processed.")
last_ts = entry['ts']
# update timestamp per batch size
# n % -1 (default for self.batch_size) == 0 for all n
if n % self.batch_size == 1 and last_ts is not None:
self.checkpoint = last_ts
self.update_checkpoint()
# update timestamp after running through oplog
if last_ts is not None:
logging.debug("OplogThread: updating checkpoint after"
"processing new oplog entries")
self.checkpoint = last_ts
self.update_checkpoint()
except (pymongo.errors.AutoReconnect,
pymongo.errors.OperationFailure,
pymongo.errors.ConfigurationError):
logging.exception(
"Cursor closed due to an exception. "
"Will attempt to reconnect.")
err = True
if err is True and self.auth_key is not None:
self.primary_connection['admin'].authenticate(
self.auth_username, self.auth_key)
self.main_connection['admin'].authenticate(
self.auth_username, self.auth_key)
err = False
# update timestamp before attempting to reconnect to MongoDB,
# after being join()'ed, or if the cursor closes
if last_ts is not None:
logging.debug("OplogThread: updating checkpoint after an "
"Exception, cursor closing, or join() on this"
"thread.")
self.checkpoint = last_ts
self.update_checkpoint()
logging.debug("OplogThread: Sleeping. Documents removed: %d, "
"upserted: %d, updated: %d"
% (remove_inc, upsert_inc, update_inc))
time.sleep(2)
def join(self):
"""Stop this thread from managing the oplog.
"""
logging.debug("OplogThread: exiting due to join call.")
self.running = False
threading.Thread.join(self)
def filter_oplog_entry(self, entry):
"""Remove fields from an oplog entry that should not be replicated."""
if not self._fields:
return entry
def pop_excluded_fields(doc):
for key in set(doc) - self._fields:
doc.pop(key)
# 'i' indicates an insert. 'o' field is the doc to be inserted.
if entry['op'] == 'i':
pop_excluded_fields(entry['o'])
# 'u' indicates an update. 'o' field is the update spec.
elif entry['op'] == 'u':
pop_excluded_fields(entry['o'].get("$set", {}))
pop_excluded_fields(entry['o'].get("$unset", {}))
# not allowed to have empty $set/$unset, so remove if empty
if "$set" in entry['o'] and not entry['o']['$set']:
entry['o'].pop("$set")
if "$unset" in entry['o'] and not entry['o']['$unset']:
entry['o'].pop("$unset")
if not entry['o']:
return None
return entry
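    # Illustrative example (added for documentation, not part of the original
    # code): with self._fields == {'_id', 'name'}, an insert entry
    #   {'op': 'i', 'o': {'_id': 1, 'name': 'a', 'secret': 'x'}}
    # is trimmed to {'op': 'i', 'o': {'_id': 1, 'name': 'a'}}, while an update
    # whose '$set'/'$unset' only touched excluded fields ends up with an empty
    # 'o' and is dropped entirely (the method returns None).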
def get_oplog_cursor(self, timestamp):
"""Move cursor to the proper place in the oplog.
"""
logging.debug("OplogThread: Getting the oplog cursor and moving it "
"to the proper place in the oplog.")
if timestamp is None:
return None
cursor, cursor_len = None, 0
while (True):
try:
logging.debug("OplogThread: Getting the oplog cursor "
"in the while true loop for get_oplog_cursor")
if not self.namespace_set_to_skip:
if not self.namespace_set:
cursor = self.oplog.find(
{'ts': {'$gte': timestamp}},
tailable=True, await_data=True
)
else:
cursor = self.oplog.find(
{'ts': {'$gte': timestamp},
'ns': {'$in': self.namespace_set}},
tailable=True, await_data=True
)
else: ## user wants to skip
if not self.namespace_set:
cursor = self.oplog.find(
{'ts': {'$gte': timestamp},
'ns': {'$nin': self.namespace_set_to_skip}},
tailable=True, await_data=True
)
                    else:  # if both namespace_set and namespace_set_to_skip are given, namespace_set wins
cursor = self.oplog.find(
{'ts': {'$gte': timestamp},
'ns': {'$in': self.namespace_set}},
tailable=True, await_data=True
)
# Applying 8 as the mask to the cursor enables OplogReplay
cursor.add_option(8)
logging.debug("OplogThread: Cursor created, getting a count.")
cursor_len = cursor.count()
logging.debug("OplogThread: Count is %d" % cursor_len)
break
except (pymongo.errors.AutoReconnect,
pymongo.errors.OperationFailure,
pymongo.errors.ConfigurationError):
pass
if cursor_len == 0:
logging.debug("OplogThread: Initiating rollback from "
"get_oplog_cursor")
#rollback, we are past the last element in the oplog
timestamp = self.rollback()
logging.info('Finished rollback')
return self.get_oplog_cursor(timestamp)
first_oplog_entry = retry_until_ok(lambda: cursor[0])
cursor_ts_long = util.bson_ts_to_long(first_oplog_entry.get("ts"))
given_ts_long = util.bson_ts_to_long(timestamp)
if cursor_ts_long > given_ts_long:
# first entry in oplog is beyond timestamp, we've fallen behind!
return None
elif cursor_len == 1: # means we are the end of the oplog
self.checkpoint = timestamp
#to commit new TS after rollbacks
return cursor
elif cursor_len > 1:
doc = retry_until_ok(next, cursor)
if timestamp == doc['ts']:
return cursor
else: # error condition
logging.error('OplogThread: %s Bad timestamp in config file'
% self.oplog)
return None
def dump_collection(self):
"""Dumps collection into the target system.
This method is called when we're initializing the cursor and have no
configs i.e. when we're starting for the first time.
"""
dump_set = self.namespace_set or []
logging.debug("OplogThread: Dumping set of collections %s " % dump_set)
#no namespaces specified
if not self.namespace_set:
db_list = retry_until_ok(self.main_connection.database_names)
for database in db_list:
if database in self.db_to_skip: # skip "config" , "local" and any other db specified by user to skip
continue
coll_list = retry_until_ok(
self.main_connection[database].collection_names)
for coll in coll_list:
if coll.startswith("system"):
continue
namespace = "%s.%s" % (database, coll)
dump_set.append(namespace)
timestamp = util.retry_until_ok(self.get_last_oplog_timestamp)
if timestamp is None:
return None
long_ts = util.bson_ts_to_long(timestamp)
def docs_to_dump():
for namespace in dump_set:
logging.info("OplogThread: dumping collection %s"
% namespace)
database, coll = namespace.split('.', 1)
last_id = None
attempts = 0
sleep_sec = 1
# Loop to handle possible AutoReconnect
while attempts < 60:
target_coll = self.main_connection[database][coll]
if not last_id:
cursor = util.retry_until_ok(
target_coll.find,
fields=self._fields,
sort=[("_id", pymongo.ASCENDING)]
)
else:
cursor = util.retry_until_ok(
target_coll.find,
{"_id": {"$gt": last_id}},
fields=self._fields,
sort=[("_id", pymongo.ASCENDING)]
)
try:
for doc in cursor:
if not self.running:
raise StopIteration
doc["ns"] = self.dest_mapping.get(
namespace, namespace)
doc["_ts"] = long_ts
last_id = doc["_id"]
yield doc
break
except pymongo.errors.AutoReconnect:
attempts += 1
if (attempts%10 == 0): sleep_sec <<= 1 ## maybe we want some exponential backoff ???
time.sleep(sleep_sec)
# Extra threads (if any) that assist with collection dumps
dumping_threads = []
# Did the dump succeed for all target systems?
dump_success = True
# Holds any exceptions we can't recover from
errors = queue.Queue()
try:
for dm in self.doc_managers:
# Bulk upsert if possible
if hasattr(dm, "bulk_upsert"):
logging.debug("OplogThread: Using bulk upsert function for"
"collection dump")
# Slight performance gain breaking dump into separate
# threads, only if > 1 replication target
if len(self.doc_managers) == 1:
dm.bulk_upsert(docs_to_dump())
else:
def do_dump(error_queue):
all_docs = docs_to_dump()
try:
dm.bulk_upsert(all_docs)
except Exception:
# Likely exceptions:
# pymongo.errors.OperationFailure,
# mongo_connector.errors.ConnectionFailed
# mongo_connector.errors.OperationFailed
error_queue.put(sys.exc_info())
t = threading.Thread(target=do_dump, args=(errors,))
dumping_threads.append(t)
t.start()
else:
logging.debug("OplogThread: DocManager %s has not"
"bulk_upsert method. Upserting documents "
"serially for collection dump." % str(dm))
num = 0
for num, doc in enumerate(docs_to_dump()):
if num % 10000 == 0:
logging.debug("Upserted %d docs." % num)
dm.upsert(doc)
logging.debug("Upserted %d docs" % num)
# cleanup
for t in dumping_threads:
t.join()
except Exception:
# See "likely exceptions" comment above
errors.put(sys.exc_info())
# Print caught exceptions
try:
while True:
klass, value, trace = errors.get_nowait()
dump_success = False
traceback.print_exception(klass, value, trace)
except queue.Empty:
pass
if not dump_success:
err_msg = "OplogThread: Failed during dump collection"
effect = "cannot recover!"
logging.error('%s %s %s' % (err_msg, effect, self.oplog))
self.running = False
return None
return timestamp
def get_last_oplog_timestamp(self):
"""Return the timestamp of the latest entry in the oplog.
"""
if not self.namespace_set:
curr = self.oplog.find().sort(
'$natural', pymongo.DESCENDING
).limit(1)
else:
curr = self.oplog.find(
{'ns': {'$in': self.namespace_set}}
).sort('$natural', pymongo.DESCENDING).limit(1)
if curr.count(with_limit_and_skip=True) == 0:
return None
logging.debug("OplogThread: Last oplog entry has timestamp %d."
% curr[0]['ts'].time)
return curr[0]['ts']
def init_cursor(self):
"""Position the cursor appropriately.
The cursor is set to either the beginning of the oplog, or
wherever it was last left off.
"""
logging.debug("OplogThread: Initializing the oplog cursor.")
timestamp = self.read_last_checkpoint()
if timestamp is None and self.collection_dump:
timestamp = self.dump_collection()
if timestamp:
msg = "Dumped collection into target system"
logging.info('OplogThread: %s %s'
% (self.oplog, msg))
elif timestamp is None:
# set timestamp to top of oplog
timestamp = retry_until_ok(self.get_last_oplog_timestamp)
self.checkpoint = timestamp
cursor = self.get_oplog_cursor(timestamp)
if cursor is not None:
self.update_checkpoint()
return cursor
def update_checkpoint(self):
"""Store the current checkpoint in the oplog progress dictionary.
"""
with self.oplog_progress as oplog_prog:
oplog_dict = oplog_prog.get_dict()
oplog_dict[str(self.oplog)] = self.checkpoint
logging.debug("OplogThread: oplog checkpoint updated to %s" %
str(self.checkpoint))
def read_last_checkpoint(self):
"""Read the last checkpoint from the oplog progress dictionary.
"""
oplog_str = str(self.oplog)
ret_val = None
with self.oplog_progress as oplog_prog:
oplog_dict = oplog_prog.get_dict()
if oplog_str in oplog_dict.keys():
ret_val = oplog_dict[oplog_str]
logging.debug("OplogThread: reading last checkpoint as %s " %
str(ret_val))
return ret_val
def rollback(self):
"""Rollback target system to consistent state.
The strategy is to find the latest timestamp in the target system and
the largest timestamp in the oplog less than the latest target system
timestamp. This defines the rollback window and we just roll these
back until the oplog and target system are in consistent states.
"""
# Find the most recently inserted document in each target system
logging.debug("OplogThread: Initiating rollback sequence to bring "
"system into a consistent state.")
last_docs = []
for dm in self.doc_managers:
dm.commit()
last_docs.append(dm.get_last_doc())
# Of these documents, which is the most recent?
last_inserted_doc = max(last_docs,
key=lambda x: x["_ts"] if x else float("-inf"))
# Nothing has been replicated. No need to rollback target systems
if last_inserted_doc is None:
return None
# Find the oplog entry that touched the most recent document.
# We'll use this to figure where to pick up the oplog later.
target_ts = util.long_to_bson_ts(last_inserted_doc['_ts'])
last_oplog_entry = util.retry_until_ok(
self.oplog.find_one,
{'ts': {'$lte': target_ts}},
sort=[('$natural', pymongo.DESCENDING)]
)
logging.debug("OplogThread: last oplog entry is %s"
% str(last_oplog_entry))
# The oplog entry for the most recent document doesn't exist anymore.
# If we've fallen behind in the oplog, this will be caught later
if last_oplog_entry is None:
return None
# rollback_cutoff_ts happened *before* the rollback
rollback_cutoff_ts = last_oplog_entry['ts']
start_ts = util.bson_ts_to_long(rollback_cutoff_ts)
# timestamp of the most recent document on any target system
end_ts = last_inserted_doc['_ts']
for dm in self.doc_managers:
rollback_set = {} # this is a dictionary of ns:list of docs
# group potentially conflicted documents by namespace
for doc in dm.search(start_ts, end_ts):
if doc['ns'] in rollback_set:
rollback_set[doc['ns']].append(doc)
else:
rollback_set[doc['ns']] = [doc]
# retrieve these documents from MongoDB, either updating
# or removing them in each target system
for namespace, doc_list in rollback_set.items():
# Get the original namespace
original_namespace = namespace
for source_name, dest_name in self.dest_mapping.items():
if dest_name == namespace:
original_namespace = source_name
database, coll = original_namespace.split('.', 1)
obj_id = bson.objectid.ObjectId
bson_obj_id_list = [obj_id(doc['_id']) for doc in doc_list]
to_update = util.retry_until_ok(
self.main_connection[database][coll].find,
{'_id': {'$in': bson_obj_id_list}},
fields=self._fields
)
#doc list are docs in target system, to_update are
#docs in mongo
doc_hash = {} # hash by _id
for doc in doc_list:
doc_hash[bson.objectid.ObjectId(doc['_id'])] = doc
to_index = []
def collect_existing_docs():
for doc in to_update:
if doc['_id'] in doc_hash:
del doc_hash[doc['_id']]
to_index.append(doc)
retry_until_ok(collect_existing_docs)
#delete the inconsistent documents
logging.debug("OplogThread: Rollback, removing inconsistent "
"docs.")
remov_inc = 0
for doc in doc_hash.values():
try:
dm.remove(doc)
remov_inc += 1
logging.debug("OplogThread: Rollback, removed %s " %
str(doc))
except errors.OperationFailed:
logging.warning(
"Could not delete document during rollback: %s "
"This can happen if this document was already "
"removed by another rollback happening at the "
"same time." % str(doc)
)
logging.debug("OplogThread: Rollback, removed %d docs." %
remov_inc)
#insert the ones from mongo
logging.debug("OplogThread: Rollback, inserting documents "
"from mongo.")
insert_inc = 0
fail_insert_inc = 0
for doc in to_index:
doc['_ts'] = util.bson_ts_to_long(rollback_cutoff_ts)
doc['ns'] = self.dest_mapping.get(namespace, namespace)
try:
insert_inc += 1
dm.upsert(doc)
except errors.OperationFailed as e:
fail_insert_inc += 1
logging.error("OplogThread: Rollback, Unable to "
"insert %s with exception %s"
% (doc, str(e)))
logging.debug("OplogThread: Rollback, Successfully inserted %d "
" documents and failed to insert %d"
" documents. Returning a rollback cutoff time of %s "
% (insert_inc, fail_insert_inc, str(rollback_cutoff_ts)))
return rollback_cutoff_ts
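
# ---------------------------------------------------------------------------
# Illustrative sketch (added for documentation, not part of the original
# module): how an OplogThread is typically wired up against a single replica
# set. The URIs, the namespace and the LockingDict import path are
# assumptions for the example, not values taken from this file; ``dm`` can be
# any doc manager instance. The helper is defined but never called here.
# ---------------------------------------------------------------------------
def _example_build_oplog_thread(dm):  # pragma: no cover
    from pymongo import MongoClient
    from mongo_connector.locking_dict import LockingDict

    primary = MongoClient('mongodb://localhost:27017')
    thread = OplogThread(
        primary_conn=primary,
        main_address='mongodb://localhost:27017',
        oplog_coll=primary['local']['oplog.rs'],
        is_sharded=False,
        doc_manager=dm,
        oplog_progress_dict=LockingDict(),
        namespace_set=['test.coll'],
        auth_key=None,
        auth_username=None,
        repl_set='rs0')
    thread.start()   # begin tailing the oplog in a background thread
    return thread    # callers should eventually call thread.join()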
| apache-2.0 | 8,939,902,249,170,891,000 | 41.091644 | 132 | 0.493724 | false |
arauzoliver/uip-iiig2016-prog3 | FinalPC3/noteapp.py | 1 | 1122 | import bottle
import pymongo
import book
"""
Default route for the index page
"""
@bottle.route('/')
def book_index():
mynames_list = book.find_names()
return bottle.template('index', dict(mynames = mynames_list))
"""
Handle POSTs of new guest entries to be inserted into MongoDB
"""
@bottle.route('/newguest', method='POST')
def insert_newguest():
name = bottle.request.forms.get("name")
email = bottle.request.forms.get("email")
book.insert_name(name,email)
bottle.redirect('/')
"""
Configure the data connection
"""
"""
Connection string to the local MongoDB server
"""
connection_string = "mongodb://localhost"
"""
Manages the connection between MongoDB and PyMongo; PyMongo handles our connection pool
"""
connection = pymongo.MongoClient(connection_string)
#Now we want to set a context to the names database we created using the mongo interactive shell
"""
Get a handle to the 'names' database we created using the mongo interactive shell
"""
database = connection.names
"""
Attach the data-access object bound to the database
"""
book = book.book(database)
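"""
For reference only (illustrative, not the actual implementation): a minimal
sketch of the ``book`` module imported above. It wraps the database handle and
exposes the two calls used by the routes, find_names() and insert_name(); the
collection name is an assumption.

    class book(object):
        def __init__(self, database):
            self.database = database

        def find_names(self):
            return list(self.database.names.find())

        def insert_name(self, name, email):
            self.database.names.insert_one({'name': name, 'email': email})
"""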
bottle.debug(True)
bottle.run(host='localhost', port=8082) | mit | 6,061,514,939,707,263,000 | 20.039216 | 96 | 0.697861 | false |
lhuriguen/tophandball | utils/models.py | 1 | 1767 | import urllib
import json
from decimal import Decimal
from django.db import models
class Marker(models.Model):
"""
    Abstract model that provides geocoding for models with an address.
"""
address = models.CharField(max_length=200, blank=True,
help_text="Separate address items with commas.")
latitude = models.DecimalField(max_digits=8, decimal_places=6,
null=True, blank=True)
longitude = models.DecimalField(max_digits=9, decimal_places=6,
null=True, blank=True)
class Meta:
abstract = True
def __init__(self, *args, **kwargs):
super(Marker, self).__init__(*args, **kwargs)
self._original_address = self.address
def save(self, *args, **kwargs):
if self._original_address != self.address:
self.latitude, self.longitude = 0, 0
if self.address and (not self.latitude or not self.longitude):
self.latitude, self.longitude = self.geocode(self.address)
# print self.latitude, self.longitude
super(Marker, self).save(*args, **kwargs)
def geocode(self, address):
address = urllib.quote_plus(address.encode('utf-8'))
base_url = "http://maps.googleapis.com/maps/api/geocode/json?"
request = base_url + "address=%s" % address
if self.country:
request += "®ion=%s" % self.country.code
data = json.loads(urllib.urlopen(request).read())
if data['status'] == 'OK':
latitude = data['results'][0]['geometry']['location']['lat']
longitude = data['results'][0]['geometry']['location']['lng']
return Decimal(latitude), Decimal(longitude)
return 0, 0
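
# Illustrative usage (added for documentation, not part of the original app):
# a concrete model mixing in Marker. geocode() above reads ``self.country``,
# so concrete subclasses are expected to provide a ``country`` field with a
# ``code`` attribute (django-countries' CountryField is assumed here).
#
# from django_countries.fields import CountryField
#
# class Place(Marker):
#     name = models.CharField(max_length=100)
#     country = CountryField(blank=True)
#
# place = Place(name='Example', address='1 Main St, Springfield')
# place.save()   # geocodes on save because latitude/longitude are still unset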
| mit | 1,176,390,992,221,440,800 | 38.266667 | 79 | 0.594228 | false |
chop-dbhi/django-concerns | concerns/migrations/0004_auto__chg_field_concern_reporter.py | 1 | 4938 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Concern.reporter'
db.alter_column('concerns_concern', 'reporter_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['auth.User']))
def backwards(self, orm):
# Changing field 'Concern.reporter'
db.alter_column('concerns_concern', 'reporter_id', self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['auth.User']))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'concerns.concern': {
'Meta': {'ordering': "('created',)", 'object_name': 'Concern'},
'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'document': ('django.db.models.fields.TextField', [], {}),
'headers': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {}),
'reporter': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reported_concerns'", 'null': 'True', 'to': "orm['auth.User']"}),
'resolution': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'resolver': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resolved_conerns'", 'null': 'True', 'to': "orm['auth.User']"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'New'", 'max_length': '100'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['concerns']
| bsd-2-clause | -4,626,761,653,977,192,000 | 66.643836 | 182 | 0.560753 | false |
JP-Ellis/django-select2-forms | select2/views.py | 1 | 6241 | import copy
import json
from django.apps import apps
from django.db import models
from django.forms.models import ModelChoiceIterator
from django.http import HttpResponse
from django.utils.encoding import force_text
import logging
logger = logging.getLogger(__name__)
class ViewException(Exception):
pass
class InvalidParameter(ViewException):
pass
class JsonResponse(HttpResponse):
callback = None
def __init__(self, content='', callback=None, content_type="application/json", *args, **kwargs):
if not isinstance(content, str):
content = json.dumps(content)
if callback is not None:
self.callback = callback
if self.callback is not None:
content = u"%s(\n%s\n)" % (self.callback, content)
content_type = "text/javascript"
return super(JsonResponse, self).__init__(
content=content,
content_type=content_type,
*args,
**kwargs)
class Select2View(object):
def __init__(self, app_label, model_name, field_name):
self.app_label = app_label
self.model_name = model_name
self.field_name = field_name
_field = None
def get_field_and_model(self):
model_cls = apps.get_model(self.app_label, self.model_name)
if model_cls is None:
raise ViewException('Model %s.%s does not exist' % (self.app_label, self.model_name))
if self._field is None:
self._field = model_cls._meta.get_field(self.field_name)
return self._field, model_cls
def get_response(self, data, request, **kwargs):
callback = request.GET.get('callback', None)
if callback is None:
response_cls = JsonResponse
else:
response_cls = type('JsonpResponse', (JsonResponse,), {
'callback': callback,
})
return response_cls(data, **kwargs)
def get_data(self, queryset, page=None, page_limit=None):
field, model_cls = self.get_field_and_model()
        # Check for the existence of a callable %s_queryset method on the
        # model class and use it to filter the Select2 queryset.
        #
        # This is useful for model inheritance where the limit_choices_to can
        # not easily be overridden in child classes.
model_queryset_method = '%s_queryset' % field.name
if callable(getattr(model_cls, model_queryset_method, None)):
queryset = getattr(model_cls, model_queryset_method)(queryset)
formfield = field.formfield()
total_count = None
if page is not None and page_limit is not None:
total_count = queryset.count()
offset = (page - 1) * page_limit
end = offset + page_limit
queryset = queryset[offset:end]
else:
offset = None
formfield.queryset = queryset
iterator = ModelChoiceIterator(formfield)
if offset is None:
total_count = len(iterator)
more = False
else:
paged_count = offset + len(iterator)
more = bool(paged_count < total_count)
data = {
'total': total_count,
'more': more,
'results': [],
}
for value, label in iterator:
            if value == u'':
continue
data['results'].append({
'id': value,
'text': label,
})
return data
def init_selection(self, pks, is_multiple=False):
field, model_cls = self.get_field_and_model()
pks = [int(pk) for pk in pks]
queryset = field.queryset.filter(**{
('{}__in'.format(field.rel.get_related_field().name)): pks,
}).distinct()
pk_ordering = dict([(force_text(pk), i) for i, pk in enumerate(pks)])
data = self.get_data(queryset)
# Make sure we return in the same order we were passed
def results_sort_callback(item):
pk = force_text(item['id'])
return pk_ordering[pk]
data['results'] = sorted(data['results'], key=results_sort_callback)
return data['results']
def fetch_items(self, request):
try:
field, model_cls = self.get_field_and_model()
except ViewException as e:
return self.get_response({'error': str(e)}, request, status=500)
queryset = copy.deepcopy(field.queryset)
q = request.GET.get('q', None)
page_limit = request.GET.get('page_limit', 10)
page = request.GET.get('page', 1)
try:
if q is None:
return self.get_response({"results": [], "total": 0, "more": False}, request)
try:
page_limit = int(page_limit)
except TypeError:
raise InvalidParameter("Invalid page_limit '%s' passed" % page_limit)
else:
if page_limit < 1:
raise InvalidParameter("Invalid page_limit '%s' passed" % page_limit)
try:
page = int(page)
except TypeError:
raise InvalidParameter("Invalid page '%s' passed")
else:
if page < 1:
raise InvalidParameter("Invalid page '%s' passed")
except InvalidParameter as e:
return self.get_response({'error': str(e)}, request, status=500)
search_field = field.search_field
if callable(search_field):
search_field = search_field(q)
if isinstance(search_field, models.Q):
q_obj = search_field
else:
qset_contains_filter_key = '%(search_field)s__%(insensitive)scontains' % {
'search_field': search_field,
'insensitive': 'i' if not field.case_sensitive else '',
}
q_obj = models.Q(**{qset_contains_filter_key: q})
queryset = queryset.filter(q_obj)
data = self.get_data(queryset, page, page_limit)
return self.get_response(data, request)
def fetch_items(request, app_label, model_name, field_name):
view_cls = Select2View(app_label, model_name, field_name)
return view_cls.fetch_items(request)
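
# Illustrative wiring (added for documentation, not part of the original
# module): how fetch_items might be exposed in a urls.py. The URL pattern and
# its name are assumptions for the example, not taken from this package.
#
# from django.conf.urls import url
# from select2 import views
#
# urlpatterns = [
#     url(r'^fetch_items/(?P<app_label>[\w\-]+)/(?P<model_name>[\w\-]+)/'
#         r'(?P<field_name>[\w\-]+)/$',
#         views.fetch_items, name='select2_fetch_items'),
# ]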
| bsd-2-clause | 2,370,086,864,127,999,500 | 32.553763 | 100 | 0.573145 | false |
bearstech/nuka | nuka/task.py | 1 | 20281 | # Copyright 2017 by Bearstech <[email protected]>
#
# This file is part of nuka.
#
# nuka is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# nuka is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with nuka. If not, see <http://www.gnu.org/licenses/>.
import time
import base64
import codecs
import inspect
import asyncio
import logging
import importlib
import asyncssh.misc
from nuka.remote.task import RemoteTask
from nuka.configuration import config
from nuka import remote
from nuka import utils
from nuka import gpg
import nuka
class Base(asyncio.Future):
def __init__(self, **kwargs):
self.initialize(**kwargs)
super().__init__(loop=self.host.loop)
if self.host.cancelled():
self.cancel()
else:
self.process()
def initialize(self, host=None,
switch_user=None, switch_ssh_user=None, **args):
meta = {'filename': None, 'lineno': None,
'start': time.time(), 'times': [],
'remote_calls': [],
}
for infos in inspect.stack(2):
f = infos.frame
if isinstance(f.f_locals.get('self'), RemoteTask):
continue
if host is None:
host = f.f_locals.get('host')
if switch_user is None:
switch_user = f.f_locals.get('switch_user')
if switch_ssh_user is None:
switch_ssh_user = f.f_locals.get('switch_ssh_user')
if meta['filename'] is None:
filename = infos.filename
if filename.endswith('nuka/task.py'):
filename = 'nuka/task.py'
meta.update(filename=filename,
lineno=infos.lineno)
if host is not None:
break
if host is None: # pragma: no cover
raise RuntimeError('No valid host found in the stack')
self.switch_user = switch_user
self.switch_ssh_user = switch_ssh_user
self.meta = meta
self.host = host
self.loop = self.host.loop
self.args = args
self.res = {'changed': True, 'rc': 0}
self.start = time.time()
self.run_task = None
host.add_task(self)
def running(self):
"""return True if a remote task is running"""
if self.run_task is not None:
return not self.run_task.done()
return False
def process(self, fut=None):
if fut is not None: # pragma: no cover
# we waited for boot
self.meta['start'] = time.time()
start = time.time()
try:
self.pre_process()
except Exception as e:
self.host.log.exception(e)
self.cancel()
raise
else:
duration = time.time() - start
if duration > .05: # pragma: no cover
self.host.add_time(
start=start, time=duration,
type='pre_process', task=self)
self.run_task = self._loop.create_task(self._run())
def pre_process(self):
"""run locally before anything is sent to the host"""
def post_process(self):
"""run when we get a valid reply from the host"""
def render_template(self, fd):
"""render a template from a file descriptor:
.. code-block:: python
{'src': path, 'dst': path}
"""
src = fd['src']
ctx = dict(self.args, **self.args.get('ctx', {}))
ctx.update(host=self.host, env=config, **fd)
engine = config.get_template_engine()
template = engine.get_template(src)
fd['data'] = template.render(ctx)
if 'executable' not in fd:
fd['executable'] = utils.isexecutable(src)
def render_file(self, fd):
"""render a file from a file descriptor. A file descriptor is a dict:
.. code-block:: python
{'src': path, 'dst': path}
"""
src = fd['src']
if src.endswith('.gpg'):
_, data = gpg.decrypt(src, 'utf8')
elif src.endswith(utils.ARCHIVE_EXTS):
with open(src, 'rb',) as fd_:
data = fd_.read()
data = base64.b64encode(data).decode('utf8')
else:
with codecs.open(src, 'r', 'utf8') as fd_:
data = fd_.read()
fd['data'] = data
if 'executable' not in fd:
fd['executable'] = utils.isexecutable(src)
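    # Illustrative note (added for documentation): render_file/render_template
    # receive file descriptors such as {'src': 'templates/app.conf',
    # 'dst': '/etc/app.conf'} and fill in fd['data'] (the rendered content)
    # and fd['executable'] in place; the paths are example values.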
def log(self):
self.host.log.info(self)
def cancel(self):
"""cancel a task"""
if not self.cancelled():
super().cancel()
if not self.res.get('signal') and not self.host.failed():
# do not log cancellation if the user wanted it
self.log()
if self.run_task is not None:
self.run_task.cancel()
self.host.cancel()
async def _run(self):
# wrap the task to catch exception
try:
await self.run()
except Exception as e:
self.cancel()
if not isinstance(e, asyncio.CancelledError):
self.host.log.exception5(self)
# update meta
self.meta.update(self.res.pop('meta', {}))
# if task succeded then run post_process
start = time.time()
try:
self.post_process()
except Exception:
self.cancel()
self.host.log.exception5(self)
finally:
duration = time.time() - start
if duration > .05:
self.host.add_time(
start=start, time=duration,
type='post_process', task=self)
# set result / log stuff
if not self.done():
self.set_result(self)
self.meta.setdefault('time', time.time() - self.meta['start'])
self.host.add_time(type='task', task=self, **self.meta)
# log if not cancelled
if not self.cancelled():
self.log()
def __bool__(self):
return self.res.get('rc') == 0
def __repr__(self):
return '<{0}>'.format(str(self))
def __str__(self):
name = self.__class_name__()
instance_name = self.args.get('name')
if instance_name is None:
instance_name = '-'
s = '{0}({1})'.format(name, instance_name)
if self.res:
if self.res['rc'] == 0:
if self.cancelled():
s += ' cancelled at {filename}:{lineno}'.format(
**self.meta)
elif self.done() and getattr(self, 'changed', True):
s += ' changed'
else:
s += ' fail({0[rc]})'.format(self.res)
time = self.meta.get('local_time')
if time:
s += ' time({0}s)'.format(round(time, 1))
return s.strip()
def __class_name__(self):
klass = self.__class__
name = '{0}.{1}'.format(klass.__module__.split('.')[-1],
klass.__name__)
return name
class Task(Base, RemoteTask):
def process(self):
if self.host.cancelled():
self.cancel()
else:
diff_mode = self.args.get('diff_mode', nuka.cli.args.diff)
if diff_mode:
# ignore diff call if the task do not support it
attr = getattr(self, 'diff', None)
if attr in (None, False):
self.res['changed'] = False
self.meta['local_time'] = 0.
if attr is False:
self.host.log.info("{0}.diff is False".format(self))
else:
self.host.log.warning("{0}.diff is None".format(self))
self.set_result(self)
return
if self.host.fully_booted.done():
super().process()
else:
# use asyncio with callback since we are in a sync __init__
task = self.loop.create_task(wait_for_boot(self.host))
task.add_done_callback(super().process)
async def run(self):
"""Serialize the task, send it to the remote host.
The remote script will deserialize the task and run
:meth:`~nuka.remote.task.Task.do` (or diff() when using --diff)
"""
self.host.log.debug(self)
diff_mode = self.args.get('diff_mode', nuka.cli.args.diff)
klass = self.__class__
args = {}
for k, v in self.args.items():
if k not in ('ctx',):
args[k] = v
# prep stdin
stdin_data = dict(
task=(klass.__module__, klass.__name__),
remote_tmp=config['remote_tmp'],
switch_user=self.switch_user,
args=args,
check_mode=False,
diff_mode=diff_mode,
log_level=config['log']['levels']['remote_level'])
if config['testing'] and 'coverage' in self.host.vars:
# check if we can/want use coverage
cmd = (
'{coverage} run -p '
'--source={remote_dir}/nuka/tasks '
'{script} '
).format(coverage=self.host.vars['coverage'], **config)
else:
# use python
inventory = self.host.vars.get(
'inventory',
{'python': {'executable': 'python'}})
executable = inventory['python'].get('executable', 'python')
cmd = '{0} {script} '.format(executable, **config)
# allow to trac some ids from ps
cmd += '--deploy-id={0} --task-id={1}'.format(config['id'],
id(self))
# create process
proc = await self.host.create_process(
cmd, task=self,
switch_user=self.switch_user,
switch_ssh_user=self.switch_ssh_user)
# send stdin
zlib_avalaible = self.host.inventory['python']['zlib_available']
stdin = utils.proto_dumps_std(
stdin_data, proc.stdin,
content_type=zlib_avalaible and 'zlib' or 'plain')
await proc.stdin.drain()
res = {}
while res.get('message_type') != 'exit':
# wait for messages
try:
res = await proc.next_message()
except asyncio.CancelledError:
raise
except Exception as e:
self.cancel()
self.host.log.exception5(
'{0}\n\n{1}'.format(self, stdin))
else:
if res.get('message_type') == 'log':
self.host.log.log(res['level'], res['msg'])
# finalize
self.res.update(res)
if self.res['rc'] != 0 and not self.ignore_errors:
if not diff_mode:
self.cancel()
def log(self):
log = self.host.log
if 'exc' in self.res:
exc = '\n' + ''.join(self.res['exc'])
log.error('{0}\n{1}'.format(self, exc))
elif self.res.get('stderr'):
if self.res.get('rc') != 0:
log.error('{0}\n{1}\n{2}'.format(
self, self.res.get('stdout', ''), self.res['stderr']))
elif self.res['changed']:
log.changed('{0}\n{1}'.format(self, self.res['stderr']))
else:
data = self.res.get('diff', '')
if data.strip():
data = data.strip()
log.changed('{0} diff=\n{1}\n'.format(self, data))
elif self.cancelled():
log.error(self)
elif self.res['changed']:
log.changed(self)
else:
log.info(self)
for cmd_ in self.meta.get('remote_calls', []):
rtime = round(cmd_['time'], 3)
inf = '^ sh({cmd}) time({rtime})'.format(
rtime=rtime, **cmd_)
if cmd_['exc']:
log.error(inf + '\n' + ''.join(cmd_['exc']))
elif nuka.cli.args.verbose > 1:
log.info(inf)
stds = {k: v for k, v in cmd_.items()
if k in ('stderr', 'stdout') and v}
if stds:
log.debug3('^ ' + str(stds))
data = self.res.get('log')
if data:
level = None
for line in data.rstrip().split('\n'):
line = line.rstrip()
try:
line_level, message = line.split(':', 1)
except ValueError:
if level:
log.log(level, '^ ' + line)
else:
if line_level in logging._nameToLevel:
level = getattr(logging, line_level)
log.log(level, '^ ' + message)
else:
if level:
log.log(level, '^ ' + line)
class SetupTask(Base):
changed = False
def log(self):
self.host.log.debug(self)
def pre_process(self):
self.args['name'] = '' # unamed task
def cancel(self):
super().cancel()
if not self.host.cancelled():
self.host.cancel()
self.host.log.critical(
'Cancelled at {filename}:{lineno}...'.format(**self.meta))
class boot(SetupTask):
"""A task that just call host.boot()"""
def __class_name__(self):
return 'boot'
async def run(self):
# wait for boot async
try:
await self.host.boot()
except Exception:
self.host.log.exception('boot')
self.meta['start'] = self.host._start
class setup(SetupTask):
"""A task that just wait for :class:`~nuka.task.boot` then setup the
host"""
setup_cmd = (
'{0}rm -Rf {2[remote_tmp]}; '
'{0}mkdir -p {2[remote_tmp]} && {0}chmod 777 {2[remote_tmp]} &&'
'{0}mkdir -p {2[remote_dir]} &&'
'dd bs={1} count=1 | {0}tar -xz -C {2[remote_dir]} && '
'{0}`which python 2> /dev/null || which python3 || echo python` '
'{2[script]} --setup'
)
def __class_name__(self):
return 'setup'
async def run(self):
host = self.host
# wait for boot async
await host._named_tasks[boot.__name__]
self.meta['start'] = time.time()
# run bootstrap_command if any
if host.bootstrap_command:
res = await host.run_command(host.bootstrap_command)
if res['rc'] != 0:
self.host.log.error(res)
self.cancel()
return
# setup
sudo = ''
if host.use_sudo:
sudo = '{sudo} '.format(**config)
cmd = self.setup_cmd.format(sudo, '{bytes}', config)
mods = nuka.config['inventory_modules'][:]
mods += self.host.vars.get('inventory_modules', [])
if mods:
cmd += ' ' + ' '.join(['--inventory=' + m for m in mods])
stdin = remote.build_archive(
extra_classes=all_task_classes(),
mode='x:gz')
c = cmd.format(bytes=len(stdin))
host.log.debug('Uploading archive ({0}kb)...'.format(
int(len(stdin) / 1000)))
try:
proc = await self.host.create_process(c, task=self)
proc.stdin.write(stdin)
await proc.stdin.drain()
except (LookupError, OSError, asyncssh.misc.Error) as e:
if isinstance(e, asyncssh.misc.Error):
e = LookupError(str(e), self.host)
self.host.log.error(e.args[0])
self.host.fail(e)
return
res = {}
while res.get('message_type') != 'exit':
# wait for messages
try:
res = await proc.next_message()
except asyncio.CancelledError:
raise
except (LookupError, OSError) as e:
self.host.log.error(e.args[0])
self.host.fail(e)
return
except Exception as e:
self.cancel()
self.host.log.exception5(
'{0}\n\n{1}'.format(self, stdin))
else:
if res.get('message_type') == 'log':
self.host.log.log(res['level'], res['msg'])
self.res.update(res)
if self.res['rc'] != 0:
self.cancel()
host.vars['inventory'] = self.res['inventory']
for name in mods:
mod = importlib.import_module(name)
meth = getattr(mod, 'finalize_inventory', None)
if meth is not None:
meth(host.vars['inventory'])
host.log.debug(
'Inventory:\n{0}'.format(host.vars['inventory']))
if not host.fully_booted.done():
host.fully_booted.set_result(True)
class teardown(SetupTask):
"""remove `remote_dir` from the host"""
teardown_cmd = '{0}rm -Rf {1[remote_dir]}'
def __init__(self, host):
host._cancelled = False
super().__init__(host=host)
def __class_name__(self):
return 'teardown'
async def run(self):
if not self.host.failed():
sudo = self.host.use_sudo and 'sudo ' or ''
cmd = self.teardown_cmd.format(sudo, config)
await self.host.run_command(cmd, task=self)
class destroy(SetupTask):
"""destroy the host"""
def __init__(self, host):
host._cancelled = False
super().__init__(host=host)
def __class_name__(self):
return 'destroy'
async def run(self):
if 'destroyed' not in self.host.vars:
await self.host.destroy()
class wait(Base):
"""A task that wait for a coroutine / event / future:
.. code-block:: python
nuka.wait(do_something(host), event)
You can use a timeout:
.. code-block:: python
nuka.wait(event, timeout=30)
"""
def __class_name__(self):
return 'wait'
def __init__(self, future, *futures, **kwargs):
futures = list(futures)
if not isinstance(future, list):
futures.insert(0, future)
else:
futures[0:0] = future
kwargs['name'] = repr(futures)
kwargs['futures'] = futures
super().__init__(**kwargs)
async def run(self):
futures = self.args['futures']
res = await asyncio.wait_for(asyncio.gather(*futures),
timeout=self.args.get('timeout'))
self.set_result(res)
self.meta.setdefault('time', time.time() - self.meta['start'])
        # skip time if we don't wait for a nuka.Event
events = [e for e in res if isinstance(e, nuka.Event)]
if events:
self.host.add_time(type='task', task=self, **self.meta)
async def wait_for_boot(host):
if not host.fully_booted.done():
create_setup_tasks(host)
task = host._named_tasks[setup.__name__]
if not task.done():
await task
def create_setup_tasks(host):
if not host.fully_booted.done():
for task in (boot, setup):
instance = host._named_tasks.get(task.__name__)
if instance is None:
instance = task(host=host)
host._named_tasks[task.__name__] = instance
host._tasks.append(instance)
def get_task_from_stack():
for info in inspect.stack(3):
f = info.frame
self = f.f_locals.get('self')
if isinstance(f.f_locals.get('self'), Base):
return self
def all_task_classes(cls=Task):
for klass in cls.__subclasses__():
yield from all_task_classes(klass)
yield klass
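
# Note (added for documentation): all_task_classes() walks the whole Task
# subclass tree; setup() above relies on it to bundle every task
# implementation into the archive shipped to the remote host via
# remote.build_archive(extra_classes=all_task_classes()).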
| gpl-3.0 | 3,238,277,962,347,360,000 | 31.346093 | 78 | 0.506139 | false |
sevaivanov/ring-api | tests/rest-api/0.1/unit-tests.py | 1 | 18477 | #!/usr/bin/env python3
import unittest
import requests
import json
API_URL = 'http://127.0.0.1:8080/api/v1'
def print_json(data):
print(json.dumps(data, sort_keys=True, indent=4))
class TestAccount(unittest.TestCase):
def test_accounts_get(self):
print("\nGET /accounts/")
res = requests.get(API_URL + '/accounts/')
res = res.json()
self.assertTrue('status' in res)
self.assertEqual(res['status'], 200)
def test_account_get(self):
print("\nGET /account/")
res = requests.get(
API_URL + '/account/',
params={'type': 'SIP'}
)
res = res.json()
self.assertTrue('status' in res)
self.assertEqual(res['status'], 200)
self.assertTrue('details' in res)
details = res['details']
self.assertTrue('Account.type' in details)
self.assertEqual(details['Account.type'], 'SIP')
res = requests.get(
API_URL + '/account/',
params={'type': 'RING'}
)
res = res.json()
self.assertTrue('status' in res)
self.assertEqual(res['status'], 200)
self.assertTrue('details' in res)
details = res['details']
self.assertTrue('Account.type' in details)
self.assertEqual(details['Account.type'], 'RING')
res = requests.get(
API_URL + '/account/',
params={'type': 'stuff'}
)
res = res.json()
self.assertTrue('status' in res)
self.assertEqual(res['status'], 400)
self.assertFalse('details' in res)
def test_account_post(self):
print("\nPOST /account/")
req = requests.get(API_URL + '/account/?type=RING')
req = req.json()
req['details']['Account.alias'] = "Unittest"
res = requests.post(
"http://localhost:8080/account/",
data=json.dumps(req)
)
res = res.json()
self.assertTrue('account_id' in res)
self.assertTrue('status' in res)
self.test_RING_account = res['account_id']
def test_account_details_get(self):
print("\nGET /accounts/<account_id>/details")
res = requests.get(API_URL + '/accounts/')
res = res.json()
accounts = res['accounts']
for account in accounts:
res = requests.get(
API_URL + '/accounts/' + account + '/details/',
params={'type': 'default'}
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('details' in res)
for account in accounts:
res = requests.get(
API_URL + '/accounts/' + account + '/details/',
params={'type' : 'volatile'}
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('details' in res)
def test_account_details_put(self):
print("\nPUT /accounts/<account_id>/details")
res = requests.get(API_URL + '/accounts/')
res = res.json()
accounts = res['accounts']
for account in accounts:
res = requests.get(
API_URL + '/accounts/' + account + '/details/',
params={'type': 'default'}
)
res = res.json()
self.assertEqual(res['status'], 200)
details = res['details']
res = requests.put(
API_URL + '/accounts/' + account + '/details/',
data=json.dumps({'details': details})
)
res = res.json()
self.assertEqual(res['status'], 200)
def test_account_delete(self):
print("\nDELETE /accounts/<account_id>")
res = requests.get(API_URL + '/accounts/')
res = res.json()
accounts = res['accounts']
for account in accounts:
res = requests.get(
API_URL + '/accounts/' + account + '/details/',
params={'type': 'default'}
)
res = res.json()
self.assertEqual(res['status'], 200)
if (res['details']['Account.alias'] == "Unittest"):
res = requests.delete(
API_URL + '/accounts/' + account + '/'
)
res = res.json()
self.assertEqual(res['status'], 200)
def test_account_ciphers_get(self):
print("\nGET /accounts/<account_id>/ciphers/")
res = requests.get(API_URL + '/accounts/')
res = res.json()
accounts = res['accounts']
for account in accounts:
res = requests.get(
API_URL + '/accounts/' + account + '/ciphers/'
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('ciphers' in res)
def test_account_codecs_get(self):
print("\nGET /accounts/<account_id>/codecs/")
res = requests.get(API_URL + '/accounts/')
res = res.json()
accounts = res['accounts']
for account in accounts:
res = requests.get(
API_URL + '/accounts/' + account + '/codecs/'
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('codecs' in res)
def test_account_codecs_put(self):
print("\nPUT /accounts/<account_id>/codecs/")
res = requests.get(API_URL + '/accounts/')
res = res.json()
accounts = res['accounts']
for account in accounts:
res = requests.get(
API_URL + '/accounts/' + account + '/codecs/'
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('codecs' in res)
codecs = res['codecs']
res = requests.put(
API_URL + '/accounts/' + account + '/codecs/',
data=json.dumps({'codecs': codecs})
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('codecs' in res)
def test_account_codec_details_get(self):
print("\nGET /accounts/<account_id>/codecs/<codec_id>")
res = requests.get(API_URL + '/accounts/')
res = res.json()
accounts = res['accounts']
for account in accounts:
res = requests.get(
API_URL + '/accounts/' + account + '/codecs/'
)
res = res.json()
self.assertEqual(res['status'], 200)
codecs = res['codecs']
for codec in codecs:
res = requests.get(
API_URL + '/accounts/' +
account +
'/codecs/' +
str(codec) + '/'
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('details' in res)
def test_account_codec_details_put(self):
print("\nPUT /accounts/<account_id>/codecs/<codec_id>")
res = requests.get(API_URL + '/accounts/')
res = res.json()
accounts = res['accounts']
for account in accounts:
res = requests.get(
API_URL + '/accounts/' + account + '/codecs/'
)
res = res.json()
self.assertEqual(res['status'], 200)
codecs = res['codecs']
for codec in codecs:
res = requests.get(
API_URL + '/accounts/' +
account +
'/codecs/' +
str(codec) + '/'
)
res = res.json()
self.assertEqual(res['status'], 200)
details = res['details']
res = requests.get(
API_URL + '/accounts/' +
account +
'/codecs/' +
str(codec) + '/',
data=json.dumps({'details': details})
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('details' in res)
def test_account_certificates_get(self):
print("\nGET /accounts/<account_id>/certificates/<cert_id>")
res = requests.get(API_URL + '/accounts/')
res = res.json()
accounts = []
for account in res['accounts']:
res = requests.get(
API_URL + '/accounts/' + account + '/details/',
{'type': 'default'}
)
res = res.json()
self.assertEqual(res['status'], 200)
if (res['details']['Account.alias'] == "Unittest"):
res = requests.get(
API_URL + '/accounts/' +
account +
'/certificates/fa5c04850341c00ba074518db52ee6745bb49bc1/',
params={'action': 'pin'}
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertEqual(res['success'], True)
res = requests.get(
API_URL + '/accounts/' +
account +
'/certificates/fa5c04850341c00ba074518db52ee6745bb49bc1/',
params={'action': 'validate'}
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('certificates' in res)
def test_account_certificates_put(self):
print("\nPUT /accounts/<account_id>/certificates/<cert_id>")
res = requests.get(API_URL + '/accounts/')
res = res.json()
accounts = []
for account in res['accounts']:
res = requests.get(
API_URL + '/accounts/' + account + '/details/',
params={'type': 'default'}
)
res = res.json()
self.assertEqual(res['status'], 200)
if (res['details']['Account.alias'] == "Unittest"):
res = requests.put(
API_URL + '/accounts/' +
account +
'/certificates/fa5c04850341c00ba074518db52ee6745bb49bc1/',
data=json.dumps({'status': 'ALLOWED'})
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertEqual(res['success'], True)
class TestCodec(unittest.TestCase):
def test_codecs(self):
print("\nGET /codecs/")
res = requests.get(API_URL + '/codecs/')
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('codecs' in res)
class TestCrypto(unittest.TestCase):
def test_crypto_tls(self):
print("\nGET /crypto/tls/")
res = requests.get(
API_URL + '/crypto/tls/',
{'type': 'settings'}
)
res = res.json()
self.assertTrue('settings' in res)
self.assertTrue('status' in res)
res = requests.get(
API_URL + '/crypto/tls/',
{'type': 'method'}
)
res = res.json()
self.assertTrue('methods' in res)
self.assertTrue('status' in res)
class TestCertificates(unittest.TestCase):
def test_certificates_get(self):
print("\nGET /certificates/")
res = requests.get(API_URL + '/certificates/')
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('pinned' in res)
def test_certificate_get(self):
print("\nGET /certificate/<cert_id>/")
res = requests.get(API_URL + '/certificates/')
res = res.json()
pinned = res['pinned']
for certificate in pinned:
res = requests.get(
API_URL + '/certificates/' + certificate + '/'
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('details' in res)
def test_certificate_post(self):
print("\nPOST /certificate/<cert_id>/")
res = requests.get(API_URL + '/certificates/')
res = res.json()
pinned = res['pinned']
for certificate in pinned:
res = requests.post(
API_URL + '/certificates/' + certificate + '/',
data=json.dumps({'action': 'pin', 'local': 'True'})
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('action' in res)
res = requests.post(
API_URL + '/certificates/' + certificate + '/',
data=json.dumps({'action': 'unpin'})
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('action' in res)
class TestAudio(unittest.TestCase):
def test_audio_plugins_get(self):
print("\nGET /audio/plugins/")
res = requests.get(API_URL + '/audio/plugins/')
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('plugins' in res)
class TestVideo(unittest.TestCase):
def test_video_device_get(self):
print("\nGET /video/devices/")
res = requests.get(
API_URL + '/video/devices/',
{'type': 'all'}
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('devices' in res)
res = requests.get(
API_URL + '/video/devices/',
{'type': 'default'}
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('default' in res)
def test_video_device_put(self):
print("\nPUT /video/devices/")
res = requests.get(
API_URL + '/video/devices/',
{'type': 'default'}
)
res = res.json()
default = res['default']
res = requests.put(
API_URL + '/video/devices/',
params={'type': 'default'},
data=json.dumps({'device': default})
)
res = res.json()
def test_video_settings_get(self):
print("\nGET /video/<device_name>/settings/")
res = requests.get(
API_URL + '/video/devices/',
{'type': 'default'}
)
res = res.json()
default = res['default']
res = requests.get(
API_URL + '/video/' + default + '/settings/'
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('settings' in res)
def test_video_settings_put(self):
print("\nPUT /video/<device_name>/settings/")
res = requests.get(
API_URL + '/video/devices/',
{'type': 'default'}
)
res = res.json()
default = res['default']
res = requests.get(
API_URL + '/video/' + default + '/settings/'
)
res = res.json()
settings = res['settings']
res = requests.put(
API_URL + '/video/' + default + '/settings/',
data=json.dumps({'settings': settings})
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('settings' in res)
def test_video_camera_get(self):
print("\nGET /video/camera/")
res = requests.get(API_URL + '/video/camera/')
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('cameraStatus' in res)
def test_video_camera_put(self):
print("\nPUT /video/camera/")
res = requests.put(
API_URL + '/video/camera/',
data=json.dumps({'action': 'start'})
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('cameraStatus' in res)
res = requests.put(
API_URL + '/video/camera/',
data=json.dumps({'action': 'stop'})
)
res = res.json()
self.assertEqual(res['status'], 200)
self.assertTrue('cameraStatus' in res)
def TestOrder():
suite = unittest.TestSuite()
suite.addTest(TestAccount('test_account_get'))
suite.addTest(TestAccount('test_accounts_get'))
#suite.addTest(TestAccount('test_account_post'))
suite.addTest(TestAccount('test_account_details_get'))
suite.addTest(TestAccount('test_account_details_put'))
suite.addTest(TestAccount('test_account_ciphers_get'))
suite.addTest(TestAccount('test_account_codecs_get'))
suite.addTest(TestAccount('test_account_codecs_put'))
suite.addTest(TestAccount('test_account_codec_details_get'))
suite.addTest(TestAccount('test_account_codec_details_put'))
suite.addTest(TestAccount('test_account_certificates_get'))
suite.addTest(TestAccount('test_account_certificates_put'))
suite.addTest(TestCodec('test_codecs'))
suite.addTest(TestCrypto('test_crypto_tls'))
suite.addTest(TestCertificates('test_certificates_get'))
suite.addTest(TestCertificates('test_certificate_get'))
suite.addTest(TestCertificates('test_certificate_post'))
suite.addTest(TestAudio('test_audio_plugins_get'))
suite.addTest(TestVideo('test_video_device_get'))
suite.addTest(TestVideo('test_video_device_put'))
suite.addTest(TestVideo('test_video_settings_get'))
suite.addTest(TestVideo('test_video_settings_put'))
suite.addTest(TestVideo('test_video_camera_get'))
suite.addTest(TestVideo('test_video_camera_put'))
suite.addTest(TestAccount('test_account_delete'))
return suite
def delete_test_data():
print("\nFlushing all remaining data")
res = requests.get(API_URL + '/accounts/')
res = res.json()
accounts = res['accounts']
for account in accounts:
res = requests.get(
API_URL + '/accounts/' + account + '/details/',
{'type': 'default'}
)
res = res.json()
if (res['details']['Account.alias'] == "Unittest"):
res = requests.delete(
API_URL + '/accounts/' + account + '/'
)
res = res.json()
if __name__ == '__main__':
runner = unittest.TextTestRunner(failfast=True)
runner.run(TestOrder())
delete_test_data()
| gpl-3.0 | -3,546,219,849,308,412,000 | 27.646512 | 78 | 0.507766 | false |
Designist/pybuilder | setup.py | 1 | 1823 | #!/usr/bin/env python
#
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script allows supporting installation via:
# pip install git+git://github.com/pybuilder/pybuilder.git@<branch>
#
# THIS IS A HACK, DO NOT RUN LOCALLY
#
import os
import subprocess
import sys
import glob
import shutil
script_dir = os.path.dirname(os.path.realpath(__file__))
build_script = os.path.join(script_dir, "build.py")
exit_code = 0
try:
subprocess.check_call([build_script, "clean", "install_dependencies", "publish"])
dist_dir = glob.glob(os.path.join(script_dir, "target", "dist", "*"))[0]
for src_file in glob.glob(os.path.join(dist_dir, "*")):
file_name = os.path.basename(src_file)
target_file_name = os.path.join(script_dir, file_name)
if os.path.exists(target_file_name):
if os.path.isdir(target_file_name):
                # os.removedirs() only handles empty directories; remove the stale artifact tree
                shutil.rmtree(target_file_name)
else:
os.remove(target_file_name)
shutil.move(src_file, script_dir)
setup_args = sys.argv[1:]
subprocess.check_call(["./setup.py"] + setup_args, cwd=script_dir)
except subprocess.CalledProcessError as e:
exit_code = e.returncode
sys.exit(exit_code)
| apache-2.0 | -1,571,231,106,159,978,200 | 32.759259 | 85 | 0.679649 | false |
nens/sufriblib | setup.py | 1 | 1083 | from setuptools import setup
version = '0.5.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'setuptools',
'pyproj',
    ]
tests_require = [
'nose',
'coverage',
]
setup(name='sufriblib',
version=version,
description="A library for working with SUFRIB 2.1 files (.RIB and .RMB files, sewer system measurement data)",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[],
keywords=[],
author='Remco Gerlich',
author_email='[email protected]',
url='',
license='GPL',
packages=['sufriblib'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
'sufribcat=sufriblib.scripts:sufribcat',
]},
)
| gpl-3.0 | 5,829,553,528,773,567,000 | 24.785714 | 117 | 0.604801 | false |
pdevetto/super-duper-disco | movies/migrations/0001_initial.py | 1 | 1056 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-11-28 14:30
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Director',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='Movie',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('year', models.DateTimeField(verbose_name='Movie year')),
('director', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='movies.Director')),
],
),
]
| gpl-3.0 | 874,054,082,737,347,300 | 31 | 115 | 0.574811 | false |
jun66j5/trac-ja | sample-plugins/workflow/VoteOperation.py | 1 | 2994 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2013 Edgewall Software
# Copyright (C) 2007 Eli Carter <[email protected]>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
from genshi.builder import tag
from trac.core import implements,Component
from trac.ticket.api import ITicketActionController
from trac.ticket.default_workflow import ConfigurableTicketWorkflow
from trac.ticket.model import Priority, Ticket
#from trac.perm import IPermissionRequestor # (TODO)
revision = "$Rev: 6326 $"
url = "$URL: https://svn.edgewall.org/repos/trac/trunk/sample-plugins/workflow/VoteOperation.py $"
class VoteOperation(Component):
"""Provides a simplistic vote feature.
This is a sample action controller illustrating how to create additional
''operations''.
Don't forget to add `VoteOperation` to the workflow
option in the `[ticket]` section in TracIni.
If there is no other workflow option, the line will look like this:
{{{
workflow = ConfigurableTicketWorkflow,VoteOperation
}}}
"""
implements(ITicketActionController)
def get_ticket_actions(self, req, ticket):
controller = ConfigurableTicketWorkflow(self.env)
return controller.get_actions_by_operation_for_req(req, ticket, 'vote')
def get_all_status(self):
return []
def render_ticket_action_control(self, req, ticket, action):
id = 'vote_%s_result' % (action, )
selected_value = req.args.get(id, 'for')
options = ['for', 'against']
return ("vote",
tag.select([tag.option(x, selected=(x == selected_value or
None))
for x in options], name=id, id=id),
"Vote on the issue, raising or lowering its priority")
def get_ticket_changes(self, req, ticket, action):
id = 'vote_%s_result' % (action, )
selected = req.args.get(id, 'for')
priorities = list(Priority.select(self.env))
orig_ticket = Ticket(self.env, ticket.id)
current_priority = int(Priority(self.env, name=
orig_ticket['priority']).value)
if selected == 'for':
# priorities are 1-based, not 0-based
new_value = max(1, current_priority - 1)
else:
maxval = max([int(p.value) for p in priorities])
new_value = min(maxval, current_priority + 1)
return {'priority': [p.name for p in priorities
if int(p.value) == new_value][0]}
def apply_action_side_effects(self, req, ticket, action):
pass
| bsd-3-clause | 6,424,570,206,271,528,000 | 38.394737 | 98 | 0.642619 | false |
CloudVLab/professional-services | tools/gsuite-exporter/setup.py | 1 | 2139 | # Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
from os import path
from io import open
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='gsuite-exporter',
version='0.0.3',
description='GSuite Admin API Exporter',
long_description=long_description,
long_description_content_type='text/markdown',
author='Google Inc.',
author_email='[email protected]',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='gsuite exporter stackdriver',
install_requires=[
'google-api-python-client',
'python-dateutil',
'requests'
],
entry_points={
'console_scripts': [
'gsuite-exporter=gsuite_exporter.cli:main',
],
},
python_requires='>=2.7'
)
| apache-2.0 | -8,173,679,682,487,673,000 | 31.907692 | 74 | 0.661992 | false |
UTNkar/moore | src/home/migrations/0027_manual_paragraph_data.py | 1 | 1268 | # Generated by Django 2.2.10 on 2020-04-02 21:08
from django.db import migrations
from itertools import chain
from utils.data_migrations import stream_field_filter_map
def richtext_to_paragraph(block):
return {
'type': 'paragraph',
'value': {
'text': block['value'],
'alignment': "Left"
}
}
def paragraph_to_richtext(block):
return {
'type': 'paragraph',
'value': block['value']['text'],
}
def apply_to_all_pages(apps, mapper):
HomePage = apps.get_model('home', 'HomePage')
WebPage = apps.get_model('home', 'WebPage')
hps = HomePage.objects.all()
wps = WebPage.objects.all();
for obj in chain(hps, wps):
obj.body_en = stream_field_filter_map(obj.body_en, "paragraph", mapper)
obj.body_sv = stream_field_filter_map(obj.body_sv, "paragraph", mapper)
obj.save();
def forwards(apps, schema_editor):
apply_to_all_pages(apps, richtext_to_paragraph)
def backwards(apps, schema_editor):
apply_to_all_pages(apps, paragraph_to_richtext)
class Migration(migrations.Migration):
dependencies = [
('home', '0026_auto_20200402_2308'),
]
operations = [
migrations.RunPython(forwards, backwards)
]
| agpl-3.0 | -3,568,786,183,921,958,400 | 25.978723 | 79 | 0.621451 | false |
JonnyH/pyra-kernel | GTA04/root/vibra.py | 2 | 3531 | #!/usr/bin/env python
import fcntl, struct, time, array
#
# There are two steps to creating a rumble effect
# 1/ describe the effect and give it to the driver using an
# ioctl.
# There a 3 paramaters:
# strength: from 0 to 0xffff - this code takes a value from 0 to
# 1 and scales it
# duration: milliseconds
# delay until start: milliseconds.
#
# 2/ write a request to play a specific effect.
#
# It is possible to have multiple effects active. If they have
# different delays they will start at different times.
# This demo shows combining 3 non-overlapping effects to make
# a simple vibration pattern
#
# An effect is created with f.new_vibe(strength, duration, delay)
# That effect can then be started with 'play' and stopped with 'stop'.
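#
# Minimal usage sketch (illustrative only; a fuller demo is in the __main__
# block at the bottom of this file):
#   f = Vibra()
#   effect = f.new_vibe(1.0, 250, 0)   # full strength, 250 ms, start at once
#   f.play(effect)
#   ...
#   f.stop(effect)
#   f.forget(effect)
#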
# EVIOCRMFF = _IOW('E', 0x81, int)
# dir: 2 WRITE = 1 == 0x40000
# size 14 4
# type 8 'E' == 0x45
# nr: 8 0x81
#
EVIOCRMFF = 0x40044581
# EVIOCSFF _IOC(_IOC_WRITE, 'E', 0x80, sizeof(struct ff_effect))
EVIOCSFF = 0x402c4580
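# Illustrative sanity check of the encodings above (not used at runtime):
#   (1 << 30) | (4 << 16) | (ord('E') << 8) | 0x81 == 0x40044581   # EVIOCRMFF
#   (1 << 30) | (44 << 16) | (ord('E') << 8) | 0x80 == 0x402c4580  # EVIOCSFF, sizeof(struct ff_effect) == 44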
class Vibra:
def __init__(self, file = "/dev/input/rumble"):
self.f = open(file, "r+")
def close(self):
self.f.close()
def new_vibe(self, strength, length, delay):
# strength is from 0 to 1
# length and delay are in millisecs
# this is 'struct ff_effect' from "linux/input.h"
effect = struct.pack('HhHHHHHxxHH',
0x50, -1, 0, # FF_RUMBLE, id, direction
0, 0, # trigger (button interval)
length, delay,
int(strength * 0xFFFF), 0)
a = array.array('h', effect)
        fcntl.ioctl(self.f, EVIOCSFF, a, True)
        # the driver writes the allocated effect id back into the buffer;
        # that id is what play()/stop()/forget() expect
        return a[1]
def multi_vibe(self, length, repeats = 1, delay = None, strength = 1):
start = 0
if delay == None:
delay = length
v = []
for i in range(0, repeats):
v.append(self.new_vibe(strength, length, start))
start += length + delay
return v
def play(self, id):
# this is 'struct input_event': sec, nsec, type, code, value
if type(id) == tuple or type(id) == list:
ev_play = ''
for i in id:
ev_play = ev_play + struct.pack('LLHHi', 0, 0, 0x15, i, 1)
else:
ev_play = struct.pack('LLHHi', 0, 0, 0x15, id, 1)
self.f.write(ev_play)
self.f.flush()
def stop(self, id):
# this is 'struct input_event': sec, nsec, type, code, value
if type(id) == tuple or type(id) == list:
ev_stop = ''
for i in id:
ev_stop = ev_stop + struct.pack('LLHHi', 0, 0, 0x15, i, 0)
else:
ev_stop = struct.pack('LLHHi', 0, 0, 0x15, id, 0)
self.f.write(ev_stop)
self.f.flush()
def forget(self, id):
if type(id) == tuple or type(id) == list:
for i in id:
fcntl.ioctl(self.f, EVIOCRMFF, i)
else:
fcntl.ioctl(self.f, EVIOCRMFF, id)
if __name__ == '__main__':
f = Vibra("/dev/input/rumble")
# rumble for 300ms, pause for 100ms, rumble for 300ms, pause for 200ms
# then half-speed rumble for 600ms
p1 = f.new_vibe(1, 300, 0)
p2 = f.new_vibe(1, 300,400)
p3 = f.new_vibe(0.5, 600, 900)
f.play((p1, p2, p3))
time.sleep(2)
f.forget((p1, p2, p3))
f.play(f.multi_vibe(200, 14, delay=100))
time.sleep(5)
| gpl-2.0 | -7,004,655,211,986,307,000 | 30.810811 | 74 | 0.54857 | false |
serge-sans-paille/pythran | pythran/transformations/remove_nested_functions.py | 1 | 4044 | """ RemoveNestedFunctions turns nested function into top-level functions. """
from pythran.analyses import GlobalDeclarations, ImportedIds
from pythran.passmanager import Transformation
from pythran.tables import MODULES
from pythran.conversion import mangle
import pythran.metadata as metadata
import gast as ast
class _NestedFunctionRemover(ast.NodeTransformer):
def __init__(self, parent):
ast.NodeTransformer.__init__(self)
self.parent = parent
self.identifiers = set(self.global_declarations.keys())
def __getattr__(self, attr):
return getattr(self.parent, attr)
def visit_FunctionDef(self, node):
self.update = True
if MODULES['functools'] not in self.global_declarations.values():
import_ = ast.Import([ast.alias('functools', mangle('functools'))])
self.ctx.module.body.insert(0, import_)
functools_module = MODULES['functools']
self.global_declarations[mangle('functools')] = functools_module
self.ctx.module.body.append(node)
former_name = node.name
seed = 0
new_name = "pythran_{}{}"
while new_name.format(former_name, seed) in self.identifiers:
seed += 1
new_name = new_name.format(former_name, seed)
self.identifiers.add(new_name)
ii = self.gather(ImportedIds, node)
binded_args = [ast.Name(iin, ast.Load(), None, None)
for iin in sorted(ii)]
node.args.args = ([ast.Name(iin, ast.Param(), None, None)
for iin in sorted(ii)] +
node.args.args)
metadata.add(node, metadata.Local())
class Renamer(ast.NodeTransformer):
def visit_Call(self, node):
self.generic_visit(node)
if (isinstance(node.func, ast.Name) and
node.func.id == former_name):
node.func.id = new_name
node.args = (
[ast.Name(iin, ast.Load(), None, None)
for iin in sorted(ii)] +
node.args
)
return node
Renamer().visit(node)
node.name = new_name
self.global_declarations[node.name] = node
proxy_call = ast.Name(new_name, ast.Load(), None, None)
new_node = ast.Assign(
[ast.Name(former_name, ast.Store(), None, None)],
ast.Call(
ast.Attribute(
ast.Name(mangle('functools'), ast.Load(), None, None),
"partial",
ast.Load()
),
[proxy_call] + binded_args,
[],
),
None)
self.generic_visit(node)
return new_node
class RemoveNestedFunctions(Transformation):
"""
Replace nested function by top-level functions.
Also add a call to a bind intrinsic that
generates a local function with some arguments binded.
>>> import gast as ast
>>> from pythran import passmanager, backend
>>> node = ast.parse("def foo(x):\\n def bar(y): return x+y\\n bar(12)")
>>> pm = passmanager.PassManager("test")
>>> _, node = pm.apply(RemoveNestedFunctions, node)
>>> print(pm.dump(backend.Python, node))
import functools as __pythran_import_functools
def foo(x):
bar = __pythran_import_functools.partial(pythran_bar0, x)
bar(12)
def pythran_bar0(x, y):
return (x + y)
"""
def __init__(self):
super(RemoveNestedFunctions, self).__init__(GlobalDeclarations)
def visit_Module(self, node):
# keep original node as it's updated by _NestedFunctionRemover
for stmt in node.body:
self.visit(stmt)
return node
def visit_FunctionDef(self, node):
nfr = _NestedFunctionRemover(self)
node.body = [nfr.visit(stmt) for stmt in node.body]
self.update |= nfr.update
return node
| bsd-3-clause | -783,504,513,930,404,400 | 32.7 | 79 | 0.568249 | false |
Hikasgai/HikasgaiApp | placeForMe/settings.py | 1 | 4053 | """
Django settings for the placeForMe project, on Heroku. For more info, see:
https://github.com/heroku/heroku-django-template
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
import dj_database_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: change this before deploying to production!
SECRET_KEY = 'i+acxn5(akgsn!sr4^qgf(^m&*@+g1@u^t@=8s@axc41ml*f=s'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'usuarios',
'asignaturas'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'placeForMe.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates')
],
'APP_DIRS': True,
'OPTIONS': {
'debug': True,
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'placeForMe.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Update database configuration with $DATABASE_URL.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
MEDIA_ROOT = 'media'
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
| mit | 7,661,293,483,091,938,000 | 27.342657 | 91 | 0.694547 | false |
pombredanne/invenio | modules/bibindex/lib/bibindex_engine_stemmer.py | 1 | 18378 | ## This file is part of Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
BibIndex stemmer facility based on the Porter Stemming Algorithm.
<http://tartarus.org/~martin/PorterStemmer/>
"""
__revision__ = "$Id$"
from thread import get_ident
from invenio.bibindex_engine_stemmer_greek import greek_stemmer
_stemmers = {}
try:
### Let's try to use SnowBall PyStemmer
import Stemmer
_lang_map = {
'danish' : 'da',
'dutch' : 'nl',
'english' : 'en',
'finnish' : 'fi',
'french' : 'fr',
'german' : 'de',
'hungarian' : 'hu',
'italian' : 'it',
'norwegian' : 'no',
'portuguese' : 'pt',
'romanian' : 'ro',
'russian' : 'ru',
'spanish' : 'es',
'swedish' : 'sv',
'turkish' : 'tr'
}
def is_stemmer_available_for_language(lang):
"""Return true if stemmer for language LANG is available.
Return false otherwise.
"""
thread_ident = get_ident()
if not _stemmers.has_key(thread_ident):
_stemmers[thread_ident] = _create_stemmers()
return _stemmers[thread_ident].has_key(lang)
def stem(word, lang):
"""Return WORD stemmed according to language LANG (e.g. 'en')."""
if lang and is_stemmer_available_for_language(lang):
return _stemmers[get_ident()][lang].stemWord(word)
elif lang == 'el':
#TODO: first we have to capitalize the word
# and remove accents from the vowels
return greek_stemmer().stem_word(word)
else:
return word
def stemWords(words, lang):
"""Return WORDS stemmed according to language LANG (e.g. 'en')."""
if lang and is_stemmer_available_for_language(lang):
return _stemmers[get_ident()][lang].stemWords(words)
else:
return words
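    # Illustrative behaviour of the two calls above (Snowball English stemmer):
    #   stem('information', 'en') -> 'inform'
    #   stemWords(['running', 'cats'], 'en') -> ['run', 'cat']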
def get_stemming_language_map():
"""Return a diction of code language, language name for all the available
languages."""
ret = {}
for language_name, language_code in _lang_map.iteritems():
if is_stemmer_available_for_language(language_code):
ret[language_name] = language_code
return ret
def _create_stemmers():
"""Create stemmers dictionary for all possible languages."""
stemmers_initialized = {}
for src_lang in Stemmer.algorithms():
try:
dst_lang = _lang_map.get(src_lang)
if dst_lang:
stemmers_initialized[dst_lang] = Stemmer.Stemmer(src_lang, 40000)
except (TypeError, KeyError):
pass
return stemmers_initialized
except ImportError:
### Here is the original PorterStemmer class provided as a fallback,
### the "free of charge for any purpose" implementation of the Porter stemmer
### algorithm in Python. The Invenio API interface follows below.
class PorterStemmer:
"""
This is the Porter stemming algorithm, ported to Python from the
        version coded up in ANSI C by the author. It may be regarded
as canonical, in that it follows the algorithm presented in
Porter, 1980, An algorithm for suffix stripping, Program, Vol. 14,
no. 3, pp 130-137,
        only differing from it at the points marked --DEPARTURE-- below.
See also http://www.tartarus.org/~martin/PorterStemmer
The algorithm as described in the paper could be exactly replicated
by adjusting the points of DEPARTURE, but this is barely necessary,
because (a) the points of DEPARTURE are definitely improvements, and
(b) no encoding of the Porter stemmer I have seen is anything like
as exact as this version, even with the points of DEPARTURE!
Vivake Gupta ([email protected])
Release 1: January 2001
"""
def __init__(self):
"""The main part of the stemming algorithm starts here.
b is a buffer holding a word to be stemmed. The letters are in b[k0],
b[k0+1] ... ending at b[k]. In fact k0 = 0 in this demo program. k is
readjusted downwards as the stemming progresses. Zero termination is
not in fact used in the algorithm.
Note that only lower case sequences are stemmed. Forcing to lower case
should be done before stem(...) is called.
"""
self.b = "" # buffer for word to be stemmed
self.k = 0
self.k0 = 0
self.j = 0 # j is a general offset into the string
def cons(self, i):
"""cons(i) is TRUE <=> b[i] is a consonant."""
if self.b[i] == 'a' or self.b[i] == 'e' or self.b[i] == 'i' or self.b[i] == 'o' or self.b[i] == 'u':
return 0
if self.b[i] == 'y':
if i == self.k0:
return 1
else:
return (not self.cons(i - 1))
return 1
def m(self):
"""m() measures the number of consonant sequences between k0 and j.
if c is a consonant sequence and v a vowel sequence, and <..>
indicates arbitrary presence,
<c><v> gives 0
<c>vc<v> gives 1
<c>vcvc<v> gives 2
<c>vcvcvc<v> gives 3
....
"""
n = 0
i = self.k0
while 1:
if i > self.j:
return n
if not self.cons(i):
break
i = i + 1
i = i + 1
while 1:
while 1:
if i > self.j:
return n
if self.cons(i):
break
i = i + 1
i = i + 1
n = n + 1
while 1:
if i > self.j:
return n
if not self.cons(i):
break
i = i + 1
i = i + 1
def vowelinstem(self):
"""vowelinstem() is TRUE <=> k0,...j contains a vowel"""
for i in range(self.k0, self.j + 1):
if not self.cons(i):
return 1
return 0
def doublec(self, j):
"""doublec(j) is TRUE <=> j,(j-1) contain a double consonant."""
if j < (self.k0 + 1):
return 0
if (self.b[j] != self.b[j-1]):
return 0
return self.cons(j)
def cvc(self, i):
"""cvc(i) is TRUE <=> i-2,i-1,i has the form consonant - vowel - consonant
and also if the second c is not w,x or y. this is used when trying to
            restore an e at the end of a short word, e.g.
cav(e), lov(e), hop(e), crim(e), but
snow, box, tray.
"""
if i < (self.k0 + 2) or not self.cons(i) or self.cons(i-1) or not self.cons(i-2):
return 0
ch = self.b[i]
if ch == 'w' or ch == 'x' or ch == 'y':
return 0
return 1
def ends(self, s):
"""ends(s) is TRUE <=> k0,...k ends with the string s."""
length = len(s)
if s[length - 1] != self.b[self.k]: # tiny speed-up
return 0
if length > (self.k - self.k0 + 1):
return 0
if self.b[self.k-length+1:self.k+1] != s:
return 0
self.j = self.k - length
return 1
def setto(self, s):
"""setto(s) sets (j+1),...k to the characters in the string s, readjusting k."""
length = len(s)
self.b = self.b[:self.j+1] + s + self.b[self.j+length+1:]
self.k = self.j + length
def r(self, s):
"""r(s) is used further down."""
if self.m() > 0:
self.setto(s)
def step1ab(self):
"""step1ab() gets rid of plurals and -ed or -ing. e.g.
caresses -> caress
ponies -> poni
ties -> ti
caress -> caress
cats -> cat
feed -> feed
agreed -> agree
disabled -> disable
matting -> mat
mating -> mate
meeting -> meet
milling -> mill
messing -> mess
meetings -> meet
"""
if self.b[self.k] == 's':
if self.ends("sses"):
self.k = self.k - 2
elif self.ends("ies"):
self.setto("i")
elif self.b[self.k - 1] != 's':
self.k = self.k - 1
if self.ends("eed"):
if self.m() > 0:
self.k = self.k - 1
elif (self.ends("ed") or self.ends("ing")) and self.vowelinstem():
self.k = self.j
if self.ends("at"): self.setto("ate")
elif self.ends("bl"): self.setto("ble")
elif self.ends("iz"): self.setto("ize")
elif self.doublec(self.k):
self.k = self.k - 1
ch = self.b[self.k]
if ch == 'l' or ch == 's' or ch == 'z':
self.k = self.k + 1
elif (self.m() == 1 and self.cvc(self.k)):
self.setto("e")
def step1c(self):
"""step1c() turns terminal y to i when there is another vowel in the stem."""
if (self.ends("y") and self.vowelinstem()):
self.b = self.b[:self.k] + 'i' + self.b[self.k+1:]
def step2(self):
"""step2() maps double suffices to single ones.
so -ization ( = -ize plus -ation) maps to -ize etc. note that the
string before the suffix must give m() > 0.
"""
if self.b[self.k - 1] == 'a':
if self.ends("ational"): self.r("ate")
elif self.ends("tional"): self.r("tion")
elif self.b[self.k - 1] == 'c':
if self.ends("enci"): self.r("ence")
elif self.ends("anci"): self.r("ance")
elif self.b[self.k - 1] == 'e':
if self.ends("izer"): self.r("ize")
elif self.b[self.k - 1] == 'l':
if self.ends("bli"): self.r("ble") # --DEPARTURE--
# To match the published algorithm, replace this phrase with
# if self.ends("abli"): self.r("able")
elif self.ends("alli"): self.r("al")
elif self.ends("entli"): self.r("ent")
elif self.ends("eli"): self.r("e")
elif self.ends("ousli"): self.r("ous")
elif self.b[self.k - 1] == 'o':
if self.ends("ization"): self.r("ize")
elif self.ends("ation"): self.r("ate")
elif self.ends("ator"): self.r("ate")
elif self.b[self.k - 1] == 's':
if self.ends("alism"): self.r("al")
elif self.ends("iveness"): self.r("ive")
elif self.ends("fulness"): self.r("ful")
elif self.ends("ousness"): self.r("ous")
elif self.b[self.k - 1] == 't':
if self.ends("aliti"): self.r("al")
elif self.ends("iviti"): self.r("ive")
elif self.ends("biliti"): self.r("ble")
elif self.b[self.k - 1] == 'g': # --DEPARTURE--
if self.ends("logi"): self.r("log")
# To match the published algorithm, delete this phrase
def step3(self):
"""step3() dels with -ic-, -full, -ness etc. similar strategy to step2."""
if self.b[self.k] == 'e':
if self.ends("icate"): self.r("ic")
elif self.ends("ative"): self.r("")
elif self.ends("alize"): self.r("al")
elif self.b[self.k] == 'i':
if self.ends("iciti"): self.r("ic")
elif self.b[self.k] == 'l':
if self.ends("ical"): self.r("ic")
elif self.ends("ful"): self.r("")
elif self.b[self.k] == 's':
if self.ends("ness"): self.r("")
def step4(self):
"""step4() takes off -ant, -ence etc., in context <c>vcvc<v>."""
if self.b[self.k - 1] == 'a':
if self.ends("al"): pass
else: return
elif self.b[self.k - 1] == 'c':
if self.ends("ance"): pass
elif self.ends("ence"): pass
else: return
elif self.b[self.k - 1] == 'e':
if self.ends("er"): pass
else: return
elif self.b[self.k - 1] == 'i':
if self.ends("ic"): pass
else: return
elif self.b[self.k - 1] == 'l':
if self.ends("able"): pass
elif self.ends("ible"): pass
else: return
elif self.b[self.k - 1] == 'n':
if self.ends("ant"): pass
elif self.ends("ement"): pass
elif self.ends("ment"): pass
elif self.ends("ent"): pass
else: return
elif self.b[self.k - 1] == 'o':
if self.ends("ion") and (self.b[self.j] == 's' or self.b[self.j] == 't'): pass
elif self.ends("ou"): pass
# takes care of -ous
else: return
elif self.b[self.k - 1] == 's':
if self.ends("ism"): pass
else: return
elif self.b[self.k - 1] == 't':
if self.ends("ate"): pass
elif self.ends("iti"): pass
else: return
elif self.b[self.k - 1] == 'u':
if self.ends("ous"): pass
else: return
elif self.b[self.k - 1] == 'v':
if self.ends("ive"): pass
else: return
elif self.b[self.k - 1] == 'z':
if self.ends("ize"): pass
else: return
else:
return
if self.m() > 1:
self.k = self.j
def step5(self):
"""step5() removes a final -e if m() > 1, and changes -ll to -l if
m() > 1.
"""
self.j = self.k
if self.b[self.k] == 'e':
a = self.m()
if a > 1 or (a == 1 and not self.cvc(self.k-1)):
self.k = self.k - 1
if self.b[self.k] == 'l' and self.doublec(self.k) and self.m() > 1:
self.k = self.k -1
def stem(self, p, i, j):
"""In stem(p,i,j), p is a char pointer, and the string to be stemmed
is from p[i] to p[j] inclusive. Typically i is zero and j is the
offset to the last character of a string, (p[j+1] == '\0'). The
stemmer adjusts the characters p[i] ... p[j] and returns the new
end-point of the string, k. Stemming never increases word length, so
i <= k <= j. To turn the stemmer into a module, declare 'stem' as
extern, and delete the remainder of this file.
"""
# copy the parameters into statics
self.b = p
self.k = j
self.k0 = i
if self.k <= self.k0 + 1:
return self.b # --DEPARTURE--
# With this line, strings of length 1 or 2 don't go through the
# stemming process, although no mention is made of this in the
# published algorithm. Remove the line to match the published
# algorithm.
self.step1ab()
self.step1c()
self.step2()
self.step3()
self.step4()
self.step5()
return self.b[self.k0:self.k+1]
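    # Illustrative call: PorterStemmer().stem("caresses", 0, len("caresses") - 1)
    # returns "caress".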
_stemmers[get_ident()] = PorterStemmer()
def is_stemmer_available_for_language(lang):
"""Return true if stemmer for language LANG is available.
Return false otherwise.
"""
return lang == 'en'
def stem(word, lang):
"""Return WORD stemmed according to language LANG (e.g. 'en')."""
if lang == 'en' and _stemmers and _stemmers.has_key(get_ident()):
#make sure _stemmers[get_ident()] is avail..
return _stemmers[get_ident()].stem(word, 0, len(word)-1)
elif lang == 'el':
#TODO: first we have to capitalize the word
# and remove accents from the vowels
return greek_stemmer().stem_word(word)
else:
return word
def stemWords(words, lang):
"""Return WORDS stemmed according to language LANG (e.g. 'en')."""
if lang == 'en' and _stemmers and _stemmers.has_key(get_ident()):
#make sure _stemmers[get_ident()] is avail..
return [_stemmers[get_ident()].stem(word, 0, len(word)-1) for word in words]
else:
return words
def get_stemming_language_map():
"""Return a diction of code language, language name for all the available
languages."""
return {'english' : 'en'}
if __name__ == '__main__':
# when invoked via CLI, simply stem the arguments:
import sys
if len(sys.argv) > 1:
for word in sys.argv[1:]:
print stem(word)
| gpl-2.0 | -8,048,510,486,832,647,000 | 37.367432 | 112 | 0.482098 | false |
gurneyalex/odoo | addons/account/models/digest.py | 6 | 1557 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models, _
from odoo.exceptions import AccessError
class Digest(models.Model):
_inherit = 'digest.digest'
kpi_account_total_revenue = fields.Boolean('Revenue')
kpi_account_total_revenue_value = fields.Monetary(compute='_compute_kpi_account_total_revenue_value')
def _compute_kpi_account_total_revenue_value(self):
if not self.env.user.has_group('account.group_account_invoice'):
raise AccessError(_("Do not have access, skip this data for user's digest email"))
for record in self:
start, end, company = record._get_kpi_compute_parameters()
self._cr.execute('''
SELECT SUM(line.debit)
FROM account_move_line line
JOIN account_move move ON move.id = line.move_id
JOIN account_journal journal ON journal.id = move.journal_id
WHERE line.company_id = %s AND line.date >= %s AND line.date < %s
AND journal.type = 'sale'
''', [company.id, start, end])
query_res = self._cr.fetchone()
record.kpi_account_total_revenue_value = query_res and query_res[0] or 0.0
def compute_kpis_actions(self, company, user):
res = super(Digest, self).compute_kpis_actions(company, user)
res['kpi_account_total_revenue'] = 'account.action_move_out_invoice_type&menu_id=%s' % self.env.ref('account.menu_finance').id
return res
| agpl-3.0 | -1,840,514,460,584,391,000 | 46.181818 | 134 | 0.635196 | false |
miguelinux/vbox | src/VBox/ValidationKit/testboxscript/testboxscript.py | 1 | 3651 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# $Id: testboxscript.py $
"""
TestBox Script Wrapper.
This script aims at respawning the Test Box Script when it terminates
abnormally or due to an UPGRADE request.
"""
__copyright__ = \
"""
Copyright (C) 2012-2015 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision: 108245 $"
import subprocess
import sys
import os
import time
## @name Test Box script exit statuses (see also RTEXITCODE)
# @remarks These will _never_ change
# @{
TBS_EXITCODE_FAILURE = 1 # RTEXITCODE_FAILURE
TBS_EXITCODE_SYNTAX = 2 # RTEXITCODE_SYNTAX
TBS_EXITCODE_NEED_UPGRADE = 9
## @}
class TestBoxScriptWrapper(object): # pylint: disable=R0903
"""
Wrapper class
"""
TESTBOX_SCRIPT_FILENAME = 'testboxscript_real.py'
def __init__(self):
"""
Init
"""
self.task = None
def __del__(self):
"""
Cleanup
"""
if self.task is not None:
print 'Wait for child task...'
self.task.terminate()
self.task.wait()
print 'done. Exiting'
self.task = None;
def run(self):
"""
Start spawning the real TestBox script.
"""
# Figure out where we live first.
try:
__file__
except:
__file__ = sys.argv[0];
sTestBoxScriptDir = os.path.dirname(os.path.abspath(__file__));
# Construct the argument list for the real script (same dir).
sRealScript = os.path.join(sTestBoxScriptDir, TestBoxScriptWrapper.TESTBOX_SCRIPT_FILENAME);
asArgs = sys.argv[1:];
asArgs.insert(0, sRealScript);
if sys.executable is not None and len(sys.executable) > 0:
asArgs.insert(0, sys.executable);
# Look for --pidfile <name> and write a pid file.
sPidFile = None;
for i, _ in enumerate(asArgs):
if asArgs[i] == '--pidfile' and i + 1 < len(asArgs):
sPidFile = asArgs[i + 1];
break;
if asArgs[i] == '--':
break;
if sPidFile is not None and len(sPidFile) > 0:
oPidFile = open(sPidFile, 'w');
oPidFile.write(str(os.getpid()));
oPidFile.close();
# Execute the testbox script almost forever in a relaxed loop.
rcExit = TBS_EXITCODE_FAILURE;
while True:
self.task = subprocess.Popen(asArgs, shell=False);
rcExit = self.task.wait();
self.task = None;
if rcExit == TBS_EXITCODE_SYNTAX:
break;
# Relax.
time.sleep(1);
return rcExit;
if __name__ == '__main__':
sys.exit(TestBoxScriptWrapper().run());
| gpl-2.0 | 1,803,930,489,939,122,200 | 29.173554 | 100 | 0.610244 | false |
phil0522/anote | anote-web/anoteweb/data/anote_pb2.py | 1 | 9335 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: anote.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
DESCRIPTOR = _descriptor.FileDescriptor(
name='anote.proto',
package='anote.proto',
serialized_pb='\n\x0b\x61note.proto\x12\x0b\x61note.proto\"\xfc\x01\n\x04Task\x12\x0f\n\x07task_id\x18\x01 \x01(\x05\x12\r\n\x05title\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\x0b\n\x03tag\x18\x04 \x03(\t\x12\x0f\n\x07project\x18\x05 \x01(\t\x12\x11\n\tparent_id\x18\x06 \x01(\x05\x12\x10\n\x08\x61ncestor\x18\x07 \x03(\x05\x12\x13\n\x0b\x64\x65scription\x18\x08 \x01(\t\x12\x0f\n\x07note_id\x18\t \x03(\x05\x12\x1f\n\x04note\x18\n \x03(\x0b\x32\x11.anote.proto.Note\x12\x13\n\x0b\x63reate_time\x18\x0b \x01(\x05\x12\x13\n\x0bupdate_time\x18\x0c \x01(\x05\x12\x10\n\x08position\x18\r \x01(\x05\"6\n\x04Note\x12\x0f\n\x07task_id\x18\x01 \x01(\x05\x12\x0f\n\x07note_id\x18\x02 \x01(\x05\x12\x0c\n\x04text\x18\x03 \x01(\t\"6\n\x03Tag\x12\r\n\x05title\x18\x01 \x01(\t\x12\x12\n\noccurrence\x18\x02 \x01(\x05\x12\x0c\n\x04hide\x18\x03 \x01(\x08')
_TASK = _descriptor.Descriptor(
name='Task',
full_name='anote.proto.Task',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='task_id', full_name='anote.proto.Task.task_id', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='title', full_name='anote.proto.Task.title', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='status', full_name='anote.proto.Task.status', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tag', full_name='anote.proto.Task.tag', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='project', full_name='anote.proto.Task.project', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='parent_id', full_name='anote.proto.Task.parent_id', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ancestor', full_name='anote.proto.Task.ancestor', index=6,
number=7, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='description', full_name='anote.proto.Task.description', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='note_id', full_name='anote.proto.Task.note_id', index=8,
number=9, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='note', full_name='anote.proto.Task.note', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='create_time', full_name='anote.proto.Task.create_time', index=10,
number=11, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='update_time', full_name='anote.proto.Task.update_time', index=11,
number=12, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='position', full_name='anote.proto.Task.position', index=12,
number=13, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=29,
serialized_end=281,
)
_NOTE = _descriptor.Descriptor(
name='Note',
full_name='anote.proto.Note',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='task_id', full_name='anote.proto.Note.task_id', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='note_id', full_name='anote.proto.Note.note_id', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='text', full_name='anote.proto.Note.text', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=283,
serialized_end=337,
)
_TAG = _descriptor.Descriptor(
name='Tag',
full_name='anote.proto.Tag',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='title', full_name='anote.proto.Tag.title', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='occurrence', full_name='anote.proto.Tag.occurrence', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hide', full_name='anote.proto.Tag.hide', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=339,
serialized_end=393,
)
_TASK.fields_by_name['note'].message_type = _NOTE
DESCRIPTOR.message_types_by_name['Task'] = _TASK
DESCRIPTOR.message_types_by_name['Note'] = _NOTE
DESCRIPTOR.message_types_by_name['Tag'] = _TAG
class Task(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TASK
# @@protoc_insertion_point(class_scope:anote.proto.Task)
class Note(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _NOTE
# @@protoc_insertion_point(class_scope:anote.proto.Note)
class Tag(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TAG
# @@protoc_insertion_point(class_scope:anote.proto.Tag)
# @@protoc_insertion_point(module_scope)
| mit | -9,181,972,045,732,631,000 | 37.895833 | 857 | 0.681307 | false |
mfitzp/padua | setup.py | 1 | 1035 | from setuptools import setup, find_packages
version = '0.1.16'
setup(
name='padua',
version=version,
url='http://github.com/mfitzp/padua',
author='Martin Fitzpatrick',
author_email='[email protected]',
description='A Python interface for Proteomic Data Analysis, working with MaxQuant & Perseus outputs',
license='MIT',
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Desktop Environment',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Widget Sets',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'numpy',
'scipy',
'matplotlib',
'pandas',
'statsmodels',
'matplotlib-venn',
'scikit-learn',
'requests',
'requests_toolbelt',
'adjustText'
]
)
| bsd-2-clause | 5,922,774,192,656,956,000 | 27.75 | 106 | 0.596135 | false |
solymosin/maps2winbugs | plugin/xdist.py | 1 | 1633 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
maps2WinBUGS
A QGIS plugin a tool to facilitate data processing for Bayesian spatial modeling
-------------------
begin : 2015-07-31
git sha : $Format:%H$
copyright : (C) 2015 by Norbert Solymosi
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from qgis.PyQt.QtWidgets import QDialog
from .xdist_dialog import Ui_xDist
class Dialog(QDialog, Ui_xDist):
def __init__(self):
"""Constructor for the dialog.
"""
QDialog.__init__(self)
self.setupUi(self)
self.pushCancel.clicked.connect(self.reject)
self.pushOK.clicked.connect(self.accept)
| gpl-2.0 | 865,070,765,404,318,000 | 37.880952 | 114 | 0.361298 | false |
jianghuaw/nova | nova/conductor/manager.py | 1 | 56574 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Handles database requests from other nova services."""
import contextlib
import copy
import functools
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_utils import excutils
from oslo_utils import versionutils
import six
from nova import availability_zones
from nova.compute import instance_actions
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute.utils import wrap_instance_event
from nova.compute import vm_states
from nova.conductor.tasks import live_migrate
from nova.conductor.tasks import migrate
from nova import context as nova_context
from nova.db import base
from nova import exception
from nova.i18n import _
from nova import image
from nova import manager
from nova import network
from nova import notifications
from nova import objects
from nova.objects import base as nova_object
from nova import profiler
from nova import rpc
from nova.scheduler import client as scheduler_client
from nova.scheduler import utils as scheduler_utils
from nova import servicegroup
from nova import utils
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
def targets_cell(fn):
"""Wrap a method and automatically target the instance's cell.
This decorates a method with signature func(self, context, instance, ...)
and automatically targets the context with the instance's cell
mapping. It does this by looking up the InstanceMapping.
"""
@functools.wraps(fn)
def wrapper(self, context, *args, **kwargs):
instance = kwargs.get('instance') or args[0]
try:
im = objects.InstanceMapping.get_by_instance_uuid(
context, instance.uuid)
except exception.InstanceMappingNotFound:
LOG.error('InstanceMapping not found, unable to target cell',
instance=instance)
im = None
else:
LOG.debug('Targeting cell %(cell)s for conductor method %(meth)s',
{'cell': im.cell_mapping.identity,
'meth': fn.__name__})
# NOTE(danms): Target our context to the cell for the rest of
# this request, so that none of the subsequent code needs to
# care about it.
nova_context.set_target_cell(context, im.cell_mapping)
return fn(self, context, *args, **kwargs)
return wrapper
class ConductorManager(manager.Manager):
"""Mission: Conduct things.
The methods in the base API for nova-conductor are various proxy operations
performed on behalf of the nova-compute service running on compute nodes.
Compute nodes are not allowed to directly access the database, so this set
of methods allows them to get specific work done without locally accessing
the database.
The nova-conductor service also exposes an API in the 'compute_task'
namespace. See the ComputeTaskManager class for details.
"""
target = messaging.Target(version='3.0')
def __init__(self, *args, **kwargs):
super(ConductorManager, self).__init__(service_name='conductor',
*args, **kwargs)
self.compute_task_mgr = ComputeTaskManager()
self.additional_endpoints.append(self.compute_task_mgr)
# NOTE(hanlind): This can be removed in version 4.0 of the RPC API
def provider_fw_rule_get_all(self, context):
# NOTE(hanlind): Simulate an empty db result for compat reasons.
return []
def _object_dispatch(self, target, method, args, kwargs):
"""Dispatch a call to an object method.
This ensures that object methods get called and any exception
that is raised gets wrapped in an ExpectedException for forwarding
back to the caller (without spamming the conductor logs).
"""
try:
# NOTE(danms): Keep the getattr inside the try block since
# a missing method is really a client problem
return getattr(target, method)(*args, **kwargs)
except Exception:
raise messaging.ExpectedException()
def object_class_action_versions(self, context, objname, objmethod,
object_versions, args, kwargs):
objclass = nova_object.NovaObject.obj_class_from_name(
objname, object_versions[objname])
args = tuple([context] + list(args))
result = self._object_dispatch(objclass, objmethod, args, kwargs)
# NOTE(danms): The RPC layer will convert to primitives for us,
# but in this case, we need to honor the version the client is
# asking for, so we do it before returning here.
# NOTE(hanlind): Do not convert older than requested objects,
# see bug #1596119.
if isinstance(result, nova_object.NovaObject):
target_version = object_versions[objname]
requested_version = versionutils.convert_version_to_tuple(
target_version)
actual_version = versionutils.convert_version_to_tuple(
result.VERSION)
do_backport = requested_version < actual_version
other_major_version = requested_version[0] != actual_version[0]
if do_backport or other_major_version:
result = result.obj_to_primitive(
target_version=target_version,
version_manifest=object_versions)
return result
def object_action(self, context, objinst, objmethod, args, kwargs):
"""Perform an action on an object."""
oldobj = objinst.obj_clone()
result = self._object_dispatch(objinst, objmethod, args, kwargs)
updates = dict()
# NOTE(danms): Diff the object with the one passed to us and
# generate a list of changes to forward back
for name, field in objinst.fields.items():
if not objinst.obj_attr_is_set(name):
# Avoid demand-loading anything
continue
if (not oldobj.obj_attr_is_set(name) or
getattr(oldobj, name) != getattr(objinst, name)):
updates[name] = field.to_primitive(objinst, name,
getattr(objinst, name))
# This is safe since a field named this would conflict with the
# method anyway
updates['obj_what_changed'] = objinst.obj_what_changed()
return updates, result
def object_backport_versions(self, context, objinst, object_versions):
target = object_versions[objinst.obj_name()]
LOG.debug('Backporting %(obj)s to %(ver)s with versions %(manifest)s',
{'obj': objinst.obj_name(),
'ver': target,
'manifest': ','.join(
['%s=%s' % (name, ver)
for name, ver in object_versions.items()])})
return objinst.obj_to_primitive(target_version=target,
version_manifest=object_versions)
def reset(self):
objects.Service.clear_min_version_cache()
@contextlib.contextmanager
def try_target_cell(context, cell):
"""If cell is not None call func with context.target_cell.
This is a method to help during the transition period. Currently
various mappings may not exist if a deployment has not migrated to
cellsv2. If there is no mapping call the func as normal, otherwise
call it in a target_cell context.
"""
if cell:
with nova_context.target_cell(context, cell) as cell_context:
yield cell_context
else:
yield context
@contextlib.contextmanager
def obj_target_cell(obj, cell):
"""Run with object's context set to a specific cell"""
with try_target_cell(obj._context, cell) as target:
with obj.obj_alternate_context(target):
yield target
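# A minimal usage sketch of these helpers, mirroring calls made later in this
# module:
#
#     with obj_target_cell(instance, cell) as cctxt:
#         instance.create()   # database writes go to the targeted cell
#
# try_target_cell() simply yields the untargeted context when `cell` is None,
# which covers deployments that have not migrated to cellsv2 yet.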
@profiler.trace_cls("rpc")
class ComputeTaskManager(base.Base):
"""Namespace for compute methods.
This class presents an rpc API for nova-conductor under the 'compute_task'
namespace. The methods here are compute operations that are invoked
by the API service. These methods see the operation to completion, which
may involve coordinating activities on multiple compute nodes.
"""
target = messaging.Target(namespace='compute_task', version='1.17')
def __init__(self):
super(ComputeTaskManager, self).__init__()
self.compute_rpcapi = compute_rpcapi.ComputeAPI()
self.image_api = image.API()
self.network_api = network.API()
self.servicegroup_api = servicegroup.API()
self.scheduler_client = scheduler_client.SchedulerClient()
self.notifier = rpc.get_notifier('compute', CONF.host)
def reset(self):
LOG.info('Reloading compute RPC API')
compute_rpcapi.LAST_VERSION = None
self.compute_rpcapi = compute_rpcapi.ComputeAPI()
# TODO(tdurakov): remove `live` parameter here on compute task api RPC
# version bump to 2.x
@messaging.expected_exceptions(
exception.NoValidHost,
exception.ComputeServiceUnavailable,
exception.ComputeHostNotFound,
exception.InvalidHypervisorType,
exception.InvalidCPUInfo,
exception.UnableToMigrateToSelf,
exception.DestinationHypervisorTooOld,
exception.InvalidLocalStorage,
exception.InvalidSharedStorage,
exception.HypervisorUnavailable,
exception.InstanceInvalidState,
exception.MigrationPreCheckError,
exception.MigrationPreCheckClientException,
exception.LiveMigrationWithOldNovaNotSupported,
exception.UnsupportedPolicyException)
@targets_cell
@wrap_instance_event(prefix='conductor')
def migrate_server(self, context, instance, scheduler_hint, live, rebuild,
flavor, block_migration, disk_over_commit, reservations=None,
clean_shutdown=True, request_spec=None):
if instance and not isinstance(instance, nova_object.NovaObject):
# NOTE(danms): Until v2 of the RPC API, we need to tolerate
# old-world instance objects here
attrs = ['metadata', 'system_metadata', 'info_cache',
'security_groups']
instance = objects.Instance._from_db_object(
context, objects.Instance(), instance,
expected_attrs=attrs)
# NOTE: Remove this when we drop support for v1 of the RPC API
if flavor and not isinstance(flavor, objects.Flavor):
# Code downstream may expect extra_specs to be populated since it
# is receiving an object, so lookup the flavor to ensure this.
flavor = objects.Flavor.get_by_id(context, flavor['id'])
if live and not rebuild and not flavor:
self._live_migrate(context, instance, scheduler_hint,
block_migration, disk_over_commit, request_spec)
elif not live and not rebuild and flavor:
instance_uuid = instance.uuid
with compute_utils.EventReporter(context, 'cold_migrate',
instance_uuid):
self._cold_migrate(context, instance, flavor,
scheduler_hint['filter_properties'],
reservations, clean_shutdown, request_spec)
else:
raise NotImplementedError()
def _cold_migrate(self, context, instance, flavor, filter_properties,
reservations, clean_shutdown, request_spec):
image = utils.get_image_from_system_metadata(
instance.system_metadata)
# NOTE(sbauza): If a reschedule occurs when prep_resize(), then
# it only provides filter_properties legacy dict back to the
# conductor with no RequestSpec part of the payload.
if not request_spec:
# Make sure we hydrate a new RequestSpec object with the new flavor
# and not the nested one from the instance
request_spec = objects.RequestSpec.from_components(
context, instance.uuid, image,
flavor, instance.numa_topology, instance.pci_requests,
filter_properties, None, instance.availability_zone)
else:
# NOTE(sbauza): Resizes means new flavor, so we need to update the
            # original RequestSpec object to make sure the scheduler verifies
# the right one and not the original flavor
request_spec.flavor = flavor
task = self._build_cold_migrate_task(context, instance, flavor,
request_spec,
reservations, clean_shutdown)
# TODO(sbauza): Provide directly the RequestSpec object once
# _set_vm_state_and_notify() accepts it
legacy_spec = request_spec.to_legacy_request_spec_dict()
try:
task.execute()
except exception.NoValidHost as ex:
vm_state = instance.vm_state
if not vm_state:
vm_state = vm_states.ACTIVE
updates = {'vm_state': vm_state, 'task_state': None}
self._set_vm_state_and_notify(context, instance.uuid,
'migrate_server',
updates, ex, legacy_spec)
# if the flavor IDs match, it's migrate; otherwise resize
if flavor.id == instance.instance_type_id:
msg = _("No valid host found for cold migrate")
else:
msg = _("No valid host found for resize")
raise exception.NoValidHost(reason=msg)
except exception.UnsupportedPolicyException as ex:
with excutils.save_and_reraise_exception():
vm_state = instance.vm_state
if not vm_state:
vm_state = vm_states.ACTIVE
updates = {'vm_state': vm_state, 'task_state': None}
self._set_vm_state_and_notify(context, instance.uuid,
'migrate_server',
updates, ex, legacy_spec)
except Exception as ex:
with excutils.save_and_reraise_exception():
updates = {'vm_state': instance.vm_state,
'task_state': None}
self._set_vm_state_and_notify(context, instance.uuid,
'migrate_server',
updates, ex, legacy_spec)
# NOTE(sbauza): Make sure we persist the new flavor in case we had
# a successful scheduler call if and only if nothing bad happened
if request_spec.obj_what_changed():
request_spec.save()
def _set_vm_state_and_notify(self, context, instance_uuid, method, updates,
ex, request_spec):
scheduler_utils.set_vm_state_and_notify(
context, instance_uuid, 'compute_task', method, updates,
ex, request_spec)
def _cleanup_allocated_networks(
self, context, instance, requested_networks):
try:
# If we were told not to allocate networks let's save ourselves
# the trouble of calling the network API.
if not (requested_networks and requested_networks.no_allocate):
self.network_api.deallocate_for_instance(
context, instance, requested_networks=requested_networks)
except Exception:
LOG.exception('Failed to deallocate networks', instance=instance)
return
instance.system_metadata['network_allocated'] = 'False'
try:
instance.save()
except exception.InstanceNotFound:
# NOTE: It's possible that we're cleaning up the networks
# because the instance was deleted. If that's the case then this
# exception will be raised by instance.save()
pass
@wrap_instance_event(prefix='conductor')
def live_migrate_instance(self, context, instance, scheduler_hint,
block_migration, disk_over_commit, request_spec):
self._live_migrate(context, instance, scheduler_hint,
block_migration, disk_over_commit, request_spec)
def _live_migrate(self, context, instance, scheduler_hint,
block_migration, disk_over_commit, request_spec):
destination = scheduler_hint.get("host")
def _set_vm_state(context, instance, ex, vm_state=None,
task_state=None):
request_spec = {'instance_properties': {
'uuid': instance.uuid, },
}
scheduler_utils.set_vm_state_and_notify(context,
instance.uuid,
'compute_task', 'migrate_server',
dict(vm_state=vm_state,
task_state=task_state,
expected_task_state=task_states.MIGRATING,),
ex, request_spec)
migration = objects.Migration(context=context.elevated())
migration.dest_compute = destination
migration.status = 'accepted'
migration.instance_uuid = instance.uuid
migration.source_compute = instance.host
migration.migration_type = 'live-migration'
if instance.obj_attr_is_set('flavor'):
migration.old_instance_type_id = instance.flavor.id
migration.new_instance_type_id = instance.flavor.id
else:
migration.old_instance_type_id = instance.instance_type_id
migration.new_instance_type_id = instance.instance_type_id
migration.create()
task = self._build_live_migrate_task(context, instance, destination,
block_migration, disk_over_commit,
migration, request_spec)
try:
task.execute()
except (exception.NoValidHost,
exception.ComputeHostNotFound,
exception.ComputeServiceUnavailable,
exception.InvalidHypervisorType,
exception.InvalidCPUInfo,
exception.UnableToMigrateToSelf,
exception.DestinationHypervisorTooOld,
exception.InvalidLocalStorage,
exception.InvalidSharedStorage,
exception.HypervisorUnavailable,
exception.InstanceInvalidState,
exception.MigrationPreCheckError,
exception.MigrationPreCheckClientException,
exception.LiveMigrationWithOldNovaNotSupported,
exception.MigrationSchedulerRPCError) as ex:
with excutils.save_and_reraise_exception():
# TODO(johngarbutt) - eventually need instance actions here
_set_vm_state(context, instance, ex, instance.vm_state)
migration.status = 'error'
migration.save()
except Exception as ex:
LOG.error('Migration of instance %(instance_id)s to host'
' %(dest)s unexpectedly failed.',
{'instance_id': instance.uuid, 'dest': destination},
exc_info=True)
# Reset the task state to None to indicate completion of
# the operation as it is done in case of known exceptions.
_set_vm_state(context, instance, ex, vm_states.ERROR,
task_state=None)
migration.status = 'error'
migration.save()
raise exception.MigrationError(reason=six.text_type(ex))
def _build_live_migrate_task(self, context, instance, destination,
block_migration, disk_over_commit, migration,
request_spec=None):
return live_migrate.LiveMigrationTask(context, instance,
destination, block_migration,
disk_over_commit, migration,
self.compute_rpcapi,
self.servicegroup_api,
self.scheduler_client,
request_spec)
def _build_cold_migrate_task(self, context, instance, flavor,
request_spec, reservations,
clean_shutdown):
return migrate.MigrationTask(context, instance, flavor,
request_spec,
reservations, clean_shutdown,
self.compute_rpcapi,
self.scheduler_client)
def _destroy_build_request(self, context, instance):
# The BuildRequest needs to be stored until the instance is mapped to
# an instance table. At that point it will never be used again and
# should be deleted.
build_request = objects.BuildRequest.get_by_instance_uuid(
context, instance.uuid)
# TODO(alaski): Sync API updates of the build_request to the
# instance before it is destroyed. Right now only locked_by can
# be updated before this is destroyed.
build_request.destroy()
def _populate_instance_mapping(self, context, instance, host):
try:
inst_mapping = objects.InstanceMapping.get_by_instance_uuid(
context, instance.uuid)
except exception.InstanceMappingNotFound:
# NOTE(alaski): If nova-api is up to date this exception should
# never be hit. But during an upgrade it's possible that an old
# nova-api didn't create an instance_mapping during this boot
# request.
LOG.debug('Instance was not mapped to a cell, likely due '
'to an older nova-api service running.',
instance=instance)
return None
else:
try:
host_mapping = objects.HostMapping.get_by_host(context,
host['host'])
except exception.HostMappingNotFound:
# NOTE(alaski): For now this exception means that a
# deployment has not migrated to cellsv2 and we should
# remove the instance_mapping that has been created.
# Eventually this will indicate a failure to properly map a
# host to a cell and we may want to reschedule.
inst_mapping.destroy()
return None
else:
inst_mapping.cell_mapping = host_mapping.cell_mapping
inst_mapping.save()
return inst_mapping
# NOTE(danms): This is never cell-targeted because it is only used for
# cellsv1 (which does not target cells directly) and n-cpu reschedules
# (which go to the cell conductor and thus are always cell-specific).
def build_instances(self, context, instances, image, filter_properties,
admin_password, injected_files, requested_networks,
security_groups, block_device_mapping=None, legacy_bdm=True):
# TODO(ndipanov): Remove block_device_mapping and legacy_bdm in version
# 2.0 of the RPC API.
# TODO(danms): Remove this in version 2.0 of the RPC API
if (requested_networks and
not isinstance(requested_networks,
objects.NetworkRequestList)):
requested_networks = objects.NetworkRequestList.from_tuples(
requested_networks)
# TODO(melwitt): Remove this in version 2.0 of the RPC API
flavor = filter_properties.get('instance_type')
if flavor and not isinstance(flavor, objects.Flavor):
# Code downstream may expect extra_specs to be populated since it
# is receiving an object, so lookup the flavor to ensure this.
flavor = objects.Flavor.get_by_id(context, flavor['id'])
filter_properties = dict(filter_properties, instance_type=flavor)
request_spec = {}
try:
# check retry policy. Rather ugly use of instances[0]...
# but if we've exceeded max retries... then we really only
# have a single instance.
# TODO(sbauza): Provide directly the RequestSpec object
# when _set_vm_state_and_notify() and populate_retry()
# accept it
request_spec = scheduler_utils.build_request_spec(
context, image, instances)
scheduler_utils.populate_retry(
filter_properties, instances[0].uuid)
instance_uuids = [instance.uuid for instance in instances]
spec_obj = objects.RequestSpec.from_primitives(
context, request_spec, filter_properties)
hosts = self._schedule_instances(
context, spec_obj, instance_uuids)
except Exception as exc:
updates = {'vm_state': vm_states.ERROR, 'task_state': None}
for instance in instances:
self._set_vm_state_and_notify(
context, instance.uuid, 'build_instances', updates,
exc, request_spec)
try:
# If the BuildRequest stays around then instance show/lists
# will pull from it rather than the errored instance.
self._destroy_build_request(context, instance)
except exception.BuildRequestNotFound:
pass
self._cleanup_allocated_networks(
context, instance, requested_networks)
return
for (instance, host) in six.moves.zip(instances, hosts):
instance.availability_zone = (
availability_zones.get_host_availability_zone(context,
host['host']))
try:
# NOTE(danms): This saves the az change above, refreshes our
# instance, and tells us if it has been deleted underneath us
instance.save()
except (exception.InstanceNotFound,
exception.InstanceInfoCacheNotFound):
LOG.debug('Instance deleted during build', instance=instance)
continue
local_filter_props = copy.deepcopy(filter_properties)
scheduler_utils.populate_filter_properties(local_filter_props,
host)
# The block_device_mapping passed from the api doesn't contain
# instance specific information
bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
context, instance.uuid)
# This is populated in scheduler_utils.populate_retry
num_attempts = local_filter_props.get('retry',
{}).get('num_attempts', 1)
if num_attempts <= 1:
# If this is a reschedule the instance is already mapped to
# this cell and the BuildRequest is already deleted so ignore
# the logic below.
inst_mapping = self._populate_instance_mapping(context,
instance,
host)
try:
self._destroy_build_request(context, instance)
except exception.BuildRequestNotFound:
# This indicates an instance delete has been requested in
# the API. Stop the build, cleanup the instance_mapping and
# potentially the block_device_mappings
# TODO(alaski): Handle block_device_mapping cleanup
if inst_mapping:
inst_mapping.destroy()
return
self.compute_rpcapi.build_and_run_instance(context,
instance=instance, host=host['host'], image=image,
request_spec=request_spec,
filter_properties=local_filter_props,
admin_password=admin_password,
injected_files=injected_files,
requested_networks=requested_networks,
security_groups=security_groups,
block_device_mapping=bdms, node=host['nodename'],
limits=host['limits'])
def _schedule_instances(self, context, request_spec,
instance_uuids=None):
scheduler_utils.setup_instance_group(context, request_spec)
hosts = self.scheduler_client.select_destinations(context,
request_spec, instance_uuids)
return hosts
@targets_cell
def unshelve_instance(self, context, instance, request_spec=None):
sys_meta = instance.system_metadata
def safe_image_show(ctx, image_id):
if image_id:
return self.image_api.get(ctx, image_id, show_deleted=False)
else:
raise exception.ImageNotFound(image_id='')
if instance.vm_state == vm_states.SHELVED:
instance.task_state = task_states.POWERING_ON
instance.save(expected_task_state=task_states.UNSHELVING)
self.compute_rpcapi.start_instance(context, instance)
elif instance.vm_state == vm_states.SHELVED_OFFLOADED:
image = None
image_id = sys_meta.get('shelved_image_id')
# No need to check for image if image_id is None as
# "shelved_image_id" key is not set for volume backed
# instance during the shelve process
if image_id:
with compute_utils.EventReporter(
context, 'get_image_info', instance.uuid):
try:
image = safe_image_show(context, image_id)
except exception.ImageNotFound:
instance.vm_state = vm_states.ERROR
instance.save()
reason = _('Unshelve attempted but the image %s '
'cannot be found.') % image_id
LOG.error(reason, instance=instance)
raise exception.UnshelveException(
instance_id=instance.uuid, reason=reason)
try:
with compute_utils.EventReporter(context, 'schedule_instances',
instance.uuid):
if not request_spec:
# NOTE(sbauza): We were unable to find an original
# RequestSpec object - probably because the instance is
# old. We need to mock that the old way
filter_properties = {}
request_spec = scheduler_utils.build_request_spec(
context, image, [instance])
else:
# NOTE(sbauza): Force_hosts/nodes needs to be reset
# if we want to make sure that the next destination
# is not forced to be the original host
request_spec.reset_forced_destinations()
# TODO(sbauza): Provide directly the RequestSpec object
# when populate_filter_properties and populate_retry()
# accept it
filter_properties = request_spec.\
to_legacy_filter_properties_dict()
request_spec = request_spec.\
to_legacy_request_spec_dict()
scheduler_utils.populate_retry(filter_properties,
instance.uuid)
request_spec = objects.RequestSpec.from_primitives(
context, request_spec, filter_properties)
# NOTE(cfriesen): Ensure that we restrict the scheduler to
# the cell specified by the instance mapping.
instance_mapping = \
objects.InstanceMapping.get_by_instance_uuid(
context, instance.uuid)
LOG.debug('Requesting cell %(cell)s while unshelving',
{'cell': instance_mapping.cell_mapping.identity},
instance=instance)
if ('requested_destination' in request_spec and
request_spec.requested_destination):
request_spec.requested_destination.cell = (
instance_mapping.cell_mapping)
else:
request_spec.requested_destination = (
objects.Destination(
cell=instance_mapping.cell_mapping))
hosts = self._schedule_instances(context, request_spec,
[instance.uuid])
host_state = hosts[0]
scheduler_utils.populate_filter_properties(
filter_properties, host_state)
(host, node) = (host_state['host'], host_state['nodename'])
instance.availability_zone = (
availability_zones.get_host_availability_zone(
context, host))
self.compute_rpcapi.unshelve_instance(
context, instance, host, image=image,
filter_properties=filter_properties, node=node)
except (exception.NoValidHost,
exception.UnsupportedPolicyException):
instance.task_state = None
instance.save()
LOG.warning("No valid host found for unshelve instance",
instance=instance)
return
except Exception:
with excutils.save_and_reraise_exception():
instance.task_state = None
instance.save()
LOG.error("Unshelve attempted but an error "
"has occurred", instance=instance)
else:
LOG.error('Unshelve attempted but vm_state not SHELVED or '
'SHELVED_OFFLOADED', instance=instance)
instance.vm_state = vm_states.ERROR
instance.save()
return
@targets_cell
def rebuild_instance(self, context, instance, orig_image_ref, image_ref,
injected_files, new_pass, orig_sys_metadata,
bdms, recreate, on_shared_storage,
preserve_ephemeral=False, host=None,
request_spec=None):
with compute_utils.EventReporter(context, 'rebuild_server',
instance.uuid):
node = limits = None
if not host:
if not request_spec:
# NOTE(sbauza): We were unable to find an original
# RequestSpec object - probably because the instance is old
# We need to mock that the old way
# TODO(sbauza): Provide directly the RequestSpec object
# when _set_vm_state_and_notify() accepts it
filter_properties = {'ignore_hosts': [instance.host]}
request_spec = scheduler_utils.build_request_spec(
context, image_ref, [instance])
request_spec = objects.RequestSpec.from_primitives(
context, request_spec, filter_properties)
else:
# NOTE(sbauza): Augment the RequestSpec object by excluding
# the source host for avoiding the scheduler to pick it
request_spec.ignore_hosts = request_spec.ignore_hosts or []
request_spec.ignore_hosts.append(instance.host)
# NOTE(sbauza): Force_hosts/nodes needs to be reset
# if we want to make sure that the next destination
# is not forced to be the original host
request_spec.reset_forced_destinations()
try:
hosts = self._schedule_instances(context, request_spec,
[instance.uuid])
host_dict = hosts.pop(0)
host, node, limits = (host_dict['host'],
host_dict['nodename'],
host_dict['limits'])
except exception.NoValidHost as ex:
request_spec = request_spec.to_legacy_request_spec_dict()
with excutils.save_and_reraise_exception():
self._set_vm_state_and_notify(context, instance.uuid,
'rebuild_server',
{'vm_state': instance.vm_state,
'task_state': None}, ex, request_spec)
LOG.warning("No valid host found for rebuild",
instance=instance)
except exception.UnsupportedPolicyException as ex:
request_spec = request_spec.to_legacy_request_spec_dict()
with excutils.save_and_reraise_exception():
self._set_vm_state_and_notify(context, instance.uuid,
'rebuild_server',
{'vm_state': instance.vm_state,
'task_state': None}, ex, request_spec)
LOG.warning("Server with unsupported policy "
"cannot be rebuilt", instance=instance)
try:
migration = objects.Migration.get_by_instance_and_status(
context, instance.uuid, 'accepted')
except exception.MigrationNotFoundByStatus:
LOG.debug("No migration record for the rebuild/evacuate "
"request.", instance=instance)
migration = None
compute_utils.notify_about_instance_usage(
self.notifier, context, instance, "rebuild.scheduled")
instance.availability_zone = (
availability_zones.get_host_availability_zone(
context, host))
self.compute_rpcapi.rebuild_instance(context,
instance=instance,
new_pass=new_pass,
injected_files=injected_files,
image_ref=image_ref,
orig_image_ref=orig_image_ref,
orig_sys_metadata=orig_sys_metadata,
bdms=bdms,
recreate=recreate,
on_shared_storage=on_shared_storage,
preserve_ephemeral=preserve_ephemeral,
migration=migration,
host=host, node=node, limits=limits)
# TODO(avolkov): move method to bdm
@staticmethod
def _volume_size(instance_type, bdm):
size = bdm.get('volume_size')
# NOTE (ndipanov): inherit flavor size only for swap and ephemeral
if (size is None and bdm.get('source_type') == 'blank' and
bdm.get('destination_type') == 'local'):
if bdm.get('guest_format') == 'swap':
size = instance_type.get('swap', 0)
else:
size = instance_type.get('ephemeral_gb', 0)
return size
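    # Worked example (hypothetical flavor with swap=1024 and ephemeral_gb=20):
    # a blank/local BDM with guest_format='swap' and no volume_size resolves
    # to 1024, a plain blank/local BDM resolves to 20, and a BDM carrying an
    # explicit volume_size keeps that value.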
def _create_block_device_mapping(self, cell, instance_type, instance_uuid,
block_device_mapping):
"""Create the BlockDeviceMapping objects in the db.
This method makes a copy of the list in order to avoid using the same
id field in case this is called for multiple instances.
"""
LOG.debug("block_device_mapping %s", list(block_device_mapping),
instance_uuid=instance_uuid)
instance_block_device_mapping = copy.deepcopy(block_device_mapping)
for bdm in instance_block_device_mapping:
bdm.volume_size = self._volume_size(instance_type, bdm)
bdm.instance_uuid = instance_uuid
with obj_target_cell(bdm, cell):
bdm.update_or_create()
return instance_block_device_mapping
def _create_tags(self, context, instance_uuid, tags):
"""Create the Tags objects in the db."""
if tags:
tag_list = [tag.tag for tag in tags]
instance_tags = objects.TagList.create(
context, instance_uuid, tag_list)
return instance_tags
else:
return tags
def _bury_in_cell0(self, context, request_spec, exc,
build_requests=None, instances=None):
"""Ensure all provided build_requests and instances end up in cell0.
Cell0 is the fake cell we schedule dead instances to when we can't
schedule them somewhere real. Requests that don't yet have instances
will get a new instance, created in cell0. Instances that have not yet
been created will be created in cell0. All build requests are destroyed
after we're done. Failure to delete a build request will trigger the
instance deletion, just like the happy path in
schedule_and_build_instances() below.
"""
try:
cell0 = objects.CellMapping.get_by_uuid(
context, objects.CellMapping.CELL0_UUID)
except exception.CellMappingNotFound:
# Not yet setup for cellsv2. Instances will need to be written
# to the configured database. This will become a deployment
# error in Ocata.
LOG.error('No cell mapping found for cell0 while '
'trying to record scheduling failure. '
'Setup is incomplete.')
return
build_requests = build_requests or []
instances = instances or []
instances_by_uuid = {inst.uuid: inst for inst in instances}
for build_request in build_requests:
if build_request.instance_uuid not in instances_by_uuid:
# This is an instance object with no matching db entry.
instance = build_request.get_new_instance(context)
instances_by_uuid[instance.uuid] = instance
updates = {'vm_state': vm_states.ERROR, 'task_state': None}
legacy_spec = request_spec.to_legacy_request_spec_dict()
for instance in instances_by_uuid.values():
with obj_target_cell(instance, cell0) as cctxt:
instance.create()
# Use the context targeted to cell0 here since the instance is
# now in cell0.
self._set_vm_state_and_notify(
cctxt, instance.uuid, 'build_instances', updates,
exc, legacy_spec)
try:
# We don't need the cell0-targeted context here because the
# instance mapping is in the API DB.
inst_mapping = \
objects.InstanceMapping.get_by_instance_uuid(
context, instance.uuid)
inst_mapping.cell_mapping = cell0
inst_mapping.save()
except exception.InstanceMappingNotFound:
pass
for build_request in build_requests:
try:
build_request.destroy()
except exception.BuildRequestNotFound:
# Instance was deleted before we finished scheduling
inst = instances_by_uuid[build_request.instance_uuid]
with obj_target_cell(inst, cell0):
inst.destroy()
def schedule_and_build_instances(self, context, build_requests,
request_specs, image,
admin_password, injected_files,
requested_networks, block_device_mapping,
tags=None):
# Add all the UUIDs for the instances
instance_uuids = [spec.instance_uuid for spec in request_specs]
try:
hosts = self._schedule_instances(context, request_specs[0],
instance_uuids)
except Exception as exc:
LOG.exception('Failed to schedule instances')
self._bury_in_cell0(context, request_specs[0], exc,
build_requests=build_requests)
return
host_mapping_cache = {}
instances = []
for (build_request, request_spec, host) in six.moves.zip(
build_requests, request_specs, hosts):
instance = build_request.get_new_instance(context)
# Convert host from the scheduler into a cell record
if host['host'] not in host_mapping_cache:
try:
host_mapping = objects.HostMapping.get_by_host(
context, host['host'])
host_mapping_cache[host['host']] = host_mapping
except exception.HostMappingNotFound as exc:
LOG.error('No host-to-cell mapping found for selected '
'host %(host)s. Setup is incomplete.',
{'host': host['host']})
self._bury_in_cell0(context, request_spec, exc,
build_requests=[build_request],
instances=[instance])
# This is a placeholder in case the quota recheck fails.
instances.append(None)
continue
else:
host_mapping = host_mapping_cache[host['host']]
cell = host_mapping.cell_mapping
# Before we create the instance, let's make one final check that
# the build request is still around and wasn't deleted by the user
# already.
try:
objects.BuildRequest.get_by_instance_uuid(
context, instance.uuid)
except exception.BuildRequestNotFound:
# the build request is gone so we're done for this instance
LOG.debug('While scheduling instance, the build request '
'was already deleted.', instance=instance)
# This is a placeholder in case the quota recheck fails.
instances.append(None)
continue
else:
instance.availability_zone = (
availability_zones.get_host_availability_zone(
context, host['host']))
with obj_target_cell(instance, cell):
instance.create()
instances.append(instance)
# NOTE(melwitt): We recheck the quota after creating the
# objects to prevent users from allocating more resources
# than their allowed quota in the event of a race. This is
# configurable because it can be expensive if strict quota
# limits are not required in a deployment.
if CONF.quota.recheck_quota:
try:
compute_utils.check_num_instances_quota(
context, instance.flavor, 0, 0,
orig_num_req=len(build_requests))
except exception.TooManyInstances as exc:
with excutils.save_and_reraise_exception():
self._cleanup_build_artifacts(context, exc, instances,
build_requests,
request_specs)
for (build_request, request_spec, host, instance) in six.moves.zip(
build_requests, request_specs, hosts, instances):
if instance is None:
# Skip placeholders that were buried in cell0 or had their
# build requests deleted by the user before instance create.
continue
filter_props = request_spec.to_legacy_filter_properties_dict()
scheduler_utils.populate_retry(filter_props, instance.uuid)
scheduler_utils.populate_filter_properties(filter_props,
host)
# send a state update notification for the initial create to
# show it going from non-existent to BUILDING
notifications.send_update_with_states(context, instance, None,
vm_states.BUILDING, None, None, service="conductor")
with obj_target_cell(instance, cell) as cctxt:
objects.InstanceAction.action_start(
cctxt, instance.uuid, instance_actions.CREATE,
want_result=False)
instance_bdms = self._create_block_device_mapping(
cell, instance.flavor, instance.uuid, block_device_mapping)
instance_tags = self._create_tags(cctxt, instance.uuid, tags)
# TODO(Kevin Zheng): clean this up once instance.create() handles
# tags; we do this so the instance.create notification in
# build_and_run_instance in nova-compute doesn't lazy-load tags
instance.tags = instance_tags if instance_tags \
else objects.TagList()
# Update mapping for instance. Normally this check is guarded by
# a try/except but if we're here we know that a newer nova-api
# handled the build process and would have created the mapping
inst_mapping = objects.InstanceMapping.get_by_instance_uuid(
context, instance.uuid)
inst_mapping.cell_mapping = cell
inst_mapping.save()
if not self._delete_build_request(
context, build_request, instance, cell, instance_bdms,
instance_tags):
# The build request was deleted before/during scheduling so
# the instance is gone and we don't have anything to build for
# this one.
continue
# NOTE(danms): Compute RPC expects security group names or ids
# not objects, so convert this to a list of names until we can
# pass the objects.
legacy_secgroups = [s.identifier
for s in request_spec.security_groups]
with obj_target_cell(instance, cell) as cctxt:
self.compute_rpcapi.build_and_run_instance(
cctxt, instance=instance, image=image,
request_spec=request_spec,
filter_properties=filter_props,
admin_password=admin_password,
injected_files=injected_files,
requested_networks=requested_networks,
security_groups=legacy_secgroups,
block_device_mapping=instance_bdms,
host=host['host'], node=host['nodename'],
limits=host['limits'])
def _cleanup_build_artifacts(self, context, exc, instances, build_requests,
request_specs):
for (instance, build_request, request_spec) in six.moves.zip(
instances, build_requests, request_specs):
# Skip placeholders that were buried in cell0 or had their
# build requests deleted by the user before instance create.
if instance is None:
continue
updates = {'vm_state': vm_states.ERROR, 'task_state': None}
legacy_spec = request_spec.to_legacy_request_spec_dict()
self._set_vm_state_and_notify(context, instance.uuid,
'build_instances', updates, exc,
legacy_spec)
# Be paranoid about artifacts being deleted underneath us.
try:
build_request.destroy()
except exception.BuildRequestNotFound:
pass
try:
request_spec.destroy()
except exception.RequestSpecNotFound:
pass
def _delete_build_request(self, context, build_request, instance, cell,
instance_bdms, instance_tags):
"""Delete a build request after creating the instance in the cell.
This method handles cleaning up the instance in case the build request
is already deleted by the time we try to delete it.
:param context: the context of the request being handled
:type context: nova.context.RequestContext
:param build_request: the build request to delete
:type build_request: nova.objects.BuildRequest
:param instance: the instance created from the build_request
:type instance: nova.objects.Instance
:param cell: the cell in which the instance was created
:type cell: nova.objects.CellMapping
:param instance_bdms: list of block device mappings for the instance
:type instance_bdms: nova.objects.BlockDeviceMappingList
:param instance_tags: list of tags for the instance
:type instance_tags: nova.objects.TagList
:returns: True if the build request was successfully deleted, False if
the build request was already deleted and the instance is now gone.
"""
try:
build_request.destroy()
except exception.BuildRequestNotFound:
# This indicates an instance deletion request has been
# processed, and the build should halt here. Clean up the
# bdm, tags and instance record.
with obj_target_cell(instance, cell) as cctxt:
with compute_utils.notify_about_instance_delete(
self.notifier, cctxt, instance):
try:
instance.destroy()
except exception.InstanceNotFound:
pass
except exception.ObjectActionError:
# NOTE(melwitt): Instance became scheduled during
# the destroy, "host changed". Refresh and re-destroy.
try:
instance.refresh()
instance.destroy()
except exception.InstanceNotFound:
pass
for bdm in instance_bdms:
with obj_target_cell(bdm, cell):
try:
bdm.destroy()
except exception.ObjectActionError:
pass
if instance_tags:
with try_target_cell(context, cell) as target_ctxt:
try:
objects.TagList.destroy(target_ctxt, instance.uuid)
except exception.InstanceNotFound:
pass
return False
return True
| apache-2.0 | -8,708,059,897,833,092,000 | 48.152042 | 79 | 0.565101 | false |
E7ernal/quizwhiz | quizard/views/Results.py | 1 | 5042 | # vim: ts=4:sw=4:expandtabs
__author__ = '[email protected]'
from django.conf import settings
from django.views import generic
from django.contrib import messages
from django.shortcuts import redirect
from django.template.loader import get_template
from django.utils.translation import ugettext_lazy as _
from email_utils.tasks import send_mail
from quizard.models.Assignment import Assignment
class Results(generic.DetailView):
model = Assignment
slug_field = 'code'
slug_url_kwarg = 'code'
context_object_name = 'assignment'
template_name = 'quizard/results.html'
def get(self, request, *pos, **kw):
# If the user isn't currently working on an assignment,
# they shouldn't be allowed to access the results page.
if 'assignment_code' not in self.request.session:
messages.info(request, _('You must complete an assignment before visiting the results page.'))
return redirect('index')
# If the assignment is still in progress (i.e., we have a current position),
# send the user back to that position rather than allowing them to view their
# (incomplete) results.
if isinstance(request.session.get('assignment_in_progress', None), basestring):
messages.info(request, _('You must complete this assignment before viewing your results.'))
return redirect(request.session['assignment_in_progress'])
return super(Results, self).get(request, *pos, **kw)
def get_context_data(self, **kw):
context = super(Results, self).get_context_data(**kw)
context.update({
'points_earned': self.object.calculate_score(self.request.session['answers']),
'questions': self.build_question_dicts(
context['assignment'],
self.request.session['answers']
)
})
# Record the user's score on this assignment.
completed_assignments = self.request.session.get('completed_assignments', {})
completed_assignments[self.object.code] = context['points_earned']
self.request.session['completed_assignments'] = completed_assignments
# Clear the user's current assignment.
# del self.request.session['assignment_code']
self.request.session.modified = True
self.send_emails()
return context
def build_question_dicts(self, assignment, answers):
question_list = []
for question in assignment.questions.all():
question_list.append({
'question': question,
'answer': answers[str(question.pk)],
'correct': question.validate_answer(answers[str(question.pk)]),
})
return question_list
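    # Illustrative shape of one entry in the list built above (values are
    # made up): {'question': <Question pk 7>, 'answer': u'42', 'correct': True}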
def send_emails(self):
self.send_teacher_email(self.object)
self.send_summary_email(self.object)
def send_teacher_email(self, assignment):
"""
Email the assignment creator the results of this particular
quiz-taking session.
"""
self._send_email(
assignment,
assignment.created_by.email,
_("{assignment.code} results -- {assignee_name}").format(
assignment=assignment,
assignee_name=self.request.session['assignee_name'],
),
'quizard/emails/assignment_results.txt'
)
def send_summary_email(self, assignment):
"""
        Send a results receipt to the given third party, if there is one.
"""
if self.request.session.get('assignee_email', None):
self._send_email(
assignment,
self.request.session['assignee_email'],
_("{assignment.code} summary -- {assignee_name}").format(
assignment=assignment,
assignee_name=self.request.session['assignee_name']
),
'quizard/emails/assignment_results_summary.txt'
)
def _send_email(self, assignment, to_address, subject, email_template):
template_instance = get_template(email_template)
context = {
'assignment': assignment,
'points_earned': assignment.calculate_score(self.request.session['answers']),
'questions': self.build_question_dicts(
assignment,
self.request.session['answers'],
),
'assignee_name': self.request.session['assignee_name'],
'DEFAULT_FROM_EMAIL': settings.DEFAULT_FROM_EMAIL,
'BRAND_NAME': settings.BRAND_NAME
}
args = (
subject,
template_instance.render(context),
settings.DEFAULT_FROM_EMAIL,
to_address
)
# Don't try to invoke the task asynchronously in DEBUG mode,
# because it's a dev environment and celery probably isn't configured.
if settings.DEBUG:
return send_mail(*args)
else:
return send_mail.apply_async(args)
| mit | -2,658,239,532,650,944,500 | 35.80292 | 106 | 0.61067 | false |
macosforge/ccs-calendarserver | calendarserver/tools/agent.py | 1 | 10761 | #!/usr/bin/env python
# -*- test-case-name: calendarserver.tools.test.test_agent -*-
##
# Copyright (c) 2013-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
A service spawned on-demand by launchd, meant to handle configuration requests
from Server.app. When a request comes in on the socket specified in the
launchd agent.plist, launchd will run "caldavd -t Agent" which ends up creating
this service. Requests are made using HTTP POSTs to /gateway, and are
authenticated by OpenDirectory.
"""
from __future__ import print_function
__all__ = [
"makeAgentService",
]
import cStringIO
from plistlib import readPlistFromString, writePlistToString
import socket
from twext.python.launchd import launchActivateSocket
from twext.python.log import Logger
from twext.who.checker import HTTPDigestCredentialChecker
from twext.who.opendirectory import (
DirectoryService as OpenDirectoryDirectoryService,
NoQOPDigestCredentialFactory
)
from twisted.application.internet import StreamServerEndpointService
from twisted.cred.portal import IRealm, Portal
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.internet.endpoints import AdoptedStreamServerEndpoint
from twisted.internet.protocol import Factory
from twisted.protocols import amp
from twisted.web.guard import HTTPAuthSessionWrapper
from twisted.web.resource import IResource, Resource, ForbiddenResource
from twisted.web.server import Site, NOT_DONE_YET
from zope.interface import implements
log = Logger()
class AgentRealm(object):
"""
Only allow a specified list of avatar IDs to access the site
"""
implements(IRealm)
def __init__(self, root, allowedAvatarIds):
"""
@param root: The root resource of the site
@param allowedAvatarIds: The list of IDs to allow access to
"""
self.root = root
self.allowedAvatarIds = allowedAvatarIds
def requestAvatar(self, avatarId, mind, *interfaces):
if IResource in interfaces:
if avatarId.shortNames[0] in self.allowedAvatarIds:
return (IResource, self.root, lambda: None)
else:
return (IResource, ForbiddenResource(), lambda: None)
raise NotImplementedError()
class AgentGatewayResource(Resource):
"""
The gateway resource which forwards incoming requests through
gateway.Runner.
"""
isLeaf = True
def __init__(self, store, directory, inactivityDetector):
"""
@param store: an already opened store
@param directory: a directory service
@param inactivityDetector: the InactivityDetector to tell when requests
come in
"""
Resource.__init__(self)
self.store = store
self.directory = directory
self.inactivityDetector = inactivityDetector
def render_POST(self, request):
"""
Take the body of the POST request and feed it to gateway.Runner();
return the result as the response body.
"""
self.inactivityDetector.activity()
def onSuccess(result, output):
txt = output.getvalue()
output.close()
request.write(txt)
request.finish()
def onError(failure):
message = failure.getErrorMessage()
tbStringIO = cStringIO.StringIO()
failure.printTraceback(file=tbStringIO)
tbString = tbStringIO.getvalue()
tbStringIO.close()
error = {
"Error": message,
"Traceback": tbString,
}
log.error("command failed {error}", error=failure)
request.write(writePlistToString(error))
request.finish()
from calendarserver.tools.gateway import Runner
body = request.content.read()
command = readPlistFromString(body)
output = cStringIO.StringIO()
runner = Runner(self.store, [command], output=output)
d = runner.run()
d.addCallback(onSuccess, output)
d.addErrback(onError)
return NOT_DONE_YET
def makeAgentService(store):
"""
    Returns a service which will process gateway requests, using a socket
    file descriptor acquired by launchd.
@param store: an already opened store
@returns: service
"""
from twisted.internet import reactor
sockets = launchActivateSocket("AgentSocket")
fd = sockets[0]
family = socket.AF_INET
endpoint = AdoptedStreamServerEndpoint(reactor, fd, family)
directory = store.directoryService()
def becameInactive():
log.warn("Agent inactive; shutting down")
reactor.stop()
from twistedcaldav.config import config
inactivityDetector = InactivityDetector(
reactor, config.AgentInactivityTimeoutSeconds, becameInactive
)
root = Resource()
root.putChild(
"gateway",
AgentGatewayResource(
store, directory, inactivityDetector
)
)
# We need this service to be able to return com.apple.calendarserver,
# so tell it not to suppress system accounts.
directory = OpenDirectoryDirectoryService(
"/Local/Default", suppressSystemRecords=False
)
portal = Portal(
AgentRealm(root, [u"com.apple.calendarserver"]),
[HTTPDigestCredentialChecker(directory)]
)
credentialFactory = NoQOPDigestCredentialFactory(
"md5", "/Local/Default"
)
wrapper = HTTPAuthSessionWrapper(portal, [credentialFactory])
site = Site(wrapper)
return StreamServerEndpointService(endpoint, site)
class InactivityDetector(object):
"""
If no 'activity' takes place for a specified amount of time, a method
will get called. Activity causes the inactivity time threshold to be
reset.
"""
def __init__(self, reactor, timeoutSeconds, becameInactive):
"""
@param reactor: the reactor
        @param timeoutSeconds: the number of seconds considered to mean inactive
        @param becameInactive: the method to call (with no arguments) when
inactivity is reached
"""
self._reactor = reactor
self._timeoutSeconds = timeoutSeconds
self._becameInactive = becameInactive
if self._timeoutSeconds > 0:
self._delayedCall = self._reactor.callLater(
self._timeoutSeconds,
self._inactivityThresholdReached
)
def _inactivityThresholdReached(self):
"""
The delayed call has fired. We're inactive. Call the becameInactive
method.
"""
self._becameInactive()
def activity(self):
"""
        Call this to let the InactivityDetector know that there has been activity.
It will reset the timeout.
"""
if self._timeoutSeconds > 0:
if self._delayedCall.active():
self._delayedCall.reset(self._timeoutSeconds)
else:
self._delayedCall = self._reactor.callLater(
self._timeoutSeconds,
self._inactivityThresholdReached
)
def stop(self):
"""
        Cancel the pending delayed call, if there is one.
"""
if self._timeoutSeconds > 0:
if self._delayedCall.active():
self._delayedCall.cancel()
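# Minimal usage sketch (hypothetical timeout value), mirroring the wiring in
# makeAgentService() above:
#
#     detector = InactivityDetector(reactor, 600, becameInactive)
#     detector.activity()   # call whenever a request comes in
#     detector.stop()       # cancel the pending timeout at shutdown
#
# A timeoutSeconds of 0 disables the inactivity timer entirely.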
#
# Alternate implementation using AMP instead of HTTP
#
class GatewayAMPCommand(amp.Command):
"""
A command to be executed by gateway.Runner
"""
arguments = [('command', amp.String())]
response = [('result', amp.String())]
class GatewayAMPProtocol(amp.AMP):
"""
Passes commands to gateway.Runner and returns the results
"""
def __init__(self, store, directory):
"""
        @param store: an already opened store
@param directory: a directory service
"""
amp.AMP.__init__(self)
self.store = store
self.directory = directory
@GatewayAMPCommand.responder
@inlineCallbacks
def gatewayCommandReceived(self, command):
"""
Process a command via gateway.Runner
@param command: GatewayAMPCommand
@returns: a deferred returning a dict
"""
command = readPlistFromString(command)
output = cStringIO.StringIO()
from calendarserver.tools.gateway import Runner
runner = Runner(
self.store,
[command], output=output
)
try:
yield runner.run()
result = output.getvalue()
output.close()
except Exception as e:
error = {"Error": str(e)}
result = writePlistToString(error)
output.close()
returnValue(dict(result=result))
class GatewayAMPFactory(Factory):
"""
Builds GatewayAMPProtocols
"""
protocol = GatewayAMPProtocol
def __init__(self, store):
"""
@param store: an already opened store
"""
self.store = store
self.directory = self.store.directoryService()
def buildProtocol(self, addr):
return GatewayAMPProtocol(
            self.store, self.directory
)
#
# A test AMP client
#
command = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN"
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>command</key>
<string>getLocationAndResourceList</string>
</dict>
</plist>"""
def getList():
# For the sample client, below:
from twisted.internet import reactor
from twisted.internet.protocol import ClientCreator
creator = ClientCreator(reactor, amp.AMP)
host = '127.0.0.1'
import sys
if len(sys.argv) > 1:
host = sys.argv[1]
d = creator.connectTCP(host, 62308)
def connected(ampProto):
return ampProto.callRemote(GatewayAMPCommand, command=command)
d.addCallback(connected)
def resulted(result):
return result['result']
d.addCallback(resulted)
def done(result):
print('Done: %s' % (result,))
reactor.stop()
d.addCallback(done)
reactor.run()
if __name__ == '__main__':
getList()
| apache-2.0 | -6,349,575,303,463,743,000 | 28.401639 | 79 | 0.646222 | false |
rocky/python2-trepan | trepan/bwprocessor/main.py | 1 | 18126 | # -*- coding: utf-8 -*-
# Copyright (C) 2008-2010, 2013-2015 Rocky Bernstein <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import inspect, linecache, sys, traceback, types
import pyficache
from repr import Repr
from trepan import vprocessor as Mprocessor
from trepan import exception as Mexcept, misc as Mmisc
from trepan.lib import bytecode as Mbytecode, display as Mdisplay
from trepan.lib import thred as Mthread
from trepan.bwprocessor import location as Mlocation, msg as Mmsg
def get_stack(f, t, botframe, proc_obj=None):
"""Return a stack of frames which the debugger will use for in
showing backtraces and in frame switching. As such various frame
that are really around may be excluded unless we are debugging the
sebugger. Also we will add traceback frame on top if that
exists."""
exclude_frame = lambda f: False
if proc_obj:
settings = proc_obj.debugger.settings
if not settings['dbg_trepan']:
exclude_frame = lambda f: \
proc_obj.core.ignore_filter.is_included(f)
pass
pass
stack = []
if t and t.tb_frame is f:
t = t.tb_next
while f is not None:
if exclude_frame(f): break # See commented alternative below
stack.append((f, f.f_lineno))
# bdb has:
# if f is botframe: break
f = f.f_back
pass
stack.reverse()
i = max(0, len(stack) - 1)
while t is not None:
stack.append((t.tb_frame, t.tb_lineno))
t = t.tb_next
pass
return stack, i
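# Sketch of the return value: the list is ordered oldest frame first, with any
# traceback frames appended at the end, and `i` indexes the newest
# non-traceback frame, e.g. ([(frame_main, 10), (frame_f, 42)], 1) for a
# two-frame stop with no pending exception (hypothetical frames).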
def run_hooks(obj, hooks, *args):
"""Run each function in `hooks' with args"""
for hook in hooks:
if hook(obj, *args): return True
pass
return False
def resolve_name(obj, command_name):
if command_name not in obj.commands:
return None
return command_name
# Default settings for command processor method call
DEFAULT_PROC_OPTS = {
# A list of debugger initialization files to read on first command
    # loop entry. Often this is something like [~/.trepanrc] which the
# front-end sets.
'initfile_list' : []
}
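# For example, a front-end might construct the processor with
# {'initfile_list': ['~/.trepanrc']} (hypothetical path) so that the file is
# read on first entry to the command loop.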
class BWProcessor(Mprocessor.Processor):
def __init__(self, core_obj, opts=None):
Mprocessor.Processor.__init__(self, core_obj)
self.response = {'errs': [], 'msg': []}
self.continue_running = False # True if we should leave command loop
self.cmd_instances = self._populate_commands()
self.cmd_name = '' # command name before alias or
# macro resolution
self.current_command = '' # Current command getting run
self.debug_nest = 1
self.display_mgr = Mdisplay.DisplayMgr()
self.intf = core_obj.debugger.intf
self.last_command = None # Initially a no-op
self.precmd_hooks = []
# If not:
# self.location = lambda : print_location(self)
self.preloop_hooks = []
self.postcmd_hooks = []
self._populate_cmd_lists()
# Stop only if line/file is different from last time
self.different_line = None
# These values updated on entry. Set initial values.
self.curframe = None
self.event = None
self.event_arg = None
self.frame = None
self.list_lineno = 0
# Create a custom safe Repr instance and increase its maxstring.
# The default of 30 truncates error messages too easily.
self._repr = Repr()
self._repr.maxstring = 100
self._repr.maxother = 60
self._repr.maxset = 10
self._repr.maxfrozen = 10
self._repr.array = 10
self._saferepr = self._repr.repr
self.stack = []
self.thread_name = None
self.frame_thread_name = None
return
    def add_preloop_hook(self, hook, position=-1, nodups=True):
if hook in self.preloop_hooks: return False
self.preloop_hooks.insert(position, hook)
return True
def adjust_frame(self, pos, absolute_pos):
"""Adjust stack frame by pos positions. If absolute_pos then
pos is an absolute number. Otherwise it is a relative number.
A negative number indexes from the other end."""
if not self.curframe:
Mmsg.errmsg(self, "No stack.")
return
# Below we remove any negativity. At the end, pos will be
# the new value of self.curindex.
if absolute_pos:
if pos >= 0:
pos = len(self.stack)-pos-1
else:
pos = -pos-1
else:
pos += self.curindex
if pos < 0:
Mmsg.errmsg(self,
"Adjusting would put us beyond the oldest frame.")
return
elif pos >= len(self.stack):
Mmsg.errmsg(self,
"Adjusting would put us beyond the newest frame.")
return
self.curindex = pos
self.curframe = self.stack[self.curindex][0]
self.print_location()
self.list_lineno = None
return
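    # Worked example for adjust_frame (hypothetical 4-frame stack): with
    # absolute_pos, pos=0 selects the newest frame (index 3) and pos=-1
    # selects the oldest (index 0); without absolute_pos, pos is added to the
    # current index, so pos=-1 moves one frame toward the oldest entry.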
# To be overridden in derived debuggers
def defaultFile(self):
"""Produce a reasonable default."""
filename = self.curframe.f_code.co_filename
# Consider using is_exec_stmt(). I just don't understand
# the conditions under which the below test is true.
if filename == '<string>' and self.debugger.mainpyfile:
filename = self.debugger.mainpyfile
pass
return filename
def event_processor(self, frame, event, event_arg, prompt='Trepan'):
        'Command event processor: read commands and do something with them.'
self.frame = frame
self.event = event
self.event_arg = event_arg
filename = frame.f_code.co_filename
lineno = frame.f_lineno
line = linecache.getline(filename, lineno, frame.f_globals)
if not line:
opts = {'output': 'plain',
'reload_on_change': self.settings('reload'),
'strip_nl': False}
line = pyficache.getline(filename, lineno, opts)
self.current_source_text = line
if self.settings('skip') is not None:
if Mbytecode.is_def_stmt(line, frame):
return True
if Mbytecode.is_class_def(line, frame):
return True
pass
self.thread_name = Mthread.current_thread_name()
self.frame_thread_name = self.thread_name
self.process_commands()
return True
def forget(self):
""" Remove memory of state variables set in the command processor """
self.stack = []
self.curindex = 0
self.curframe = None
self.thread_name = None
self.frame_thread_name = None
return
def eval(self, arg):
"""Eval string arg in the current frame context."""
try:
return eval(arg, self.curframe.f_globals,
self.curframe.f_locals)
except:
t, v = sys.exc_info()[:2]
if isinstance(t, str):
exc_type_name = t
pass
else: exc_type_name = t.__name__
Mmsg.errmsg(self, str("%s: %s" % (exc_type_name, arg)))
raise
return None # Not reached
def exec_line(self, line):
if self.curframe:
local_vars = self.curframe.f_locals
global_vars = self.curframe.f_globals
else:
local_vars = None
            # FIXME: should probably have a place where the
# user can store variables inside the debug session.
# The setup for this should be elsewhere. Possibly
# in interaction.
global_vars = None
try:
code = compile(line + '\n', '"%s"' % line, 'single')
exec code in global_vars, local_vars
except:
t, v = sys.exc_info()[:2]
if isinstance(t, types.StringType):
exc_type_name = t
else: exc_type_name = t.__name__
Mmsg.errmsg(self, '%s: %s' % (str(exc_type_name), str(v)))
pass
return
def ok_for_running(self, cmd_obj, name, cmd_hash):
'''We separate some of the common debugger command checks here:
whether it makes sense to run the command in this execution state,
if the command has the right number of arguments and so on.
'''
if hasattr(cmd_obj, 'execution_set'):
if not (self.core.execution_status in cmd_obj.execution_set):
part1 = ("Command '%s' is not available for execution "
"status:" % name)
Mmsg.errmsg(self,
Mmisc.
wrapped_lines(part1,
self.core.execution_status,
self.debugger.settings['width']))
return False
pass
if self.frame is None and cmd_obj.need_stack:
self.intf[-1].errmsg("Command '%s' needs an execution stack."
% name)
return False
return True
def process_commands(self):
"""Handle debugger commands."""
if self.core.execution_status != 'No program':
self.setup()
Mlocation.print_location(self, self.event)
pass
leave_loop = run_hooks(self, self.preloop_hooks)
self.continue_running = False
while not leave_loop:
try:
run_hooks(self, self.precmd_hooks)
# bdb had a True return to leave loop.
# A more straight-forward way is to set
# instance variable self.continue_running.
leave_loop = self.process_command()
if leave_loop or self.continue_running: break
except EOFError:
# If we have stacked interfaces, pop to the next
# one. If this is the last one however, we'll
# just stick with that. FIXME: Possibly we should
                # check to see if we are interactive, and not
# leave if that's the case. Is this the right
# thing? investigate and fix.
if len(self.debugger.intf) > 1:
del self.debugger.intf[-1]
self.last_command = ''
else:
if self.debugger.intf[-1].output:
self.debugger.intf[-1].output.writeline('Leaving')
raise Mexcept.DebuggerQuit
pass
break
pass
pass
return run_hooks(self, self.postcmd_hooks)
def process_command(self):
        # Read a single command from the interface, validate its shape, and dispatch it.
self.response = {'errs': [], 'msg': []}
cmd_hash = self.intf[-1].read_command()
# FIXME: put this into a routine
if isinstance(cmd_hash, types.DictType):
Mmsg.errmsg(self, "invalid input, expecting a hash: %s" % cmd_hash,
{'set_name': True})
self.intf[-1].msg(self.response)
return False
if 'command' not in cmd_hash:
Mmsg.errmsg(self,
"invalid input, expecting a 'command' key: %s" %
cmd_hash,
{'set_name': True})
self.intf[-1].msg(self.response)
return False
self.cmd_name = cmd_hash['command']
cmd_name = resolve_name(self, self.cmd_name)
if cmd_name:
cmd_obj = self.commands[cmd_name]
if self.ok_for_running(cmd_obj, cmd_name, cmd_hash):
try:
self.response['name'] = cmd_name
result = cmd_obj.run(cmd_hash)
self.intf[-1].msg(self.response)
if result: return result
except (Mexcept.DebuggerQuit,
Mexcept.DebuggerRestart, SystemExit):
# Let these exceptions propagate through
raise
except:
Mmsg.errmsg(self, "INTERNAL ERROR: " +
traceback.format_exc())
pass
pass
else:
self.undefined_cmd(cmd_name)
pass
pass
return False
def remove_preloop_hook(self, hook):
try:
position = self.preloop_hooks.index(hook)
except ValueError:
return False
del self.preloop_hooks[position]
return True
def setup(self):
"""Initialization done before entering the debugger-command
loop. In particular we set up the call stack used for local
variable lookup and frame/up/down commands.
We return True if we should NOT enter the debugger-command
loop."""
self.forget()
if self.settings('dbg_trepan'):
self.frame = inspect.currentframe()
pass
if self.event in ['exception', 'c_exception']:
exc_type, exc_value, exc_traceback = self.event_arg
else:
_, _, exc_traceback = (None, None, None,) # NOQA
pass
if self.frame or exc_traceback:
self.stack, self.curindex = \
get_stack(self.frame, exc_traceback, None, self)
self.curframe = self.stack[self.curindex][0]
self.thread_name = Mthread.current_thread_name()
else:
self.stack = self.curframe = \
self.botframe = None
pass
if self.curframe:
self.list_lineno = \
max(1, inspect.getlineno(self.curframe))
else:
self.list_lineno = None
pass
# if self.execRcLines()==1: return True
return False
def undefined_cmd(self, cmd):
"""Error message when a command doesn't exist"""
Mmsg.errmsg(self, 'Undefined command: "%s". Try "help".' % cmd)
return
def _populate_commands(self):
""" Create an instance of each of the debugger
commands. Commands are found by importing files in the
directory 'command'. Some files are excluded via an array set
in __init__. For each of the remaining files, we import them
and scan for class names inside those files and for each class
name, we will create an instance of that class. The set of
        DebuggerCommand class instances forms the set of possible debugger
commands."""
cmd_instances = []
from trepan.bwprocessor import command as Mcommand
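        # Each command module is imported by name, and every *Command class
        # found in it is instantiated via eval using the template below.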
eval_cmd_template = 'command_mod.%s(self)'
for mod_name in Mcommand.__modules__:
import_name = "command." + mod_name
try:
command_mod = getattr(__import__(import_name), mod_name)
except:
print('Error importing %s: %s' %
(mod_name, sys.exc_info()[0]))
continue
classnames = [ tup[0] for tup in
inspect.getmembers(command_mod, inspect.isclass)
if ('DebuggerCommand' != tup[0] and
tup[0].endswith('Command')) ]
for classname in classnames:
eval_cmd = eval_cmd_template % classname
try:
instance = eval(eval_cmd)
cmd_instances.append(instance)
except:
print ('Error loading %s from %s: %s' %
(classname, mod_name, sys.exc_info()[0]))
pass
pass
pass
return cmd_instances
def _populate_cmd_lists(self):
""" Populate self.commands"""
self.commands = {}
for cmd_instance in self.cmd_instances:
cmd_name = cmd_instance.name
self.commands[cmd_name] = cmd_instance
pass
return
pass
# Demo it
if __name__=='__main__':
from trepan.interfaces import bullwinkle as Mbullwinkle
class Debugger:
def __init__(self):
self.intf = [Mbullwinkle.BWInterface()]
self.settings = {'dbg_trepan': True, 'reload': False}
pass
class MockCore:
def filename(self, fn): return fn
def canonic_filename(self, frame): return frame.f_code.co_filename
def __init__(self):
self.debugger = Debugger()
return
pass
core = MockCore()
bwproc = BWProcessor(core)
print 'commands:'
commands = bwproc.commands.keys()
commands.sort()
print commands
print resolve_name(bwproc, 'quit')
# print '-' * 10
# print_source_line(sys.stdout.write, 100, 'source_line_test.py')
# print '-' * 10
bwproc.frame = sys._getframe()
bwproc.setup()
# print
# print '-' * 10
Mlocation.print_location(bwproc)
# print 'Removing non-existing quit hook: ', bwproc.remove_preloop_hook(fn)
# bwproc.add_preloop_hook(fn)
# print bwproc.preloop_hooks
# print 'Removed existing quit hook: ', bwproc.remove_preloop_hook(fn)
pass
| gpl-3.0 | 3,372,832,090,076,952,000 | 35.692308 | 79 | 0.549542 | false |
drewp/commentserve | commentServe.py | 1 | 12072 | #!/usr/bin/python
"""comment storage for blogs, photo site, etc
see also:
sioc:Post sioc:has_reply sioc:Post / types:Comment
sioc:content
content:encoded
dcterms:created
types:BlogPost
types:Comment
"""
import web, time, logging, pystache, traceback
from datetime import datetime
from uuid import uuid4
from html5lib import html5parser, sanitizer
from web.contrib.template import render_genshi
from rdflib import RDF, URIRef, Literal, Namespace
from dateutil.parser import parse
from honeypot import HoneypotChecker
import restkit
from dateutil.tz import tzlocal
import cyclone.web
from twisted.internet import reactor
from db import DbMongo
SIOC = Namespace("http://rdfs.org/sioc/ns#")
CONTENT = Namespace("http://purl.org/rss/1.0/modules/content/")
DCTERMS = Namespace("http://purl.org/dc/terms/")
XS = Namespace("http://www.w3.org/2001/XMLSchema#")
FOAF = Namespace("http://xmlns.com/foaf/0.1/")
HTTP = Namespace("http://www.w3.org/2006/http#")
OV = Namespace("http://open.vocab.org/terms/")
log = logging.getLogger()
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
log.setLevel(logging.INFO)
render = render_genshi(['.'], auto_reload=False)
def literalFromUnix(t):
i = datetime.fromtimestamp(int(t)).replace(tzinfo=tzlocal()).isoformat()
return Literal(i, datatype=XS['dateTime'])
def agoString(literalTime):
d = parse(str(literalTime))
# (assuming 'now' is in the same timezone as d)
return web.utils.datestr(d, datetime.now().replace(tzinfo=tzlocal()))
def newPublicUser(forwardedFor, name, email):
"""
a non-logged-in user is posting a comment on a resource that's
open for public comments. We make a new URI for this user (every
time) and store some extra statements.
pass your web post params, which might include 'name' and 'email'.
returns user URI and a list of triples to be stored
"""
stmts = []
user = URIRef('http://bigasterisk.com/guest/%s' % uuid4())
header = URIRef(user + "/header1")
stmts.extend([
(user, RDF.type, FOAF.Person),
(user, DCTERMS.created, literalFromUnix(time.time())),
(user, OV.usedHttpHeader, header),
(header, HTTP.fieldName, Literal('X-Forwarded-For')),
(header, HTTP.fieldValue, Literal(forwardedFor)),
])
if name:
stmts.append((user, FOAF.name, Literal(name)))
if email:
stmts.append((user, FOAF.mbox, URIRef("mailto:%s" % email)))
return user, stmts
def newCommentUri(secs=None):
"""this is essentially a bnode, but a real URI is easier to work with"""
if secs is None:
secs = time.time()
return URIRef("http://bigasterisk.com/comment/%r" % secs)
class AnyCase(sanitizer.HTMLSanitizer):
def __init__(self, stream, encoding=None, parseMeta=True, useChardet=True,
lowercaseElementName=True, lowercaseAttrName=True):
sanitizer.HTMLSanitizer.__init__(self, stream, encoding, parseMeta,
useChardet,
lowercaseElementName,
lowercaseAttrName)
class AnyCaseNoSrc(AnyCase):
allowed_attributes = AnyCase.allowed_attributes[:]
allowed_attributes.remove('src')
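# Run untrusted comment HTML through the html5lib sanitizer; unless srcAttr
# is set, 'src' attributes are stripped from the allowed attribute list.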
def sanitize_html(stream, srcAttr=False):
ret = ''.join([token.toxml() for token in
html5parser.HTMLParser(tokenizer=AnyCase if srcAttr else AnyCaseNoSrc).
parseFragment(stream).childNodes])
return ret
def spamCheck(article, content):
if content.lower().count("<a href") > 0:
log.error("too many links in %r" % content)
raise ValueError("too many links")
if '[url=' in content:
raise ValueError("url markup is too suspicious")
if content.split()[-1].startswith(('http://', 'https://')):
raise ValueError("please don't end with a link")
if article in [URIRef("http://drewp.quickwitretort.com/2008/02/22/0"),
URIRef("http://drewp.quickwitretort.com/2010/07/03/0"),
]:
raise ValueError("spam flood")
for pat in ['viagra', 'cialis', 'probleme de sante', 'pfizer', 'pilules']:
if pat in content.lower():
raise ValueError("spam pattern")
class Comments(cyclone.web.RequestHandler):
def get(self, public=False):
"""
post=<uri to post> (or use 'uri' for the arg)
returns html formatted comments (until i get some more content types)
"""
t1 = time.time()
post = (self.get_argument("post", default=None) or
self.get_argument("uri", default=None))
if not post:
raise ValueError("need 'uri' param")
post = URIRef(post)
foafAgent = None
try:
foafAgent = URIRef(self.request.headers['X-Foaf-Agent'])
except KeyError:
if not public:
self.write("Must login to see comments")
return
queryTime = time.time()
rows = self.findComments(post)
queryTime = time.time() - queryTime
self.set_header("Content-Type", "text/html")
ret = render.comments(
includeJs=self.get_argument("js", default="0") != "0",
public=public,
parent=post,
toHttps=lambda uri: uri.replace('http://', 'https://'),
agoString=agoString,
you=self.settings.db.value(foafAgent, FOAF.name) if foafAgent else None,
rows=rows,
)
self.write(ret + "<!-- %.2f ms (%.2f ms in query) -->" % (
1000 * (time.time() - t1),
1000 * queryTime))
def findComments(self, post):
rows = []
for who, when, content in self.settings.db.query("""
SELECT DISTINCT ?who ?when ?content WHERE {
?parent sioc:has_reply [
sioc:has_creator ?cr;
content:encoded ?content;
dcterms:created ?when
]
OPTIONAL { ?cr foaf:name ?who }
} ORDER BY ?when""", initBindings={"parent" : post}):
row = dict(who=who, when=when, content=sanitize_html(content))
rows.append(row)
log.debug("found %s rows with parent %r" % (len(rows), post))
return rows
def post(self, public=False):
"""
post=<parent post>
content=<html content>
we get the user from the x-foaf-agent header
"""
parent = self.get_argument('post', default=None) or self.get_argument("uri")
assert parent is not None
# maybe a legacy problem here with http/https, but blaster is still sending http
parent = URIRef(parent)
# this might be failing on ariblog, but that one is already safe
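        # Check the client IP against a honeypot spam blacklist (key read from
        # 'priv-honeypotkey') before accepting the comment.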
ip = self.request.headers.get("X-Forwarded-For")
if ip is not None:
HoneypotChecker(open("priv-honeypotkey").read().strip()).check(ip)
contentArg = self.get_argument("content", default="")
if not contentArg.strip():
raise ValueError("no text")
if contentArg.strip() == 'test':
return "not adding test comment"
spamCheck(parent, contentArg)
content = Literal(contentArg, datatype=RDF.XMLLiteral)
stmts = [] # gathered in one list for an atomic add
foafHeader = self.request.headers.get('X-Foaf-Agent')
if not public:
assert foafHeader
user = URIRef(foafHeader)
# make bnode-ish users for anonymous ones. need to get that username passed in here
else:
if foafHeader:
user = URIRef(foafHeader)
else:
user, moreStmts = newPublicUser(
self.request.headers.get("X-Forwarded-For"),
self.get_argument("name", ""),
self.get_argument("email", ""))
stmts.extend(moreStmts)
secs = time.time()
comment = newCommentUri(secs)
now = literalFromUnix(secs)
ctx = URIRef(parent + "/comments")
stmts.extend([(parent, SIOC.has_reply, comment),
(comment, DCTERMS.created, now),
(comment, SIOC.has_creator, user),
])
stmts.extend(commentStatements(user, comment, content))
db.writeFile(stmts, ctx, fileWords=[parent.split('/')[-1], now])
try:
self.sendAlerts(parent, user)
except Exception, e:
import traceback
log.error(e)
traceback.print_exc()
self.write("added")
def sendAlerts(self, parent, user):
c3po = restkit.Resource('http://bang:9040/')
for listener, mode in [
('http://bigasterisk.com/foaf.rdf#drewp', 'xmpp'),
('http://bigasterisk.com/kelsi/foaf.rdf#kelsi', 'xmpp')]:
c3po.post(
path='', payload={
'user': listener,
'msg': '%s comment from %s (http://10.1.0.1:9031/)' % (parent, user),
'mode': mode,
},
# shouldn't this be automatic?
headers={'content-type' : 'application/x-www-form-urlencoded'},
)
class CommentCount(cyclone.web.RequestHandler):
def get(self, public=False):
if not public:
try:
self.request.headers['X-Foaf-Agent']
except KeyError:
self.set_header("Content-Type", "text/plain")
self.write("Must login to see comments")
return
post = URIRef(self.get_argument("post"))
rows = self.settings.db.query("""
SELECT DISTINCT ?r WHERE {
?parent sioc:has_reply ?r
}""", initBindings={"parent" : post})
count = len(list(rows))
self.set_header("Content-Type", "text/plain")
self.write("%s comments" % count if count != 1 else "1 comment")
class Root(cyclone.web.RequestHandler):
def get(self):
recent = self.settings.db.getRecentComments(10, notOlderThan=60,
withSpam=False)
self.write(pystache.render(open("index.mustache").read(),
dict(recent=recent)))
class Spam(cyclone.web.RequestHandler):
def post(self):
try:
self.settings.db.setType(docId=self.get_argument('docId'), type="spam")
except Exception:
traceback.print_exc()
raise
self.redirect("/")
def commentStatements(user, commentUri, realComment):
# here you can put more processing on the comment text
realComment = Literal(realComment.replace("\r", ""), datatype=realComment.datatype) # rdflib n3 can't read these back
return [(commentUri, CONTENT.encoded, realComment)]
class Index(cyclone.web.RequestHandler):
def get(self):
self.set_header("Content-Type", "text/plain")
self.write("commentServe")
class Fav(cyclone.web.RequestHandler):
def get(self):
self.write(open("favicon.ico").read())
class Application(cyclone.web.Application):
def __init__(self, db):
handlers = [
(r'/comments', Comments),
(r'/(public)/comments', Comments),
(r'/commentCount', CommentCount),
(r'/(public)/commentCount', CommentCount),
(r'/', Root),
(r'/favicon.ico', Fav),
(r'/spam', Spam),
]
cyclone.web.Application.__init__(self, handlers,
db=db,
template_path=".")
if __name__ == '__main__':
db = DbMongo()
from twisted.python.log import startLogging
import sys
startLogging(sys.stdout)
reactor.listenTCP(9031, Application(db))
reactor.run()
| bsd-2-clause | -3,934,038,125,280,826,400 | 35.252252 | 121 | 0.574387 | false |
kivymd/KivyMD | demos/shrine/libs/baseclass/box_bottom_sheet.py | 1 | 4932 | from kivy.animation import Animation
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.metrics import dp
from kivy.properties import BooleanProperty, ObjectProperty, StringProperty
from kivy.uix.behaviors import ButtonBehavior
from kivy.uix.image import Image
from kivy.uix.recycleview import RecycleView
from kivymd.theming import ThemableBehavior
from kivymd.uix.behaviors import CircularRippleBehavior
from kivymd.uix.boxlayout import MDBoxLayout
from kivymd.uix.button import MDIconButton
from kivymd.uix.list import TwoLineAvatarIconListItem
class BoxBottomSheetProductList(RecycleView):
pass
class TotalPriceForBoxBottomSheetProductList(MDBoxLayout):
pass
class ToolbarForBoxBottomSheetProductList(MDBoxLayout):
pass
class ItemForBoxBottomSheetProductList(TwoLineAvatarIconListItem):
pass
class PreviousImage(CircularRippleBehavior, ButtonBehavior, Image):
description = StringProperty()
_root = ObjectProperty()
class BoxBottomSheet(ThemableBehavior, MDBoxLayout):
open_sheet_box = BooleanProperty(False)
def clear_box(self):
while len(self.ids.previous_box.children) != 1:
for widget in self.ids.previous_box.children:
if widget.__class__ is not MDIconButton:
self.ids.previous_box.remove_widget(widget)
def restore_opacity_bottom_sheet(self):
Animation(opacity=1, d=0.2).start(self.ids.previous_box)
Animation(opacity=1, d=0.2).start(self)
def restore_width_bottom_sheet(self):
if len(self.ids.previous_box.children) != 1:
for widget in self.ids.previous_box.children:
self.ids.previous_box.width += widget.width
self.width += widget.width
self.ids.previous_box.height = dp(48)
if self.parent.ids.box_bottom_sheet_product_list.width == 0:
Animation(width=self.width + dp(48), d=0.2).start(self)
def remove_box_list(self, *args):
self.parent.ids.box_bottom_sheet_product_list.data = []
self.restore_width_bottom_sheet()
self.restore_opacity_bottom_sheet()
def hide_box_bottom_sheet(self):
Animation(width=0, d=0.2).start(self)
Animation(opacity=0, d=0.2).start(self)
def do_open_bottom_sheet(self, *args):
total_price = 0
count_item = 0
for widget in self.ids.previous_box.children:
if widget.__class__ is PreviousImage:
count_item += 1
total_price += int(
float(widget.description.split("\n")[1].split("$ ")[1])
)
self.parent.ids.box_bottom_sheet_product_list.data.append(
{
"viewclass": "ItemForBoxBottomSheetProductList",
"height": dp(72),
"path_to_image": widget.source,
"description": widget.description,
}
)
self.parent.ids.box_bottom_sheet_product_list.data.insert(
0,
{
"viewclass": "ToolbarForBoxBottomSheetProductList",
"count_item": count_item,
"callback": self.hide_bottom_sheet,
},
)
self.parent.ids.box_bottom_sheet_product_list.data.append(
{
"viewclass": "TotalPriceForBoxBottomSheetProductList",
"total_price": str(total_price),
}
)
Animation(opacity=1, d=0.2).start(
self.parent.ids.box_bottom_sheet_product_list
)
self.show_clear_button()
def show_clear_button(self):
self.parent.ids.clear_button.opacity = 1
self.parent.ids.clear_button.disabled = False
self.parent.ids.clear_button.grow()
def hide_clear_button(self, *args):
def hide_clear_button(interval):
self.parent.ids.clear_button.opacity = 0
self.parent.ids.clear_button.disabled = True
self.parent.ids.clear_button.grow()
Clock.schedule_once(hide_clear_button, 0.2)
def hide_bottom_sheet(self, *args):
Animation.stop_all(self)
self.hide_clear_button()
Animation(opacity=0, d=0.2).start(
self.parent.ids.box_bottom_sheet_product_list
)
animation = Animation(
height=Window.height // 3, width=Window.width // 2, d=0.1
) + Animation(height=dp(68), width=dp(68), d=0.2)
animation.bind(on_complete=self.remove_box_list)
animation.start(self)
self.open_sheet_box = False
def open_bottom_sheet(self):
Animation.stop_all(self)
anim = Animation(
height=Window.height // 2, width=Window.width, d=0.1
) + Animation(height=Window.height, d=0.1)
anim.bind(on_complete=self.do_open_bottom_sheet)
anim.start(self)
self.open_sheet_box = True
| mit | 7,916,162,452,998,090,000 | 34.228571 | 75 | 0.623682 | false |
wtsi-hgi/irobot | irobot/authentication/_http.py | 1 | 7456 | """
Copyright (c) 2017 Genome Research Ltd.
Author: Christopher Harrison <[email protected]>
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
Public License for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import atexit
import logging
from abc import abstractmethod
from threading import Lock, Timer
from typing import Dict, NamedTuple, Optional
from aiohttp import ClientSession, ClientResponse
from irobot.authentication._base import AuthenticatedUser, BaseAuthHandler
from irobot.authentication.parser import HTTPAuthMethod, ParseError, auth_parser
from irobot.config import Configuration
from irobot.logs import LogWriter
class HTTPValidatorParameters(NamedTuple):
""" Parameters for the HTTP validator """
url: str # URL to make the authentication response to
payload: str # Challenge response payload
method: str = "GET" # HTTP method
headers: Dict[str, str] = {} # Additional request headers
class BaseHTTPAuthHandler(LogWriter, BaseAuthHandler):
""" Base HTTP-based authentication handler with logging and caching """
## Implement these #################################################
@abstractmethod
def match_auth_method(self, challenge_response: HTTPAuthMethod) -> bool:
"""
        Test whether the given challenge response matches the requirements of
the handler class
@params challenge_response Authentication challenge response (HTTPAuthMethod)
@return Match (bool)
"""
@abstractmethod
def set_handler_parameters(self, challenge_response: HTTPAuthMethod) -> HTTPValidatorParameters:
"""
Set the parameters for the authentication challenge response
        @params challenge_response  Authentication challenge response (HTTPAuthMethod)
@return Authentication request parameters (HTTPValidatorParameters)
"""
@abstractmethod
async def get_authenticated_user(self, challenge_response: HTTPAuthMethod,
auth_response: ClientResponse) -> AuthenticatedUser:
"""
Get the user from the authentication challenge response and any
response back from the authentication server
@params challenge_response Authentication challenge response (HTTPAuthMethod)
@param auth_response Response from authentication request (ClientResponse)
@return Authenticated user (AuthenticatedUser)
"""
####################################################################
def __init__(self, config: Configuration, logger: Optional[logging.Logger]=None) -> None:
"""
Constructor
@param config Authentication configuration
@param logger Logger
"""
super().__init__(logger=logger)
self._config = config
# Get the first word of the WWW-Authenticate string
self._auth_method, *_ = self.www_authenticate.split()
# Initialise the cache, if required
if self._config.cache:
self.log(logging.DEBUG, f"Creating {self._auth_method} authentication cache")
self._cache: Dict[HTTPAuthMethod, AuthenticatedUser] = {}
self._cache_lock = Lock()
self._schedule_cleanup()
atexit.register(self._cleanup_timer.cancel)
def _schedule_cleanup(self) -> None:
""" Initialise and start the clean up timer """
self._cleanup_timer = Timer(self._config.cache.total_seconds(), self._cleanup)
self._cleanup_timer.daemon = True
self._cleanup_timer.start()
def __del__(self) -> None:
""" Cancel any running clean up timer on GC """
if self._config.cache and self._cleanup_timer.is_alive():
self._cleanup_timer.cancel()
def _cleanup(self) -> None:
""" Clean up expired entries from the cache """
with self._cache_lock:
self.log(logging.DEBUG, f"Cleaning {self._auth_method} authentication cache")
for key, user in list(self._cache.items()):
if not user.valid(self._config.cache):
del self._cache[key]
self._schedule_cleanup()
async def _validate_request(self, params: HTTPValidatorParameters) -> Optional[ClientResponse]:
"""
Asynchronously make an authentication request to check validity
@param params Challenge response validator parameters (HTTPValidatorParameters)
@return Authentication response (ClientResponse; None on failure)
"""
async with ClientSession() as session:
req_headers = {
"Authorization": params.payload,
**params.headers
}
async with session.request(params.method, params.url, headers=req_headers) as response:
if 200 <= response.status < 300:
self.log(logging.DEBUG, f"{self._auth_method} authenticated")
return response
if response.status in [401, 403]:
self.log(logging.WARNING, f"{self._auth_method} couldn't authenticate")
else:
response.raise_for_status()
return None
async def authenticate(self, auth_header: str) -> Optional[AuthenticatedUser]:
"""
Validate the authorisation header
@param auth_header Contents of the "Authorization" header (string)
@return Authenticated user (AuthenticatedUser)
"""
try:
_auth_methods = auth_parser(auth_header)
challenge_response, *_ = filter(self.match_auth_method, _auth_methods)
except ParseError:
self.log(logging.WARNING,
f"{self._auth_method} authentication handler couldn't parse authentication header")
return None
except ValueError:
self.log(logging.ERROR, f"No HTTP {self._auth_method} authentication handler available")
return None
# Check the cache
if self._config.cache:
with self._cache_lock:
if challenge_response in self._cache:
user = self._cache[challenge_response]
if user.valid(self._config.cache):
self.log(logging.DEBUG, f"Authenticated user \"{user.user}\" from cache")
return user
# Clean up expired users
del self._cache[challenge_response]
auth_response = await self._validate_request(self.set_handler_parameters(challenge_response))
if auth_response:
user = await self.get_authenticated_user(challenge_response, auth_response)
# Put validated user in the cache
if self._config.cache:
with self._cache_lock:
self._cache[challenge_response] = user
return user
return None
| gpl-3.0 | 3,244,642,577,978,605,000 | 38.242105 | 104 | 0.628889 | false |
sernst/Trackway-Gait-Analysis | tracksim/cli/commands/list_.py | 1 | 1859 | from argparse import ArgumentParser
from tracksim import system
from tracksim import reader
from tracksim import paths
from tracksim import cli
DESCRIPTION = """
Lists the existing group and trial results stored in the cached results folders
"""
def list_groups():
system.log('===== GROUPS =====', whitespace_bottom=1)
results_path = paths.results('group.html')
for uid, data_path in reader.listings('group').items():
url = 'file://{}?id={}'.format(results_path, uid)
system.log(
"""
--- {uid} ---
{url}
""".format(uid=uid, url=url),
whitespace_bottom=1
)
def list_trials():
system.log('===== TRIALS =====', whitespace_bottom=1)
results_path = paths.results('trials.html')
for uid, data_path in reader.listings('trial').items():
url = 'file://{}?id={}'.format(results_path, uid)
system.log(
"""
--- {uid} ---
{url}
""".format(uid=uid, url=url),
whitespace_bottom=1
)
def execute_command():
"""
:return:
"""
parser = ArgumentParser()
parser.description = cli.reformat(DESCRIPTION)
parser.add_argument(
'list_command',
type=str,
help='The list command itself'
)
parser.add_argument(
'report_type',
type=str,
nargs='?',
default=None,
help='The type of report to list.'
)
args = vars(parser.parse_args())
report_type = args['report_type']
if not report_type:
report_type = 'all'
else:
report_type = report_type.lower()
print('')
if report_type[0] == 'g':
list_groups()
elif report_type[0] == 't':
list_trials()
else:
list_groups()
print('')
list_trials()
| mit | 1,463,846,575,669,535,500 | 20.125 | 76 | 0.534696 | false |
num1r0/bb_tools | wordlist_generator.py | 1 | 4922 | """
Wordlist generator tool.
Generates extended wordlist based on an initial list of possible words
Used mainly with hash cracking tools: hashcat, john, etc.
TO DO:
- Add logging function
"""
import datetime
import itertools
import sys
import os
def usage():
""" Usage function """
usage_message = """Usage wordlist_generator.py [ OPTIONS ]
OPTIONS:
-i Path to initial wordlist file (default: wordlist.txt)
-o Name of the file to save generated wordlist (default: gen_ext_wl.txt)
    -t  Datetime obtained from the 'date' command, used as the origin timestamp (ex.: Sat 28 Oct 22:06:28 BST 2017)
-w Time window size (in seconds). Subtracted/added to origin timestamp
-h Display this menu
EXAMPLES:
wordlist_generator.py -i wl.txt -o res.txt -t "Sat 28 Oct 22:06:28 BST 2017" -w 10
"""
print usage_message
def create_permutations(wordlist):
"""
Creates all possible permutations for given wordlist
"""
extended_wordlist = []
for length in range(0, len(wordlist)+1):
for subset in itertools.permutations(wordlist, length):
extended_wordlist.append("".join(subset))
return extended_wordlist
def convert_to_epoch_time(origin):
"""
Converts datetime into unix timestamp. Gets as an argument, result of linux 'date' command.
Input example: Sat 28 Oct 22:06:28 BST 2017
"""
pattern = "%a %d %b %H:%M:%S %Z %Y"
timestamp = datetime.datetime.strptime(origin, pattern).strftime("%s")
return timestamp
def generate_timestamps(epoch_origin, seconds_interval):
"""
    Takes the origin timestamp and generates the list of timestamps within the given number of seconds on either side of it
"""
timestamps = []
past_timestamp = int(epoch_origin) - int(seconds_interval)
future_timestamp = int(epoch_origin) + int(seconds_interval)
for timestamp in range(past_timestamp, future_timestamp+1):
timestamps.append(timestamp)
return timestamps
def generate_extended_wordlist(timestamps, wordlist):
"""
For each timestamp, we generate the wordlist using permutations
"""
extended_wordlist = []
iter_wordlist = []
for timestamp in timestamps:
iter_wordlist = list(wordlist)
iter_wordlist.append(str(timestamp))
iter_extended_wordlist = create_permutations(iter_wordlist)
del iter_wordlist[:]
diff_wordlist = list(set(iter_extended_wordlist) - set(extended_wordlist))
extended_wordlist += diff_wordlist
return sorted(extended_wordlist)
def get_wordlist_from_file(file_path):
"""
Simple read file function; omits newline '\n' character on each line
"""
f = open(str(file_path), "r")
wordlist = f.read().splitlines()
return wordlist
def save_to_file(file_path, wordlist):
"""
Simple write file function
"""
if not str(file_path):
file_path = "gen_ext_wl.txt"
with open(file_path, 'w') as f:
for word in wordlist:
f.write(word)
f.write("\n")
f.close()
def main():
"""
Entry point
"""
arguments = sys.argv[1:]
if len(arguments) <= 1:
usage()
exit(0)
try:
# Need help?
arguments.index("-h")
usage()
except:
# Get initial wordlist file name
try:
initial_wordlist_path = str(arguments[int(arguments.index("-i") + 1)])
except:
# Logging function
initial_wordlist_path = "wordlist.txt"
print initial_wordlist_path
# Get file name to store generated wordlist
try:
new_wordlist_path = str(arguments[int(arguments.index("-o") + 1)])
except:
# Logging function
new_wordlist_path = "gen_ext_wl.txt"
print new_wordlist_path
# Get origin timestamp
try:
origin_timestamp = str(arguments[int(arguments.index("-t") + 1)])
except:
# Logging function
process = os.popen("date")
origin_timestamp = str(process.read()).strip()
process.close()
print origin_timestamp
# Get time window in seconds
try:
time_window_seconds = str(arguments[int(arguments.index("-w") + 1)])
except:
# Logging function
time_window_seconds = 45
print time_window_seconds
initial_wordlist = get_wordlist_from_file(initial_wordlist_path)
if not origin_timestamp.isdigit():
origin_timestamp = convert_to_epoch_time(origin_timestamp)
generated_timestamps = generate_timestamps(origin_timestamp, time_window_seconds)
generated_extended_wordlist = generate_extended_wordlist(generated_timestamps, initial_wordlist)
save_to_file(new_wordlist_path, generated_extended_wordlist)
if __name__ == "__main__":
main()
| gpl-3.0 | -3,724,301,158,258,055,000 | 30.350318 | 110 | 0.621902 | false |
beakman/droidlab | droidlab/experiments/serializers.py | 1 | 1991 | from rest_framework import serializers
from rest_framework.reverse import reverse
from .models import Experiment, Result
class ResultSerializer(serializers.ModelSerializer):
class Meta:
model = Result
exclude = ('experiment',)
# class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
# results = serializers.HyperlinkedIdentityField(view_name="results-list")
# class Meta:
# model = Experiment
# fields = ('name', 'date', 'results')
class ExperimentSerializer(serializers.ModelSerializer):
results = ResultSerializer(many=True)
class Meta:
model = Experiment
fields = ('id', 'name', 'date', 'user', 'results')
def create(self, validated_data):
results_data = validated_data.pop('results')
ex = Experiment.objects.create(**validated_data)
for result_data in results_data:
Result.objects.create(experiment=ex, **result_data)
return ex
def update(self, instance, validated_data):
results_data = validated_data.pop('results')
# Unless the application properly enforces that this field is
        # always set, the following could raise a `DoesNotExist`, which
# would need to be handled.
results = instance.results
instance.save()
results.save()
return instance
# class ResultHyperlink(serializers.HyperlinkedRelatedField):
# # We define these as class attributes, so we don't need to pass them as arguments.
# view_name = 'result-detail'
# queryset = Result.objects.all()
# def get_url(self, obj, view_name, request, format):
# url_kwargs = {
# 'experiment_name': obj.experiment.name,
# 'experiment_pk': obj.pk
# }
# return reverse(view_name, kwargs=url_kwargs, request=request, format=format)
# def get_object(self, view_name, view_args, view_kwargs):
# lookup_kwargs = {
# 'experiment__name': view_kwargs['experiment_name'],
# 'pk': view_kwargs['experiment_pk']
# }
# return self.get_queryset().get(**lookup_kwargs) | bsd-3-clause | 8,969,882,641,364,371,000 | 31.655738 | 88 | 0.690105 | false |
OSSOS/MOP | src/ossos/core/scripts/process.py | 1 | 1397 |
from ossos.pipeline import mk_mopheader, mkpsf, step1, slow
from ossos import util, storage
import logging
import sys
import os
import shutil
util.config_logging(logging.INFO)
version='p'
force=False
dry_run=False
prefix=''
lines = open(sys.argv[1]).readlines()
basedir=os.getcwd()
for line in lines:
expnum = int(line.strip())
for ccd in storage.get_ccdlist(expnum):
try:
os.chdir(basedir)
if not os.access(str(expnum),os.F_OK):
os.mkdir(str(expnum))
os.chdir(str(expnum))
if not os.access(str(ccd), os.F_OK):
os.mkdir(str(ccd))
os.chdir(str(ccd))
try:
print(os.getcwd())
mk_mopheader.run(expnum, ccd=ccd, version=version, dry_run=dry_run, prefix='', force=force, ignore_dependency=False)
mkpsf.run(expnum, ccd=ccd, version=version, dry_run=dry_run, prefix=prefix, force=force)
step1.run(expnum, ccd=ccd, version=version, dry_run=dry_run, prefix=prefix, force=force)
slow.run(expnum, ccd, version=version, dry_run=dry_run, prefix=prefix, force=force)
except Exception as ex:
print(ex)
except Exception as ex:
print(ex)
finally:
os.chdir(basedir)
shutil.rmtree("{}/{}".format(expnum, ccd), ignore_errors=True)
| gpl-3.0 | -1,160,384,444,843,936,800 | 30.75 | 132 | 0.59413 | false |
rrahmati/roboinstruct-2 | demonstrate_ros_package/scripts/record_demonstration.py | 1 | 15168 | #! /usr/bin/python
import rospy
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
import numpy as np
import cv2
import sys
import os
from os.path import expanduser
import signal
import threading
from multiprocessing import Pool
import time
from random import randint
from std_msgs.msg import Float32MultiArray
from leap_client.msg import HandInfoList
def signal_handler(signal, frame):
    global record_demonstration
record_demonstration.end_thread = True
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
class RecordDemonstration(object):
def __init__(self):
# parameters
self.task = 3006
# person controlling the robot: 1-Rouhollah, 2-Pooya
self.user_id = 1
self.image_shape = (540, 540)
self.recordDelay = .03
self.camera1 = True
self.camera2 = False
self.camera3 = False
self.al5d = True
self.mico = False
self.task_description = {
5000: "Human demonstrations",
3001: "Grab a bubble wrap and put it into plate",
3002: "Push the plate to the left",
3003: "Push the box towards the robot's base",
3004: "Push and roll the bottle towards the robot's base",
3005: "Pick up the towel and clean the screwdriver box",
3006: "rotate the pliers wrench to a perpendicular orientation",
# first camera calibration:
1001: "Put three small objects into the container",
1002: "Grab a pen and put it into user's hand",
1003: "Take the stirring bar from the user, stir a coffee cup, give it back to the user",
1004: "Grab capsules from the table and put them into their bottle",
1005: "Grab a paper cup and pour its content into a plate",
1006: "Push all small cubes and gather them in the middle of table",
1007: "The small towel is already folded. fold it one more time",
1008: "Grab a paper cup and put it into a tea cup",
1009: "Grab the spoon and fork and put them into the plate, spoon on right, fork on left",
1010: "Pick up a thick marker and put it into upright position",
1011: "Push and rotate the markers and gather them close to the robot base",
1012: "Stay in the middle position. Don't move!",
1013: "Pick up a mug and place it on the table where the user is pointing",
1014: "scoop ...",
# second camera calibration:
1501: "Grab 6 small cubes in a cluttered situation and put them into a plate",
1502: "Grab a marker and put it into the cup. Then, put it back on the table.",
# second camera calibration, each task 5 minutes, 10,000 waypoints
2001: "Grab 3 small markers and arrange them vertically on the right side",
2002: "Grab 3 small markers and arrange them horizontally on the right side",
2003: "Grab 3 small markers and arrange them vertically on the left side",
2004: "Grab 3 small markers and arrange them horizontally on the left side",
2005: "Grab 3 small markers and make a triangle with them",
2006: "Grab 3 small markers, put one on the left, one on the right, and one in the middle",
2007: "Grab 3 small markers and make a horizontal line with them",
2008: "Grab 3 small markers and write the character Y with them",
2009: "Grab 3 small markers and write the character U with them",
2010: "Grab 3 small markers and write the character H with them",
2011: "Grab 3 small markers and write the character N with them",
2012: "Grab 3 small markers and write the character T with them",
2013: "Grab 3 small markers and write the reversed character N with them",
2014: "Grab 3 small markers and write the reversed character Y with them",
2015: "Grab 3 small markers and write the reversed character U with them",
2016: "Grab 3 small markers and write the 90 degree rotated character H with them",
2017: "Grab 3 small markers and write the reversed character T with them",
2018: "Grab 3 small markers and write the character K with them",
2019: "Grab 3 small markers, put one vertically on the right, and two vertically on the left",
2020: "Grab 3 small markers, put one vertically on the left, and two vertically on the right",
2021: "Grab 3 small markers, put one horizontally on the right, and two horizontally on the left",
2022: "Grab 3 small markers, put one horizontally on the left, and two horizontally on the right",
2023: "Grab 3 small markers, put one vertically on the right, and two horizontally on the left",
2024: "Grab 3 small markers, put one horizontally on the left, and two vertically on the right",
2025: "Grab 3 small markers, put one vertically on the right, and make a vertical line with the other two",
2026: "Grab 3 small markers, put one vertically on the left, and make a vertical line with the other two",
2027: "Grab 3 small markers, put one vertically on the right, and make a horizontal line with the other two",
2028: "Grab 3 small markers, put one vertically on the left, and make a horizontal line with the other two",
2029: "Grab 3 small markers and put them into the coffee cup on the right",
2030: "Grab 3 small markers that are inside a coffee cup on the right and put them on the desk",
2031: "Grab 3 small markers and put them into the coffee cup on the left",
2032: "Grab 3 small markers that are inside a coffee cup on the left and put them on the desk",
2033: "Grab 3 small markers, put one into the coffee cup on the left, and the others into the coffee cup on the right",
2034: "Grab 3 small markers, put one into the coffee cup on the right, and the others into the coffee cup on the left",
2035: "Grab 2 small markers, put one into the coffee cup on the right, and the other into the coffee cup on the left",
2036: "Grab 2 small markers, put one into the coffee cup on the left, and the other into the coffee cup on the right",
2037: "Grab one small marker from each coffee cup and put them on the desk",
2038: "Grab one small marker from the coffee cup on the right and put it into the coffee cup on the left",
2039: "Grab one small marker from the coffee cup on the left and put it into the coffee cup on the right",
2040: "Grab 4 small markers and make a square with them",
2041: "Grab 4 small markers and make a cross with them",
2042: "Grab 4 small markers and make a 45 degree rotated square with them",
2043: "Grab 4 small markers and make a plus with them",
2044: "Grab 4 small markers, put one vertically on the right and three vertically on the left",
2045: "Grab 4 small markers, put one horizontally on the right and three vertically on the left",
2046: "Grab 4 small markers, put one vertically on the right and three horizontally on the left",
2047: "Grab 4 small markers, put one horizontally on the right and three horizontally on the left",
2048: "Grab 4 small markers, put two vertically on the right and two vertically on the left",
2049: "Grab 4 small markers, put two horizontally on the right and two vertically on the left",
2050: "Grab 4 small markers, put two vertically on the right and two horizontally on the left",
2051: "Grab 4 small markers, put two horizontally on the right and two horizontally on the left",
2052: "Grab 4 small markers and draw the bottom half of a star with them",
2053: "Grab 4 small markers and draw the upper half of a star with them",
2054: "Grab 4 small markers and draw the character '=' with them",
2055: "Grab 4 small markers and draw the 90 degree rotated character '=' with them",
2056: "Grab 4 small markers and draw the character 'W' with them",
2057: "Grab 4 small markers and draw the character 'M' with them",
2058: "Grab 4 small markers and draw the character 'E' with them",
2059: "Grab 4 small markers and draw the reversed character 'E' with them",
2060: "Grab 4 small markers and draw the character 'm' with them",
2061: "Grab 4 small markers and draw the reversed character 'm' with them",
}
# initialization
self.filepath = expanduser("~") + '/t/task-' + str(self.task) + '/' + str(randint(0,1000000))
rospy.init_node('record_demonstration')
if self.camera1:
self.create_folders(self.filepath + '/camera-' + str(1) + '/')
# self.create_folders(self.filepath + '/camera-' + str(1) + '-depth/')
rospy.Subscriber("/kinect2/qhd/image_color_rect", Image, self.camera1_callback)
# rospy.Subscriber("/kinect2/hd/image_depth_rect", Image, self.camera1_depth_callback)
if self.camera2:
self.create_folders(self.filepath + '/camera-' + str(2) + '/')
rospy.Subscriber("/usb_cam/image_raw", Image, self.camera2_callback)
if self.camera3:
self.create_folders(self.filepath + '/camera-' + str(3) + '/')
rospy.Subscriber("/kinect2/qhd/image_color_rect", Image, self.camera3_callback)
if self.al5d:
self.write_file_header()
rospy.Subscriber("/leap_al5d_info", Float32MultiArray, self.leap_al5d_callback)
if self.mico:
self.write_file_header()
rospy.Subscriber("/leap_mico_info", Float32MultiArray, self.leap_mico_callback)
self.bridge = CvBridge()
self.timestep = 0
self.task_complete_count = 0
self.rate = rospy.Rate(self.recordDelay*1000)
self.last_reward_time = 0
self.last_robot_msg = 0
self.start_time = rospy.get_time()
self.end_thread = False
self.pause = False
# self.pool = Pool(2)
self.thread = threading.Thread(target= self._update_thread)
self.thread.start()
def save_image(self, img_msg, camera):
try:
img = self.bridge.imgmsg_to_cv2(img_msg, "bgr8")
img = np.array(img, dtype=np.float)
except CvBridgeError, e:
print(e)
else:
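            # Crop the 960x540 qhd frame to columns 250-840 (the assumed
            # workspace region) before resizing to the square output shape.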
img = img[0:540, 250:840]
img = cv2.resize(img, self.image_shape)
cv2.imwrite(self.filepath + '/camera-' + str(camera) + '/' + str(self.timestep) +
'.jpg', img, [int(cv2.IMWRITE_JPEG_QUALITY), 80])
def save_image_depth(self, img_msg, camera):
try:
img = self.bridge.imgmsg_to_cv2(img_msg, "16UC1")
img = np.array(img, dtype=np.float32)
cv2.normalize(img, img, 0, 1, cv2.NORM_MINMAX)
except CvBridgeError, e:
print(e)
else:
img = cv2.resize(img, self.image_shape)
cv2.imwrite(self.filepath + '/camera-' + str(camera) + '-depth/' + str(self.timestep) +
'.jpg', img*255.0, [int(cv2.IMWRITE_JPEG_QUALITY), 80])
def camera1_callback(self, msg):
self.camera1_msg = msg
def camera1_depth_callback(self, msg):
self.camera1_depth_msg = msg
def camera2_callback(self, msg):
self.camera2_msg = msg
def camera3_callback(self, msg):
self.camera3_msg = msg
def leap_al5d_callback(self, msg):
self.leap_al5d_msg = msg
self.last_robot_msg = rospy.get_time()
def leap_mico_callback(self, msg):
self.leap_mico_msg = msg
def create_folders(self, foldername):
if not os.path.exists(foldername):
try:
os.makedirs(foldername)
except OSError as exc: # Guard against race condition
if exc.errno != errno.EEXIST:
raise
def write_file_header(self):
with open(self.filepath + '.txt', 'w') as f:
f.write(str(time.strftime('%l:%M%p %z on %b %d, %Y')) + '\n' + str(self.task_description[self.task]) + '\n')
f.write('time,task,user,robot,reward,human,gripper,joint1,joint2,joint3,joint4,joint5,joint6')
def append_to_file(self, robot):
with open(self.filepath + '.txt', 'a') as f:
str_to_append = '\n' + str(rospy.get_time() - self.start_time) + ',' + str(self.task) + ',' + str(self.user_id) + ','
if robot == 'al5d':
str_to_append = str_to_append + str(1) + ','
data = [x for x in self.leap_al5d_msg.data]
elif robot == 'mico':
str_to_append = str_to_append + str(2) + ','
data = [x for x in self.leap_mico_msg.data]
if abs(data[0] - 1) < .01: # got reward
if rospy.get_time() - self.last_reward_time > 1:
self.task_complete_count += 1
self.last_reward_time = rospy.get_time()
else:
data[0] = 0
sys.stdout.write('\rTimestep: ' + str(self.timestep) + ' Task done: ' + str(self.task_complete_count))
sys.stdout.flush()
str_to_append = str_to_append + ','.join(str(e) for e in data)
f.write(str_to_append)
def _update_thread(self):
while not rospy.is_shutdown() and not self.end_thread:
if self.pause or rospy.get_time() - self.start_time < 1 or rospy.get_time() - self.last_robot_msg > .1:
continue
save_files = (self.camera1 == hasattr(self, 'camera1_msg') and self.camera2 == hasattr(self, 'camera2_msg')
and self.camera3 == hasattr(self, 'camera3_msg') and self.al5d == hasattr(self, 'leap_al5d_msg')
and self.mico == hasattr(self, 'leap_mico_msg'))
if save_files:
if self.camera1:
# # self.pool.map(self.save_image, [(self.camera1_msg, 1)])
self.save_image(self.camera1_msg, 1)
# self.save_image_depth(self.camera1_depth_msg, 1)
if self.camera2:
# self.pool.map(self.save_image, [(self.camera2_msg, 2)])
self.save_image(self.camera2_msg, 2)
if self.camera3:
self.save_image(self.camera2_msg, 3)
if self.al5d:
self.append_to_file('al5d')
if self.mico:
self.append_to_file('mico')
self.timestep += 1
self.rate.sleep()
def main():
global record_demonstration
record_demonstration = RecordDemonstration()
rospy.spin()
# while not rospy.is_shutdown() and not record_demonstration.end_thread:
# input = raw_input(">>>")
# record_demonstration.pause = not record_demonstration.pause
if __name__ == '__main__':
main()
| mit | -7,426,183,828,414,585,000 | 53.171429 | 131 | 0.610562 | false |
hobson/pug-invest | pug/invest/bin/fit-test.py | 1 | 4498 | from statsmodels.tsa import arima_model
import numpy as np
from pug.invest import util
import pandas as pd
import matplotlib.pyplot as plt
y = util.simulate(poly=100, sinusoids=(10, 100, -20)).values
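# Simulated year of 15-minute samples: 96 quarter-hour steps per day.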
hr = np.arange(365*96)*.25
t = hr * 3600
sinusoids = [
np.random.normal(0.0, 0.1, 365*96)+10 + 3*np.sin(hr*2*np.pi/96/.25),
np.random.normal(0.0, 0.1, 365*96)+15 + 3*np.sin(hr*2*np.pi/96/.25) + 3*np.cos(t*2*np.pi/96./.25/365.),
np.random.normal(0.0, 1.0, 365*96)+15 + 3*np.sin(hr*2*np.pi/96/.25) + 3*np.cos(t*2*np.pi/96./.25/365.)+np.random.normal(0.0,1e-5,365*96).cumsum()]
arma20 = arima_model.ARMA(y, (2,0)).fit()
y2 = arma20.predict(start=10*96, end=12*96)
y1 = y[10*96-1:12*96]
plt.plot(t[10*96-1:12*96],zip(*[y1,y2]))
plt.show()
y2 = arma30.predict(start=10*96, end=12*96)
plt.plot(t[10*96-1:12*96],zip(*[y1,y2]))
plt.show()
arma30.resid.plot()
plt.plot(arma30.resid)
plt.show()
plt.plot(arma30.resid/y2)
plt.plot(arma30.resid/y)
plt.show()
plt.plot(arma30.resid/y)
plt.show()
arma30 = arima_model.ARMA(y[:-96*30], (2,0)).fit()
y1 = y[-32*96:]
y2 = arma30.predict(start=N-32*96, end=N-28*96)
N=len(y)
y2 = arma30.predict(start=N-32*96, end=N-28*96)
plt.plot(t[-32*96-1:-28*96],zip(*[y1,y2]))
plt.show()
plt.plot(t[-32*96-1:-28*96],zip(*[y1,y2]))
plt.show()
N
arma30 = arima_model.ARMA(y[:-96*30], (3,0)).fit()
N_predict=len(y[:-96*30])
y_predict=y[:-96*30]
y2 = arma30.predict(start=N_predict,end=N_predict+96)
y1 = y[N_predict:N_predict+96]
y1-y2
y2 = arma30.predict(start=N_predict,end=N_predict+95)
plt.plot(zip(*[y1,y2]))
plt.plot(zip(*[y1,y2]))
plt.show()
arma41 = arima_model.ARMA(y_train, (4,1)).fit()
y_train=y[:-96*30]
arma41 = arima_model.ARMA(y_train, (4,1)).fit()
arma296 = arima_model.ARMA(y_train, (2,96)).fit()
arma296 = arima_model.ARMA(y_train.diff(), (2,96)).fit()
arma296 = arima_model.ARMA(pd.Series(y_train).diff(), (2,96)).fit()
import pandas as pd
y_diff = pd.Series(y).diff().values()
y_diff = pd.Series(y).diff().values
y_train=y_diff[:-96*30]
arma296 = arima_model.ARMA(y_train, (2,96)).fit()
arma296 = arima_model.ARMA(y_train, (2,0)).fit()
arma296 = arima_model.ARMA(y_train[1:], (2,0)).fit()
arma296 = arima_model.ARMA(y_train[-96*14:], (2,96)).fit()
arma296 = arima_model.ARMA(y_train[-96*7:], (2,96)).fit()
arma296 = arima_model.ARMA(y_train[-96*2:], (2,96)).fit()
arma296 = arima_model.ARMA(y_train[-96*3:], (2,96)).fit()
arma296 = arima_model.ARMA(y_train[-96*4:], (2,96)).fit()
arma296 = arima_model.ARMA(y_train[-96*14:], (2,48)).fit()
arma296 = arima_model.ARMA(y_train[-96*14:], (2,24)).fit()
arma296 = arima_model.ARMA(y_train[-96*14:], (0,96)).fit()
arma296 = arima_model.ARMA(y_train[-96*14:], (1,96)).fit()
arma296 = arima_model.ARMA(y_train[-96*14:], (1,96)).fit(meth='mle')
arma296 = arima_model.ARMA(y_train[-96*14:], (1,96)).fit(meth='css')
arma296 = arima_model.ARMA(np.diff(y_train[-96*14:]).dropna(), (1,96)).fit(meth='css')
arma296 = arima_model.ARMA(np.diff(y_train[-96*14:])[1:], (2,96)).fit(meth='css')
arma296 = arima_model.ARMA(np.diff(y_train[-96*14:])[1:], (2,96)).fit(meth='mle')
arma296 = arima_model.ARMA(np.diff(y_train[-96*14:])[1:], (2,96))
arma296.fit(trend='c',solver='bfgs')
arma296.fit(trend='c',solver='bfgs',transparams=True)
arma296.fit(trend='c',solver='bfgs',transparams=False)
arma296._fit_start_params
arma296._fit_start_params()
arma296.fit(meth='css-mle',trend='c',solver='bfgs',transparams=False)
arma296.fit(meth='css-mle',trend='c',solver='bfgs',transparams=True)
q = np.zeros(96)
q[0] = 1
q[-1]=1
q[-1]=.5
q[0] = .1
q[-1]=.9
p=[10, 1.2, -.2]
arma296.fit(meth='css-mle',trend='c',solver='bfgs',transparams=True,startparams=[p,q])
arma296.fit(meth='css-mle',trend='c',solver='bfgs',transparams=True,start_params=[p,q])
np.log
arma296.fit(meth='css-mle',trend='c',solver='bfgs',transparams=False,start_params=[p,q])
p=np.array([10, 1.2, -.2])
arma296.fit(meth='css-mle',trend='c',solver='bfgs',transparams=False,start_params=[p,q])
arma296.fit(meth='css-mle',trend='c',solver='bfgs',transparams=False,start_params=np.array([p,q]))
arma296.fit(meth='css-mle',trend='c',solver='bfgs',transparams=False,start_params=q)
q.shape
q = np.zeros(93)
q[-1]=.9
q[0]=.1
arma296.fit(meth='css-mle',trend='c',solver='bfgs',transparams=False,start_params=q)
arma296.fit(trend='c',solver='bfgs',transparams=False,start_params=q)
arma296.fit(trend='c',transparams=False,start_params=q)
arma296.fit(transparams=False,start_params=q)
len(q)
p=np.array([10, 1.2, -.2])
q = np.zeros(99)
q[0]=.1
q[0]=10
q[1]=1
q[2]=-.2
q[-1]=.95
arma296.fit(transparams=False,start_params=q)
| mit | -6,713,280,455,513,286,000 | 37.444444 | 150 | 0.659627 | false |
ChromiumWebApps/chromium | mojo/public/bindings/generators/mojom_js_generator.py | 1 | 7742 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates JavaScript source files from a mojom.Module."""
from generate import mojom
from generate import mojom_pack
from generate import mojom_generator
from generate.template_expander import UseJinja
_kind_to_javascript_default_value = {
mojom.BOOL: "false",
mojom.INT8: "0",
mojom.UINT8: "0",
mojom.INT16: "0",
mojom.UINT16: "0",
mojom.INT32: "0",
mojom.UINT32: "0",
mojom.FLOAT: "0",
mojom.HANDLE: "core.kInvalidHandle",
mojom.DCPIPE: "core.kInvalidHandle",
mojom.DPPIPE: "core.kInvalidHandle",
mojom.MSGPIPE: "core.kInvalidHandle",
mojom.INT64: "0",
mojom.UINT64: "0",
mojom.DOUBLE: "0",
mojom.STRING: '""',
}
def JavaScriptDefaultValue(field):
if field.default:
raise Exception("Default values should've been handled in jinja.")
if field.kind in mojom.PRIMITIVES:
return _kind_to_javascript_default_value[field.kind]
if isinstance(field.kind, mojom.Struct):
return "null";
if isinstance(field.kind, mojom.Array):
return "[]";
if isinstance(field.kind, mojom.Interface):
return _kind_to_javascript_default_value[mojom.MSGPIPE]
def JavaScriptPayloadSize(packed):
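  # Payload size is the end offset of the last packed field, padded up to
  # an 8-byte boundary.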
packed_fields = packed.packed_fields
if not packed_fields:
return 0;
last_field = packed_fields[-1]
offset = last_field.offset + last_field.size
pad = mojom_pack.GetPad(offset, 8)
return offset + pad;
_kind_to_javascript_type = {
mojom.BOOL: "codec.Uint8",
mojom.INT8: "codec.Int8",
mojom.UINT8: "codec.Uint8",
mojom.INT16: "codec.Int16",
mojom.UINT16: "codec.Uint16",
mojom.INT32: "codec.Int32",
mojom.UINT32: "codec.Uint32",
mojom.FLOAT: "codec.Float",
mojom.HANDLE: "codec.Handle",
mojom.DCPIPE: "codec.Handle",
mojom.DPPIPE: "codec.Handle",
mojom.MSGPIPE: "codec.Handle",
mojom.INT64: "codec.Int64",
mojom.UINT64: "codec.Uint64",
mojom.DOUBLE: "codec.Double",
mojom.STRING: "codec.String",
}
def GetJavaScriptType(kind):
if kind in mojom.PRIMITIVES:
return _kind_to_javascript_type[kind]
if isinstance(kind, mojom.Struct):
return "new codec.PointerTo(%s)" % GetJavaScriptType(kind.name)
if isinstance(kind, mojom.Array):
return "new codec.ArrayOf(%s)" % GetJavaScriptType(kind.kind)
if isinstance(kind, mojom.Interface):
return GetJavaScriptType(mojom.MSGPIPE)
return kind
_kind_to_javascript_decode_snippet = {
mojom.BOOL: "read8() & 1",
mojom.INT8: "read8()",
mojom.UINT8: "read8()",
mojom.INT16: "read16()",
mojom.UINT16: "read16()",
mojom.INT32: "read32()",
mojom.UINT32: "read32()",
mojom.FLOAT: "decodeFloat()",
mojom.HANDLE: "decodeHandle()",
mojom.DCPIPE: "decodeHandle()",
mojom.DPPIPE: "decodeHandle()",
mojom.MSGPIPE: "decodeHandle()",
mojom.INT64: "read64()",
mojom.UINT64: "read64()",
mojom.DOUBLE: "decodeDouble()",
mojom.STRING: "decodeStringPointer()",
}
def JavaScriptDecodeSnippet(kind):
if kind in mojom.PRIMITIVES:
return _kind_to_javascript_decode_snippet[kind]
if isinstance(kind, mojom.Struct):
return "decodeStructPointer(%s)" % GetJavaScriptType(kind.name);
if isinstance(kind, mojom.Array):
return "decodeArrayPointer(%s)" % GetJavaScriptType(kind.kind);
if isinstance(kind, mojom.Interface):
return JavaScriptDecodeSnippet(mojom.MSGPIPE)
_kind_to_javascript_encode_snippet = {
mojom.BOOL: "write8(1 & ",
mojom.INT8: "write8(",
mojom.UINT8: "write8(",
mojom.INT16: "write16(",
mojom.UINT16: "write16(",
mojom.INT32: "write32(",
mojom.UINT32: "write32(",
mojom.FLOAT: "encodeFloat(",
mojom.HANDLE: "encodeHandle(",
mojom.DCPIPE: "encodeHandle(",
mojom.DPPIPE: "encodeHandle(",
mojom.MSGPIPE: "encodeHandle(",
mojom.INT64: "write64(",
mojom.UINT64: "write64(",
mojom.DOUBLE: "encodeDouble(",
mojom.STRING: "encodeStringPointer(",
}
def JavaScriptEncodeSnippet(kind):
if kind in mojom.PRIMITIVES:
return _kind_to_javascript_encode_snippet[kind]
if isinstance(kind, mojom.Struct):
return "encodeStructPointer(%s, " % GetJavaScriptType(kind.name);
if isinstance(kind, mojom.Array):
return "encodeArrayPointer(%s, " % GetJavaScriptType(kind.kind);
if isinstance(kind, mojom.Interface):
return JavaScriptEncodeSnippet(mojom.MSGPIPE)
def GetConstants(module):
"""Returns a generator that enumerates all constants that can be referenced
from this module."""
class Constant:
pass
for enum in module.enums:
for field in enum.fields:
constant = Constant()
constant.namespace = module.namespace
constant.is_current_namespace = True
constant.import_item = None
constant.name = (enum.name, field.name)
yield constant
for each in module.imports:
for enum in each["module"].enums:
for field in enum.fields:
constant = Constant()
constant.namespace = each["namespace"]
constant.is_current_namespace = constant.namespace == module.namespace
constant.import_item = each
constant.name = (enum.name, field.name)
yield constant
def TranslateConstants(value, module):
# We're assuming we're dealing with an identifier, but that may not be
# the case. If we're not, we just won't find any matches.
if value.find(".") != -1:
namespace, identifier = value.split(".")
else:
namespace, identifier = "", value
for constant in GetConstants(module):
if namespace == constant.namespace or (
namespace == "" and constant.is_current_namespace):
if constant.name[1] == identifier:
if constant.import_item:
return "%s.%s.%s" % (constant.import_item["unique_name"],
constant.name[0], constant.name[1])
else:
return "%s.%s" % (constant.name[0], constant.name[1])
return value
def ExpressionToText(value, module):
if value[0] != "EXPRESSION":
raise Exception("Expected EXPRESSION, got" + value)
return "".join(mojom_generator.ExpressionMapper(value,
lambda token: TranslateConstants(token, module)))
def JavascriptType(kind):
if kind.imported_from:
return kind.imported_from["unique_name"] + "." + kind.name
return kind.name
class Generator(mojom_generator.Generator):
js_filters = {
"camel_to_underscores": mojom_generator.CamelToUnderscores,
"default_value": JavaScriptDefaultValue,
"payload_size": JavaScriptPayloadSize,
"decode_snippet": JavaScriptDecodeSnippet,
"encode_snippet": JavaScriptEncodeSnippet,
"expression_to_text": ExpressionToText,
"is_object_kind": mojom_generator.IsObjectKind,
"is_string_kind": mojom_generator.IsStringKind,
"is_array_kind": lambda kind: isinstance(kind, mojom.Array),
"js_type": JavascriptType,
"stylize_method": mojom_generator.StudlyCapsToCamel,
"verify_token_type": mojom_generator.VerifyTokenType,
}
@UseJinja("js_templates/module.js.tmpl", filters=js_filters)
def GenerateJsModule(self):
return {
"imports": self.GetImports(),
"kinds": self.module.kinds,
"enums": self.module.enums,
"module": self.module,
"structs": self.GetStructs() + self.GetStructsFromMethods(),
"interfaces": self.module.interfaces,
}
def GenerateFiles(self):
self.Write(self.GenerateJsModule(), "%s.js" % self.module.name)
def GetImports(self):
# Since each import is assigned a variable in JS, they need to have unique
# names.
counter = 1
for each in self.module.imports:
each["unique_name"] = "import" + str(counter)
counter += 1
return self.module.imports
| bsd-3-clause | 4,849,057,587,439,504,000 | 30.34413 | 78 | 0.681478 | false |
kmolab/kmolab.github.io | data/Brython-3.3.4/Lib/logging/brython_handlers.py | 1 | 1179 | import logging
from browser.ajax import ajax
class XMLHTTPHandler(logging.Handler):
"""
A class which sends records to a Web server, using either GET or
POST semantics.
"""
def __init__(self, url, method="GET"):
"""
Initialize the instance with the host, the request URL, and the method
("GET" or "POST")
"""
logging.Handler.__init__(self)
method = method.upper()
if method not in ["GET", "POST"]:
raise ValueError("method must be GET or POST")
self.url = url
self.method = method
def mapLogRecord(self, record):
"""
Default implementation of mapping the log record into a dict
that is sent as the CGI data. Overwrite in your class.
Contributed by Franz Glasner.
"""
return record.__dict__
def emit(self, record):
"""
Emit a record.
Send the record to the Web server as a percent-encoded dictionary
"""
try:
req = ajax.open(self.method, self.url, async=False)
req.send(self.mapLogRecord(record))
except:
self.handleError(record)
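# Minimal usage sketch (not part of the original module). The import path below
# mirrors this file's location in Brython's Lib/ tree, and "/log" is only a
# placeholder for whatever URL the serving host actually exposes:
#
#   import logging
#   from logging.brython_handlers import XMLHTTPHandler
#
#   logger = logging.getLogger("app")
#   logger.addHandler(XMLHTTPHandler("/log", method="POST"))
#   logger.error("something went wrong")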
| agpl-3.0 | 5,833,815,378,473,545,000 | 27.756098 | 78 | 0.579304 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_11_01/operations/_route_filters_operations.py | 1 | 27170 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class RouteFiltersOperations(object):
"""RouteFiltersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
route_filter_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
route_filter_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
route_filter_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.RouteFilter"
"""Gets the specified route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param expand: Expands referenced express route bgp peering resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RouteFilter, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_11_01.models.RouteFilter
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilter"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteFilter', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
route_filter_name, # type: str
route_filter_parameters, # type: "_models.RouteFilter"
**kwargs # type: Any
):
# type: (...) -> "_models.RouteFilter"
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilter"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(route_filter_parameters, 'RouteFilter')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('RouteFilter', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('RouteFilter', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
route_filter_name, # type: str
route_filter_parameters, # type: "_models.RouteFilter"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.RouteFilter"]
"""Creates or updates a route filter in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param route_filter_parameters: Parameters supplied to the create or update route filter
operation.
:type route_filter_parameters: ~azure.mgmt.network.v2019_11_01.models.RouteFilter
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either RouteFilter or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_11_01.models.RouteFilter]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilter"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
route_filter_parameters=route_filter_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('RouteFilter', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'} # type: ignore
def update_tags(
self,
resource_group_name, # type: str
route_filter_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.RouteFilter"
"""Updates tags of a route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param parameters: Parameters supplied to update route filter tags.
:type parameters: ~azure.mgmt.network.v2019_11_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RouteFilter, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_11_01.models.RouteFilter
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilter"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteFilter', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'} # type: ignore
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.RouteFilterListResult"]
"""Gets all route filters in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteFilterListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_11_01.models.RouteFilterListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('RouteFilterListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters'} # type: ignore
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.RouteFilterListResult"]
"""Gets all route filters in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteFilterListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_11_01.models.RouteFilterListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('RouteFilterListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/routeFilters'} # type: ignore
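# Usage sketch (not part of the generated client): these operations are normally
# reached through NetworkManagementClient rather than instantiated directly. The
# subscription id, resource group, filter name and location below are
# placeholders, and the calls require valid Azure credentials.
if __name__ == "__main__":
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.network import NetworkManagementClient

    network_client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
    # Create (or update) a route filter, then list the filters in the group.
    poller = network_client.route_filters.begin_create_or_update(
        "my-resource-group",
        "my-route-filter",
        {"location": "westus", "tags": {"env": "test"}},
    )
    print(poller.result().name)
    for route_filter in network_client.route_filters.list_by_resource_group("my-resource-group"):
        print(route_filter.name)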
| mit | 116,262,467,864,763,620 | 47.691756 | 193 | 0.63537 | false |
Stratos42/EveBot | plugins.disabled/bf.py | 1 | 2472 | '''brainfuck interpreter adapted from (public domain) code at
http://brainfuck.sourceforge.net/brain.py'''
import re
import random
from util import hook
BUFFER_SIZE = 5000
MAX_STEPS = 1000000
@hook.command
def bf(inp):
".bf <prog> -- executes brainfuck program <prog>"""
program = re.sub('[^][<>+-.,]', '', inp)
# create a dict of brackets pairs, for speed later on
brackets = {}
open_brackets = []
for pos in range(len(program)):
if program[pos] == '[':
open_brackets.append(pos)
elif program[pos] == ']':
if len(open_brackets) > 0:
brackets[pos] = open_brackets[-1]
brackets[open_brackets[-1]] = pos
open_brackets.pop()
else:
return 'unbalanced brackets'
if len(open_brackets) != 0:
return 'unbalanced brackets'
# now we can start interpreting
ip = 0 # instruction pointer
mp = 0 # memory pointer
steps = 0
memory = [0] * BUFFER_SIZE # initial memory area
rightmost = 0
output = "" # we'll save the output here
# the main program loop:
while ip < len(program):
c = program[ip]
if c == '+':
            memory[mp] = (memory[mp] + 1) % 256  # wrap cell value to a byte
        elif c == '-':
            memory[mp] = (memory[mp] - 1) % 256
elif c == '>':
mp += 1
if mp > rightmost:
rightmost = mp
if mp >= len(memory):
# no restriction on memory growth!
memory.extend([0] * BUFFER_SIZE)
elif c == '<':
            mp = (mp - 1) % len(memory)  # wrap around to the end of memory
elif c == '.':
output += chr(memory[mp])
if len(output) > 500:
break
elif c == ',':
memory[mp] = random.randint(1, 255)
elif c == '[':
if memory[mp] == 0:
ip = brackets[ip]
elif c == ']':
if memory[mp] != 0:
ip = brackets[ip]
ip += 1
steps += 1
if steps > MAX_STEPS:
if output == '':
output = '(no output)'
output += '[exceeded %d iterations]' % MAX_STEPS
break
stripped_output = re.sub(r'[\x00-\x1F]', '', output)
if stripped_output == '':
if output != '':
return 'no printable output'
return 'no output'
return stripped_output[:430].decode('utf8', 'ignore')
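# Example (assuming the bot's util.hook module is importable; the decorator only
# attaches metadata, so bf() can also be called directly):
#
#   >>> bf('++++++++[>++++++++<-]>+.')
#   u'A'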
| gpl-3.0 | 4,300,077,651,816,528,000 | 27.090909 | 61 | 0.480987 | false |
koala-ai/tensorflow_nlp | nlp/chatbot/model.py | 1 | 10775 | import copy
import numpy as np
import tensorflow as tf
from nlp.chatbot.dataset import data_utils
class S2SModel(object):
def __init__(self,
source_vocab_size,
target_vocab_size,
buckets,
size,
dropout,
num_layers,
max_gradient_norm,
batch_size,
learning_rate,
num_samples,
forward_only=False,
dtype=tf.float32):
# init member variales
self.source_vocab_size = source_vocab_size
self.target_vocab_size = target_vocab_size
self.buckets = buckets
self.batch_size = batch_size
self.learning_rate = learning_rate
# LSTM cells
cell = tf.contrib.rnn.BasicLSTMCell(size)
cell = tf.contrib.rnn.DropoutWrapper(cell, output_keep_prob=dropout)
cell = tf.contrib.rnn.MultiRNNCell([cell] * num_layers)
output_projection = None
softmax_loss_function = None
if num_samples > 0 and num_samples < self.target_vocab_size:
            print('Using sampled softmax projection: {} samples'.format(num_samples))
w_t = tf.get_variable(
"proj_w",
[self.target_vocab_size, size],
dtype=dtype
)
w = tf.transpose(w_t)
b = tf.get_variable(
"proj_b",
[self.target_vocab_size],
dtype=dtype
)
output_projection = (w, b)
def sampled_loss(labels, logits):
labels = tf.reshape(labels, [-1, 1])
                # Training may be run in fp16, so cast everything to fp32 here.
local_w_t = tf.cast(w_t, tf.float32)
local_b = tf.cast(b, tf.float32)
local_inputs = tf.cast(logits, tf.float32)
return tf.cast(
tf.nn.sampled_softmax_loss(
weights=local_w_t,
biases=local_b,
labels=labels,
inputs=local_inputs,
num_sampled=num_samples,
num_classes=self.target_vocab_size
),
dtype
)
softmax_loss_function = sampled_loss
# seq2seq_f
def seq2seq_f(encoder_inputs, decoder_inputs, do_decode):
tmp_cell = copy.deepcopy(cell)
return tf.contrib.legacy_seq2seq.embedding_attention_seq2seq(
encoder_inputs,
decoder_inputs,
tmp_cell,
num_encoder_symbols=source_vocab_size,
num_decoder_symbols=target_vocab_size,
embedding_size=size,
output_projection=output_projection,
feed_previous=do_decode,
dtype=dtype
)
# inputs
self.encoder_inputs = []
self.decoder_inputs = []
self.decoder_weights = []
        # The last bucket (index -1) is the largest one.
for i in range(buckets[-1][0]):
self.encoder_inputs.append(tf.placeholder(
tf.int32,
shape=[None],
name='encoder_input_{}'.format(i)
))
        # Decoder inputs are one step longer than the outputs, so the targets
        # below can simply be the decoder inputs shifted left by one position.
for i in range(buckets[-1][1] + 1):
self.decoder_inputs.append(tf.placeholder(
tf.int32,
shape=[None],
name='decoder_input_{}'.format(i)
))
self.decoder_weights.append(tf.placeholder(
dtype,
shape=[None],
name='decoder_weight_{}'.format(i)
))
targets = [
self.decoder_inputs[i + 1] for i in range(buckets[-1][1])
]
if forward_only:
self.outputs, self.losses = tf.contrib.legacy_seq2seq.model_with_buckets(
self.encoder_inputs,
self.decoder_inputs,
targets,
self.decoder_weights,
buckets,
lambda x, y: seq2seq_f(x, y, True),
softmax_loss_function=softmax_loss_function
)
if output_projection is not None:
for b in range(len(buckets)):
self.outputs[b] = [
tf.matmul(
output,
output_projection[0]
) + output_projection[1]
for output in self.outputs[b]
]
else:
self.outputs, self.losses = tf.contrib.legacy_seq2seq.model_with_buckets(
self.encoder_inputs,
self.decoder_inputs,
targets,
self.decoder_weights,
buckets,
lambda x, y: seq2seq_f(x, y, False),
softmax_loss_function=softmax_loss_function
)
params = tf.trainable_variables()
opt = tf.train.AdamOptimizer(
learning_rate=learning_rate
)
if not forward_only:
self.gradient_norms = []
self.updates = []
for output, loss in zip(self.outputs, self.losses):
gradients = tf.gradients(loss, params)
clipped_gradients, norm = tf.clip_by_global_norm(
gradients,
max_gradient_norm
)
self.gradient_norms.append(norm)
self.updates.append(opt.apply_gradients(
zip(clipped_gradients, params)
))
# self.saver = tf.train.Saver(tf.all_variables())
self.saver = tf.train.Saver(
tf.all_variables(),
write_version=tf.train.SaverDef.V2
)
def step(
self,
session,
encoder_inputs,
decoder_inputs,
decoder_weights,
bucket_id,
forward_only
):
encoder_size, decoder_size = self.buckets[bucket_id]
if len(encoder_inputs) != encoder_size:
raise ValueError(
"Encoder length must be equal to the one in bucket,"
" %d != %d." % (len(encoder_inputs), encoder_size)
)
if len(decoder_inputs) != decoder_size:
raise ValueError(
"Decoder length must be equal to the one in bucket,"
" %d != %d." % (len(decoder_inputs), decoder_size)
)
if len(decoder_weights) != decoder_size:
raise ValueError(
"Weights length must be equal to the one in bucket,"
" %d != %d." % (len(decoder_weights), decoder_size)
)
input_feed = {}
for i in range(encoder_size):
input_feed[self.encoder_inputs[i].name] = encoder_inputs[i]
for i in range(decoder_size):
input_feed[self.decoder_inputs[i].name] = decoder_inputs[i]
input_feed[self.decoder_weights[i].name] = decoder_weights[i]
last_target = self.decoder_inputs[decoder_size].name
input_feed[last_target] = np.zeros([self.batch_size], dtype=np.int32)
        if not forward_only:
            output_feed = [
                self.updates[bucket_id],
                self.gradient_norms[bucket_id],
                self.losses[bucket_id]
            ]
            # Collect every decoder output, mirroring the forward-only branch,
            # so the third value returned by step() is always the full list.
            for i in range(decoder_size):
                output_feed.append(self.outputs[bucket_id][i])
else:
output_feed = [self.losses[bucket_id]]
for i in range(decoder_size):
output_feed.append(self.outputs[bucket_id][i])
outputs = session.run(output_feed, input_feed)
if not forward_only:
return outputs[1], outputs[2], outputs[3:]
else:
return None, outputs[0], outputs[1:]
def get_batch_data(self, bucket_dbs, bucket_id):
data = []
data_in = []
bucket_db = bucket_dbs[bucket_id]
for _ in range(self.batch_size):
ask, answer = bucket_db.random()
data.append((ask, answer))
data_in.append((answer, ask))
return data, data_in
def get_batch(self, bucket_dbs, bucket_id, data):
encoder_size, decoder_size = self.buckets[bucket_id]
# bucket_db = bucket_dbs[bucket_id]
encoder_inputs, decoder_inputs = [], []
for encoder_input, decoder_input in data:
# encoder_input, decoder_input = random.choice(data[bucket_id])
# encoder_input, decoder_input = bucket_db.random()
encoder_input = data_utils.sentence_indice(encoder_input)
decoder_input = data_utils.sentence_indice(decoder_input)
# Encoder
encoder_pad = [data_utils.PAD_ID] * (
encoder_size - len(encoder_input)
)
encoder_inputs.append(list(reversed(encoder_input + encoder_pad)))
# Decoder
decoder_pad_size = decoder_size - len(decoder_input) - 2
decoder_inputs.append(
[data_utils.GO_ID] + decoder_input +
[data_utils.EOS_ID] +
[data_utils.PAD_ID] * decoder_pad_size
)
batch_encoder_inputs, batch_decoder_inputs, batch_weights = [], [], []
# batch encoder
for i in range(encoder_size):
batch_encoder_inputs.append(np.array(
[encoder_inputs[j][i] for j in range(self.batch_size)],
dtype=np.int32
))
# batch decoder
for i in range(decoder_size):
batch_decoder_inputs.append(np.array(
[decoder_inputs[j][i] for j in range(self.batch_size)],
dtype=np.int32
))
batch_weight = np.ones(self.batch_size, dtype=np.float32)
for j in range(self.batch_size):
if i < decoder_size - 1:
target = decoder_inputs[j][i + 1]
if i == decoder_size - 1 or target == data_utils.PAD_ID:
batch_weight[j] = 0.0
batch_weights.append(batch_weight)
return batch_encoder_inputs, batch_decoder_inputs, batch_weights
def create_model(forward_only, args):
"""建立模型"""
buckets = data_utils.buckets
dtype = tf.float16 if args.use_fp16 else tf.float32
model = S2SModel(
data_utils.dim,
data_utils.dim,
buckets,
args.size,
args.dropout,
args.num_layers,
args.max_gradient_norm,
args.batch_size,
args.learning_rate,
args.num_samples,
forward_only,
dtype
)
return model | apache-2.0 | 883,289,279,071,003,100 | 34.851852 | 85 | 0.506622 | false |
liosha2007/temporary-groupdocs-python-sdk | groupdocs/models/GetJobsDocumentsResponse.py | 1 | 1166 | #!/usr/bin/env python
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class GetJobsDocumentsResponse:
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
self.swaggerTypes = {
'result': 'GetJobsDocumentsResult',
'status': 'str',
'error_message': 'str',
'composedOn': 'long'
}
self.result = None # GetJobsDocumentsResult
self.status = None # str
self.error_message = None # str
self.composedOn = None # long
| apache-2.0 | 6,006,836,324,678,266,000 | 29.684211 | 77 | 0.650086 | false |
nickweinberg/werewolf-slackbot | test_fixtures.py | 1 | 2584 | import copy
def get_empty_game_state():
# hi there
# make mock game state.
# we'll have several fixtures
# and a basic one we can set up in each test.
return {'players':{},
'votes':{},
'STATUS': 'INACTIVE',
'ROUND': None
}
def all_vote_but_one_state():
return copy.deepcopy({
'players': {
'ab': {
'name': 'nick',
'DM': 'dm channel',
'role': 'v',
'side': 'v',
'status': 'alive'
},
'cd': {
'name': 'not_nick',
'dm': 'dm channel',
'role': 'w',
'side': 'w',
'status': 'alive'
},
'ef': {
'name': 'maksym',
'dm': 'dm channel',
'role': 'v',
'side': 'v',
'status': 'alive',
},
'gh': {
'name': 'who',
'dm': 'dm channel',
'role': 'v',
'side': 'v',
'status': 'alive'
}
},
'votes': {
'gh': 'cd',
'ef': 'cd',
'ab': 'cd'
},
'STATUS': 'RUNNING',
'ROUND': 'day'
})
def get_fake_game_state():
return copy.deepcopy({
'players': {
'ab': {
'name': 'nick',
'DM': 'dm channel',
'role': 'v',
'side': 'v',
'status': 'alive'
},
'cd': {
'name': 'not_nick',
'dm': 'dm channel',
'role': 'w',
'side': 'w',
'status': 'alive'
},
'ef': {
'name': 'maksym',
'dm': 'dm channel',
'role': 'v',
'side': 'v',
'status': 'alive',
},
'gh': {
'name': 'who',
'dm': 'dm channel',
'role': 'v',
'side': 'v',
'status': 'alive'
}
},
'votes': {},
'STATUS': 'RUNNING',
'ROUND': 'night'
})
| mit | -8,900,308,451,640,099,000 | 25.10101 | 49 | 0.251548 | false |
rbuffat/pyepw | tests/test_typical_or_extreme_periods.py | 1 | 2262 | import os
import tempfile
import unittest
from pyepw.epw import TypicalOrExtremePeriods, TypicalOrExtremePeriod, EPW
class TestTypicalOrExtremePeriods(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_typical_or_extreme_periods(self):
obj = TypicalOrExtremePeriods()
typical_or_extreme_period_obj = TypicalOrExtremePeriod()
var_typical_or_extreme_period_typical_or_extreme_period_name = "typical_or_extreme_period_name"
typical_or_extreme_period_obj.typical_or_extreme_period_name = var_typical_or_extreme_period_typical_or_extreme_period_name
var_typical_or_extreme_period_typical_or_extreme_period_type = "typical_or_extreme_period_type"
typical_or_extreme_period_obj.typical_or_extreme_period_type = var_typical_or_extreme_period_typical_or_extreme_period_type
var_typical_or_extreme_period_period_start_day = "period_start_day"
typical_or_extreme_period_obj.period_start_day = var_typical_or_extreme_period_period_start_day
var_typical_or_extreme_period_period_end_day = "period_end_day"
typical_or_extreme_period_obj.period_end_day = var_typical_or_extreme_period_period_end_day
obj.add_typical_or_extreme_period(typical_or_extreme_period_obj)
epw = EPW(typical_or_extreme_periods=obj)
epw.save(self.path, check=False)
epw2 = EPW()
epw2.read(self.path)
self.assertEqual(
epw2.typical_or_extreme_periods.typical_or_extreme_periods[0].typical_or_extreme_period_name,
var_typical_or_extreme_period_typical_or_extreme_period_name)
self.assertEqual(
epw2.typical_or_extreme_periods.typical_or_extreme_periods[0].typical_or_extreme_period_type,
var_typical_or_extreme_period_typical_or_extreme_period_type)
self.assertEqual(
epw2.typical_or_extreme_periods.typical_or_extreme_periods[0].period_start_day,
var_typical_or_extreme_period_period_start_day)
self.assertEqual(
epw2.typical_or_extreme_periods.typical_or_extreme_periods[0].period_end_day,
var_typical_or_extreme_period_period_end_day)
| apache-2.0 | 546,571,778,201,440,830 | 49.266667 | 131 | 0.707339 | false |
3dfxsoftware/cbss-addons | project_phase_description/model/project_phase.py | 1 | 1240 | # -*- encoding: utf-8 -*-
#
# Module Writen to OpenERP, Open Source Management Solution
#
# Copyright (c) 2013 Vauxoo - http://www.vauxoo.com/
# All Rights Reserved.
# info Vauxoo ([email protected])
#
# Coded by: Jorge Angel Naranjo ([email protected])
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from openerp.osv import osv, fields
from openerp.tools.translate import _
class project_phase(osv.Model):
_inherit = 'project.phase'
_columns = {
'description':fields.text('Description'),
}
class project_task(osv.Model):
_inherit = 'project.task'
| gpl-2.0 | -3,804,833,365,533,972,500 | 31.631579 | 77 | 0.698387 | false |
prov-suite/interop-test-harness | prov_interop/provman/converter.py | 1 | 5717 | """Manages invocation of ProvScala `provmanagement` script.
"""
# Copyright (c) 2015 University of Southampton
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os.path
import subprocess
from prov_interop.component import CommandLineComponent
from prov_interop.component import ConfigError
from prov_interop.converter import ConversionError
from prov_interop.converter import Converter
class ProvManConverter(Converter, CommandLineComponent):
"""Manages invocation of ProvScala `provmanagement` script."""
INPUT = "INPUT"
"""str or unicode: token for input file in command-line specification"""
OUTPUT = "OUTPUT"
"""str or unicode: token for output file in command-line specification"""
INFORMAT = "INFORMAT"
"""str or unicode: token for output file in command-line specification"""
OUTFORMAT = "OUTFORMAT"
"""str or unicode: token for output file in command-line specification"""
def __init__(self):
"""Create converter.
"""
super(ProvManConverter, self).__init__()
def configure(self, config):
"""Configure converter. The configuration must hold:
- :class:`prov_interop.converter.Converter` configuration
- :class:`prov_interop.component.CommandLineComponent` configuration
``arguments`` must have tokens ``INPUT``, ``OUTPUT`` which are
place-holders for the input file and output file.
A valid configuration is::
{
"executable": "/home/user/provman/bin/provmanagement"
"arguments": "translate --infile INPUT --outfile OUTPUT --inputFormat INFORMAT --outformat OUTFORMAT"
"input-formats": ["provn", "ttl", "trig", "provx", "json"]
"output-formats": ["provn", "ttl", "trig", "provx", "json"]
}
:param config: Configuration
:type config: dict
:raises ConfigError: if `config` does not hold the above entries
"""
super(ProvManConverter, self).configure(config)
for token in [ProvManConverter.INPUT, ProvManConverter.OUTPUT,
ProvManConverter.INFORMAT, ProvManConverter.OUTFORMAT]:
if token not in self._arguments:
raise ConfigError("Missing token " + token)
def convert(self, in_file, out_file):
"""Convert input file into output file.
- Input and output formats are derived from `in_file` and
`out_file` file extensions.
- A check is done to see that `in_file` exists and that the input
and output format are in ``input-formats`` and
``output-formats`` respectively.
- ``executable`` and ``arguments`` are used to create a
command-line invocation, with ``INPUT`` and ``OUTPUT`` being
replaced with `in_file`, and `out_file`
An example command-line invocation is::
/home/user/ProvToolbox/bin/provmanagement translate --infile testcase1.json --outfile testcase1.provx --inputFormat json --outformat provx
:param in_file: Input file
:type in_file: str or unicode
:param out_file: Output file
:type out_file: str or unicode
:raises ConversionError: if the input file cannot be found, or
the exit code of ``provmanagement`` is non-zero
:raises OSError: if there are problems invoking the converter
e.g. the script is not found
"""
super(ProvManConverter, self).convert(in_file, out_file)
in_format = os.path.splitext(in_file)[1][1:]
out_format = os.path.splitext(out_file)[1][1:]
super(ProvManConverter, self).check_formats(in_format, out_format)
command_line = list(self._executable)
command_line.extend(self._arguments)
command_line = [in_file if x == ProvManConverter.INPUT else x
for x in command_line]
command_line = [out_file if x == ProvManConverter.OUTPUT else x
for x in command_line]
command_line = [in_format if x == ProvManConverter.INFORMAT else x
for x in command_line]
command_line = [out_format if x == ProvManConverter.OUTFORMAT else x
for x in command_line]
print((" ".join(command_line)))
return_code = subprocess.call(command_line)
if return_code != 0:
raise ConversionError(" ".join(command_line) +
" returned " + str(return_code))
if not os.path.isfile(out_file):
raise ConversionError("Output file not found: " + out_file)
| mit | 7,667,281,623,494,982,000 | 44.015748 | 148 | 0.659612 | false |
eRestin/Mezz | mhnweb/settings.py | 1 | 12829 | ALLOWED_HOSTS = ['*']
######################
# MEZZANINE SETTINGS #
######################
# The following settings are already defined with default values in
# the ``defaults.py`` module within each of Mezzanine's apps, but are
# common enough to be put here, commented out, for convenient
# overriding. Please consult the settings documentation for a full list
# of settings Mezzanine implements:
# http://mezzanine.jupo.org/docs/configuration.html#default-settings
# Controls the ordering and grouping of the admin menu.
#
# ADMIN_MENU_ORDER = (
# ("Content", ("pages.Page", "blog.BlogPost", "prefooter.SitewideContent",
# "generic.ThreadedComment", ("Media Library", "fb_browse"),)),
# ("Site", ("sites.Site", "redirects.Redirect", "conf.Setting")),
# ("Users", ("auth.User", "auth.Group",)),
#)
# A three item sequence, each containing a sequence of template tags
# used to render the admin dashboard.
#
# DASHBOARD_TAGS = (
# ("blog_tags.quick_blog", "mezzanine_tags.app_list"),
# ("comment_tags.recent_comments",),
# ("mezzanine_tags.recent_actions",),
# )
# A sequence of templates used by the ``page_menu`` template tag. Each
# item in the sequence is a three item sequence, containing a unique ID
# for the template, a label for the template, and the template path.
# These templates are then available for selection when editing which
# menus a page should appear in. Note that if a menu template is used
# that doesn't appear in this setting, all pages will appear in it.
# PAGE_MENU_TEMPLATES = (
# (1, "Top navigation bar", "pages/menus/dropdown.html"),
# (2, "Left-hand tree", "pages/menus/tree.html"),
# (3, "Footer", "pages/menus/footer.html"),
# )
# A sequence of fields that will be injected into Mezzanine's (or any
# library's) models. Each item in the sequence is a four item sequence.
# The first two items are the dotted path to the model and its field
# name to be added, and the dotted path to the field class to use for
# the field. The third and fourth items are a sequence of positional
# args and a dictionary of keyword args, to use when creating the
# field instance. When specifying the field class, the path
# ``django.models.db.`` can be omitted for regular Django model fields.
#
# EXTRA_MODEL_FIELDS = (
# (
# # Dotted path to field.
# "mezzanine.blog.models.BlogPost.image",
# # Dotted path to field class.
# "somelib.fields.ImageField",
# # Positional args for field class.
# ("Image",),
# # Keyword args for field class.
# {"blank": True, "upload_to": "blog"},
# ),
# # Example of adding a field to *all* of Mezzanine's content types:
# (
# "mezzanine.pages.models.Page.another_field",
# "IntegerField", # 'django.db.models.' is implied if path is omitted.
# ("Another name",),
# {"blank": True, "default": 1},
# ),
# )
# Setting to turn on featured images for blog posts. Defaults to False.
#
# BLOG_USE_FEATURED_IMAGE = True
# If True, the south application will be automatically added to the
# INSTALLED_APPS setting.
USE_SOUTH = True
########################
# MAIN DJANGO SETTINGS #
########################
# People who get code error notifications.
# In the format (('Full Name', '[email protected]'),
# ('Full Name', '[email protected]'))
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = "Europe/London"
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en"
# A boolean that turns on/off debug mode. When set to ``True``, stack traces
# are displayed for error pages. Should always be set to ``False`` in
# production. Best set to ``True`` in local_settings.py
DEBUG = True
# Whether a user's session cookie expires when the Web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# Make this unique, and don't share it with anybody.
SECRET_KEY = "733be3b1-37d8-47ca-8915-e31a151a6846e2e59e1b-5393-407d-8542-ee1e12114827eddfb52e-3143-4f8c-80ec-b198f42cc3eb"
# Tuple of IP addresses, as strings, that:
# * See debug comments, when DEBUG is true
# * Receive x-headers
INTERNAL_IPS = ("127.0.0.1",)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
)
AUTHENTICATION_BACKENDS = ("mezzanine.core.auth_backends.MezzanineBackend",)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
#############
# DATABASES #
#############
CONTACT_EMAIL = '[email protected]'
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = '[email protected]'
EMAIL_HOST_PASSWORD = 'trudnehaslo'
EMAIL_PORT = 587
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": 'django.db.backends.sqlite3', #"django.db.backends.",
# DB name or path to database file if using sqlite3.
"NAME": "mhnweb.db",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
#########
# PATHS #
#########
import os
# Full filesystem path to the project.
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
# Name of the directory for the project.
PROJECT_DIRNAME = PROJECT_ROOT.split(os.sep)[-1]
# Every cache key will get prefixed with this value - here we set it to
# the name of the directory the project is in to try and use something
# project specific.
CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_DIRNAME
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = "/static/"
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip("/"))
#STATIC_ROOT = ''
#STATICFILES_DIRS = (os.path.join('static'),)
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = STATIC_URL + "media/"
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip("/").split("/"))
# Package/module name to import the root urlpatterns from for the project.
ROOT_URLCONF = "%s.urls" % PROJECT_DIRNAME
# Put strings here, like "/home/html/django_templates"
# or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
TEMPLATE_DIRS = (os.path.join(PROJECT_ROOT, "templates"),)
################
# APPLICATIONS #
################
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.redirects",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.sitemaps",
"django.contrib.staticfiles",
"mezzanine.boot",
"mezzanine.conf",
"mezzanine.core",
"mezzanine.generic",
"mezzanine.blog",
"mezzanine.forms",
"mezzanine.pages",
"mezzanine.galleries",
"mezzanine.twitter",
#"mezzanine.accounts",
#"mezzanine.mobile",
)
# List of processors used by RequestContext to populate the context.
# Each one should be a callable that takes the request object as its
# only parameter and returns a dictionary to add to the context.
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.static",
"django.core.context_processors.media",
"django.core.context_processors.request",
"django.core.context_processors.tz",
"mezzanine.conf.context_processors.settings",
)
# List of middleware classes to use. Order is important; in the request phase,
# these middleware classes will be applied in the order given, and in the
# response phase the middleware will be applied in reverse order.
MIDDLEWARE_CLASSES = (
"mezzanine.core.middleware.UpdateCacheMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"mezzanine.core.request.CurrentRequestMiddleware",
"mezzanine.core.middleware.RedirectFallbackMiddleware",
"mezzanine.core.middleware.TemplateForDeviceMiddleware",
"mezzanine.core.middleware.TemplateForHostMiddleware",
"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware",
"mezzanine.core.middleware.SitePermissionMiddleware",
# Uncomment the following if using any of the SSL settings:
# "mezzanine.core.middleware.SSLRedirectMiddleware",
"mezzanine.pages.middleware.PageMiddleware",
"mezzanine.core.middleware.FetchFromCacheMiddleware",
)
# Store these package names here as they may change in the future since
# at the moment we are using custom forks of them.
PACKAGE_NAME_FILEBROWSER = "filebrowser_safe"
PACKAGE_NAME_GRAPPELLI = "grappelli_safe"
#########################
# OPTIONAL APPLICATIONS #
#########################
# These will be added to ``INSTALLED_APPS``, only if available.
OPTIONAL_APPS = (
"debug_toolbar",
"django_extensions",
"compressor",
PACKAGE_NAME_FILEBROWSER,
PACKAGE_NAME_GRAPPELLI,
)
DEBUG_TOOLBAR_CONFIG = {"INTERCEPT_REDIRECTS": False}
###################
# DEPLOY SETTINGS #
###################
# These settings are used by the default fabfile.py provided.
# Check fabfile.py for defaults.
# FABRIC = {
# "SSH_USER": "", # SSH username
# "SSH_PASS": "", # SSH password (consider key-based authentication)
# "SSH_KEY_PATH": "", # Local path to SSH key file, for key-based auth
# "HOSTS": [], # List of hosts to deploy to
# "VIRTUALENV_HOME": "", # Absolute remote path for virtualenvs
# "PROJECT_NAME": "", # Unique identifier for project
# "REQUIREMENTS_PATH": "", # Path to pip requirements, relative to project
# "GUNICORN_PORT": 8000, # Port gunicorn will listen on
# "LOCALE": "en_US.UTF-8", # Should end with ".UTF-8"
# "LIVE_HOSTNAME": "www.example.com", # Host for public site.
# "REPO_URL": "", # Git or Mercurial remote repo URL for the project
# "DB_PASS": "", # Live database password
# "ADMIN_PASS": "", # Live admin user password
# }
##################
# LOCAL SETTINGS #
##################
# Allow any settings to be defined in local_settings.py, which should be
# ignored in your version control system, allowing settings to be defined
# per machine.
try:
from local_settings import *
except ImportError:
pass
####################
# DYNAMIC SETTINGS #
####################
# set_dynamic_settings() will rewrite globals based on what has been
# defined so far, in order to provide some better defaults where
# applicable. We also allow this settings module to be imported
# without Mezzanine installed, as the case may be when using the
# fabfile, where setting the dynamic settings below isn't strictly
# required.
try:
from mezzanine.utils.conf import set_dynamic_settings
except ImportError:
pass
else:
set_dynamic_settings(globals())
| bsd-2-clause | 3,279,203,851,268,689,400 | 34.244505 | 123 | 0.689376 | false |
wwj718/edx-platform | openedx/core/djangoapps/programs/models.py | 1 | 3607 | """Models providing Programs support for the LMS and Studio."""
from collections import namedtuple
from urlparse import urljoin
from django.utils.translation import ugettext_lazy as _
from django.db import models
from config_models.models import ConfigurationModel
AuthoringAppConfig = namedtuple('AuthoringAppConfig', ['js_url', 'css_url'])
class ProgramsApiConfig(ConfigurationModel):
"""
Manages configuration for connecting to the Programs service and using its
API.
"""
OAUTH2_CLIENT_NAME = 'programs'
CACHE_KEY = 'programs.api.data'
API_NAME = 'programs'
api_version_number = models.IntegerField(verbose_name=_("API Version"))
internal_service_url = models.URLField(verbose_name=_("Internal Service URL"))
public_service_url = models.URLField(verbose_name=_("Public Service URL"))
authoring_app_js_path = models.CharField(
verbose_name=_("Path to authoring app's JS"),
max_length=255,
blank=True,
help_text=_(
"This value is required in order to enable the Studio authoring interface."
)
)
authoring_app_css_path = models.CharField(
verbose_name=_("Path to authoring app's CSS"),
max_length=255,
blank=True,
help_text=_(
"This value is required in order to enable the Studio authoring interface."
)
)
cache_ttl = models.PositiveIntegerField(
verbose_name=_("Cache Time To Live"),
default=0,
help_text=_(
"Specified in seconds. Enable caching by setting this to a value greater than 0."
)
)
enable_student_dashboard = models.BooleanField(
verbose_name=_("Enable Student Dashboard Displays"),
default=False
)
enable_studio_tab = models.BooleanField(
verbose_name=_("Enable Studio Authoring Interface"),
default=False
)
@property
def internal_api_url(self):
"""
Generate a URL based on internal service URL and API version number.
"""
return urljoin(self.internal_service_url, '/api/v{}/'.format(self.api_version_number))
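        # For example (illustrative values, not taken from this module): with
        # internal_service_url "https://programs.internal" and api_version_number 1,
        # this yields "https://programs.internal/api/v1/".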
@property
def public_api_url(self):
"""
Generate a URL based on public service URL and API version number.
"""
return urljoin(self.public_service_url, '/api/v{}/'.format(self.api_version_number))
@property
def authoring_app_config(self):
"""
Returns a named tuple containing information required for working with the Programs
authoring app, a Backbone app hosted by the Programs service.
"""
js_url = urljoin(self.public_service_url, self.authoring_app_js_path)
css_url = urljoin(self.public_service_url, self.authoring_app_css_path)
return AuthoringAppConfig(js_url=js_url, css_url=css_url)
@property
def is_cache_enabled(self):
"""Whether responses from the Programs API will be cached."""
return self.cache_ttl > 0
@property
def is_student_dashboard_enabled(self):
"""
Indicates whether LMS dashboard functionality related to Programs should
be enabled or not.
"""
return self.enabled and self.enable_student_dashboard
@property
def is_studio_tab_enabled(self):
"""
Indicates whether Studio functionality related to Programs should
be enabled or not.
"""
return (
self.enabled and
self.enable_studio_tab and
bool(self.authoring_app_js_path) and
bool(self.authoring_app_css_path)
)
| agpl-3.0 | 5,051,498,663,007,869,000 | 31.495495 | 94 | 0.642085 | false |
quaddra/engage-utils | engage_utils/test_wakeable_queue.py | 1 | 5086 | """Test the wakeable_queue.Queue class
"""
import unittest
import os
import sys
import random
import logging
import threading
from time import sleep
logger = logging.getLogger(__name__)
from wakeable_queue import Queue, AbortRequested, WakeableQueueWorker, WorkerStatus
random.seed()
STOP_MSG = []
class Worker(WakeableQueueWorker):
def __init__(self, worker_id, queue, consume_messages=False):
WakeableQueueWorker.__init__(self, worker_id, queue, STOP_MSG, logger)
self.consume_messages = consume_messages
if consume_messages:
logger.info("Worker %s will consume messages" % worker_id)
def process_batch(self, data):
if data[0]>=0:
#logger.info("worker %s got message %s" % (self.worker_id, data[0]))
# normal processing
sleep(0.1)
if not self.consume_messages:
#logger.info("worker %s got %s" % (self.worker_id, data[0]))
self.queue.put(data)
else:
logger.info("worker %s consuming message %s" % (self.worker_id, data[0]))
elif data[0]==(-1): # a -1 means we should signal the abort
logger.info("Worker %s requesting an abort" % self.worker_id)
self.status = WorkerStatus.ABORT_REQUESTED
self.queue.abort_request()
elif data[0]==(-2): # a -2 means we should throw an exception
raise Exception("Expected error")
else:
assert 0
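# Message protocol exercised by these tests (summary added for readability):
#   [n, ...] with n >= 0 -> normal batch: the worker sleeps briefly, then re-queues the
#                           batch unless it was created with consume_messages=True
#   [-1]                 -> the worker sets ABORT_REQUESTED and calls queue.abort_request()
#   [-2]                 -> the worker raises an exception (the tests' "poison pill")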
NUM_WORKERS = 4
class TestWakeableQueue(unittest.TestCase):
def setUp(self):
self.queue = Queue()
self.workers = [Worker(i, self.queue, False) for i in range(NUM_WORKERS)]
def test_normal_processing(self):
logger.info("Runing test_normal_processing")
self.workers.append(Worker(NUM_WORKERS, self.queue, consume_messages=True))
for i in range(100):
self.queue.put([i+1, 2, 3, 4, 5])
WakeableQueueWorker.run_workers(self.workers, self.queue, STOP_MSG,
logger)
for w in self.workers:
self.assertTrue(w.status==WorkerStatus.STOPPED,
"Worker %s not stopped, status was %s" %
(w.worker_id, w.status))
def test_abort_processing(self):
logger.info("Runing test_abort_processing")
poison_pill = random.randint(25, 80)
for i in range(100):
if i == poison_pill:
logger.info("poison pill is at %d" % poison_pill)
self.queue.put([-1])
else:
self.queue.put([1, 2, 3, 4, 5])
try:
WakeableQueueWorker.run_workers(self.workers, self.queue, STOP_MSG,
logger)
self.assertTrue(False, "Did not get the expected abort request")
except AbortRequested:
logger.info("Master got AbortRequested, as expected")
for w in self.workers:
self.assertTrue(w.status==WorkerStatus.ABORTED or
w.status==WorkerStatus.ABORT_REQUESTED,
"Worker %s has status %s, expecting an abort" % (w.worker_id, w.status))
logger.info("All workers aborted as expected")
def test_exception_processing(self):
logger.info("Runing test_exception_processing")
poison_pill = random.randint(25, 80)
for i in range(100):
if i == poison_pill:
logger.info("poison pill is at %d" % poison_pill)
self.queue.put([-2])
else:
self.queue.put([1, 2, 3, 4, 5])
try:
WakeableQueueWorker.run_workers(self.workers, self.queue, STOP_MSG,
logger)
self.assertTrue(False, "Did not get the expected abort request")
except AbortRequested:
logger.info("Master got AbortRequested, as expected")
for w in self.workers:
self.assertTrue(w.status==WorkerStatus.ABORTED or
w.status==WorkerStatus.ABORT_REQUESTED,
"Worker %s has status %s, expecting an abort" % (w.worker_id, w.status))
logger.info("All workers aborted as expected")
def test_single_batch(self):
"""Test case where we have two workers, both competing for a single
batch.
"""
logger.info("Runing test_single_batch")
self.workers = [Worker(i, self.queue, True) for i in range(2)]
self.queue.put([1, 2, 3, 4, 5])
WakeableQueueWorker.run_workers(self.workers, self.queue, STOP_MSG,
logger)
for w in self.workers:
self.assertTrue(w.status==WorkerStatus.STOPPED,
"Worker %s not stopped, status was %s" %
(w.worker_id, w.status))
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(argv=sys.argv)
| apache-2.0 | 5,507,107,772,898,618,000 | 38.734375 | 100 | 0.559772 | false |
tavallaie/RoboDynamixel | dxl/dxlregisters.py | 1 | 1221 | #!/usr/bin/env python
# Dynamixel library for MX28 and MX64
# WINDOWS WARNING: For best performance, parameters of the COM Port should be set to maximum baud rate, and 1ms delay (Device Manager, COM Ports, properties, advanced)
class DxlRegister():
def __init__(self,address,size,mode='r',eeprom=False,fromdxl= lambda x: x,todxl= lambda x: x,fromsi=lambda x:x,tosi=lambda x:x,range=None):
self.address=address
self.size=size
self.mode=mode
self.eeprom=eeprom
self.fromdxl=fromdxl
self.todxl=todxl
self.fromsi=fromsi
self.tosi=tosi
self.range=range
class DxlRegisterByte(DxlRegister):
def __init__(self,address,mode='r',eeprom=False,fromsi=lambda x:x,tosi=lambda x:x,range=None):
DxlRegister.__init__(self,address,1,mode,eeprom,fromdxl=lambda x:x[0],todxl=lambda x:[x],range=range,fromsi=fromsi,tosi=tosi)
class DxlRegisterWord(DxlRegister):
def __init__(self,address,mode='r',eeprom=False,fromsi=lambda x:x,tosi=lambda x:x,range=None):
DxlRegister.__init__(self,address,2,mode,eeprom,fromdxl=lambda x:x[0]+(x[1]<<8),todxl=lambda x:[int(x)&0xFF,(int(x)>>8)&0xFF] ,range=range,fromsi=fromsi,tosi=tosi)
| mit | 3,903,318,515,910,308,000 | 41.103448 | 171 | 0.685504 | false |
trmznt/genaf | genaf/views/utils/plot.py | 1 | 3274 |
# general plot / graphics utility using matplotlib
from genaf.views.tools import *
from matplotlib import pyplot as plt
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
import pandas
import io, base64
@roles( PUBLIC )
def index(request):
# check
    if request.GET.get('_method', None) not in ('_exec', '_dfexec'):
pform, jscode = create_form( request )
return render_to_response('genaf:templates/utils/index.mako',
{ 'title': 'Plotting Utility',
'html': pform,
'code': jscode,
}, request = request )
    if request.GET.get('_method') == '_dfexec':
df = parse_df(request.GET.get('dfdata'))
else:
df = parse_textdata(request.GET.get('textdata'))
plot_type = request.GET.get('plot_type')
if plot_type == 'B':
html, jscode = column_chart(df)
elif plot_type == 'S':
return error_page(request, 'Scatter plot not implemented yet')
elif plot_type == 'P':
html, jscode = pie_chart(df)
return render_to_response('genaf:templates/utils/index.mako',
{ 'title': 'Plot',
'html': html,
'code': jscode,
}, request = request )
def create_form(request):
""" return html, jscode """
pform = form(name='plotform', action='#')
pform.add(
fieldset(name='data')[
input_textarea('textdata', label='Data'),
],
fieldset(name='options')[
input_select(name='plot_type', label='Plot type', value='B',
options = [ ('B', 'Bar (vertical) / column chart'),
('S', 'Scatter x,y plot'),
('P', 'Pie chart'),
] ),
],
fieldset()[ submit_bar('Create plot', '_exec')]
)
return (pform, '')
def parse_textdata(textdata):
""" parse data, with the first line as header, and consecutive lines as data """
header, content = textdata.split('\n', 1)
columns = [ x.strip() for x in header.split('|') ]
buff = io.StringIO(content)
dataframe = pandas.read_table(buff, header=None, names = columns)
return dataframe
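# Illustrative input for parse_textdata (an assumption based on the parsing above):
# the first line is a "|"-separated header, the remaining lines are tab-separated
# data rows, e.g.
#
#   label | value
#   apples<TAB>3
#   pears<TAB>5
#
# which yields a DataFrame with columns ["label", "value"].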
def save_figure(canvas):
figfile = io.BytesIO()
canvas.print_figure(figfile)
figfile.seek(0)
figdata_png = figfile.getvalue()
figdata_png = base64.b64encode(figdata_png).decode('ASCII')
fig_html = literal('<img src="data:image/png;base64,%s" >' % figdata_png)
return fig_html,''
def column_chart(df):
""" creates column (vertical bar) chart """
fig = Figure()
canvas = FigureCanvas(fig)
ax = fig.add_subplot(111)
ax.bar(df.index, df.iloc[:,1], align='center')
ax.set_xlabel(df.columns[0])
ax.set_xticks(df.index)
ax.set_xticklabels(df.iloc[:,0], rotation='vertical')
ax.set_ylabel(df.columns[1])
fig.tight_layout()
return save_figure(canvas)
def pie_chart(df):
fig = Figure()
canvas = FigureCanvas(fig)
ax = fig.add_subplot(111, aspect=1)
ax.pie( df.iloc[:,1], labels = df.iloc[:,0], counterclock=False, startangle=90 )
ax.set_xlabel(df.columns[0])
fig.tight_layout()
return save_figure(canvas)
| lgpl-3.0 | -8,268,746,101,189,477,000 | 25.617886 | 84 | 0.583079 | false |
Azure/azure-sdk-for-python | sdk/confidentialledger/azure-mgmt-confidentialledger/azure/mgmt/confidentialledger/aio/_configuration.py | 1 | 3287 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class ConfidentialLedgerConfiguration(Configuration):
"""Configuration for ConfidentialLedger.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The Azure subscription ID. This is a GUID-formatted string (e.g. 00000000-0000-0000-0000-000000000000).
:type subscription_id: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
super(ConfidentialLedgerConfiguration, self).__init__(**kwargs)
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2020-12-01-preview"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-confidentialledger/{}'.format(VERSION))
self._configure(**kwargs)
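    # Illustrative construction (assumes the separate azure-identity package;
    # not part of this generated module):
    #   from azure.identity.aio import DefaultAzureCredential
    #   config = ConfidentialLedgerConfiguration(DefaultAzureCredential(), "<subscription id>")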
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
| mit | 5,589,294,666,337,175,000 | 48.059701 | 134 | 0.683906 | false |
Osndok/zim-desktop-wiki | zim/plugins/tasklist/__init__.py | 1 | 8109 | # -*- coding: utf-8 -*-
# Copyright 2009-2017 Jaap Karssenberg <[email protected]>
# TODO: allow more complex queries for filter, in particular (NOT tag AND tag)
# allow multiple tabs in dialog / side pane with configurable query
#
# TODO: add an interface for this plugin in the WWW frontend
#
# TODO: commandline option
# - open dialog
# - output to stdout with configurable format
# - force update, initialization
#
# TODO: store parser settings in notebook, not in preferences
# in dialog make it clear what is per notebook and what is user prefs
# tab in properties, link to open that from plugin prefs ?
# TODO: test coverage for the start date label (and due with "<")
# TODO: test coverage for start / due date from calendar page
# TODO: test coverage for sorting in list_open_tasks
# TODO: test coverage include / exclude sections
# TODO: update manual
from __future__ import with_statement
from zim.plugins import PluginClass, extends, ObjectExtension, WindowExtension
from zim.actions import action
from zim.config import StringAllowEmpty
from zim.signals import DelayedCallback
from zim.gui.widgets import RIGHT_PANE, PANE_POSITIONS
from .indexer import TasksIndexer, TasksView
from .gui import TaskListDialog, TaskListWidget
class TaskListPlugin(PluginClass):
plugin_info = {
'name': _('Task List'), # T: plugin name
'description': _('''\
This plugin adds a dialog showing all open tasks in
this notebook. Open tasks can be either open checkboxes
or items marked with tags like "TODO" or "FIXME".
This is a core plugin shipping with zim.
'''), # T: plugin description
'author': 'Jaap Karssenberg',
'help': 'Plugins:Task List'
}
parser_preferences = (
# key, type, label, default
('all_checkboxes', 'bool', _('Consider all checkboxes as tasks'), True),
# T: label for plugin preferences dialog
('labels', 'string', _('Labels marking tasks'), 'FIXME, TODO', StringAllowEmpty),
# T: label for plugin preferences dialog - labels are e.g. "FIXME", "TODO"
('integrate_with_journal', 'choice', _('Use date from journal pages'), 'start', (
('none', _('do not use')), # T: choice for "Use date from journal pages"
('start', _('as start date for tasks')), # T: choice for "Use date from journal pages"
('due', _('as due date for tasks')) # T: choice for "Use date from journal pages"
)),
('included_subtrees', 'string', _('Section(s) to index'), '', StringAllowEmpty),
# T: Notebook sections to search for tasks - default is the whole tree (empty string means everything)
('excluded_subtrees', 'string', _('Section(s) to ignore'), '', StringAllowEmpty),
# T: Notebook sections to exclude when searching for tasks - default is none
)
plugin_preferences = (
# key, type, label, default
('embedded', 'bool', _('Show tasklist in sidepane'), False),
# T: preferences option
('pane', 'choice', _('Position in the window'), RIGHT_PANE, PANE_POSITIONS),
# T: preferences option
) + parser_preferences + (
('nonactionable_tags', 'string', _('Tags for non-actionable tasks'), '', StringAllowEmpty),
# T: label for plugin preferences dialog
('tag_by_page', 'bool', _('Turn page name into tags for task items'), False),
# T: label for plugin preferences dialog
('use_workweek', 'bool', _('Flag tasks due on Monday or Tuesday before the weekend'), False),
# T: label for plugin preferences dialog
)
hide_preferences = ('nonactionable_tags', 'tag_by_page', 'use_workweek')
# These are deprecated, but I don't dare to remove them yet
# so hide them in the configuration dialog instead
@extends('Notebook')
class NotebookExtension(ObjectExtension):
__signals__ = {
'tasklist-changed': (None, None, ()),
}
def __init__(self, plugin, notebook):
ObjectExtension.__init__(self, plugin, notebook)
self.notebook = notebook
self._parser_key = self._get_parser_key()
self.index = notebook.index
if self.index.get_property(TasksIndexer.PLUGIN_NAME) != TasksIndexer.PLUGIN_DB_FORMAT:
self.index._db.executescript(TasksIndexer.TEARDOWN_SCRIPT) # XXX
self.index.flag_reindex()
self.indexer = None
self._setup_indexer(self.index, self.index.update_iter)
self.connectto(self.index, 'new-update-iter', self._setup_indexer)
self.connectto(plugin.preferences, 'changed', self.on_preferences_changed)
def _setup_indexer(self, index, update_iter):
if self.indexer is not None:
self.disconnect_from(self.indexer)
self.indexer.disconnect_all()
self.indexer = TasksIndexer.new_from_index(index, self.plugin.preferences)
update_iter.add_indexer(self.indexer)
self.connectto(self.indexer, 'tasklist-changed')
def on_preferences_changed(self, preferences):
# Need to construct new parser, re-index pages
if self._parser_key != self._get_parser_key():
self._parser_key = self._get_parser_key()
self.disconnect_from(self.indexer)
self.indexer.disconnect_all()
self.indexer = TasksIndexer.new_from_index(self.index, preferences)
self.index.flag_reindex()
self.connectto(self.indexer, 'tasklist-changed')
def on_tasklist_changed(self, indexer):
self.emit('tasklist-changed')
def _get_parser_key(self):
return tuple(
self.plugin.preferences[t[0]]
for t in self.plugin.parser_preferences
)
def teardown(self):
self.indexer.disconnect_all()
self.notebook.index.update_iter.remove_indexer(self.indexer)
self.index._db.executescript(TasksIndexer.TEARDOWN_SCRIPT) # XXX
self.index.set_property(TasksIndexer.PLUGIN_NAME, None)
@extends('MainWindow')
class MainWindowExtension(WindowExtension):
uimanager_xml = '''
<ui>
<menubar name='menubar'>
<menu action='view_menu'>
<placeholder name="plugin_items">
<menuitem action="show_task_list" />
</placeholder>
</menu>
</menubar>
<toolbar name='toolbar'>
<placeholder name='tools'>
<toolitem action='show_task_list'/>
</placeholder>
</toolbar>
</ui>
'''
def __init__(self, plugin, window):
WindowExtension.__init__(self, plugin, window)
self._widget = None
self.on_preferences_changed(plugin.preferences)
self.connectto(plugin.preferences, 'changed', self.on_preferences_changed)
@action(_('Task List'), stock='zim-task-list', readonly=True) # T: menu item
def show_task_list(self):
# TODO: add check + dialog for index probably_up_to_date
index = self.window.ui.notebook.index # XXX
tasksview = TasksView.new_from_index(index)
dialog = TaskListDialog.unique(self, self.window, tasksview, self.plugin.preferences)
dialog.present()
def on_preferences_changed(self, preferences):
if preferences['embedded']:
if self._widget is None:
self._init_widget()
else:
self._widget.task_list.refresh()
try:
self.window.remove(self._widget)
except ValueError:
pass
self.window.add_tab(_('Tasks'), self._widget, preferences['pane'])
# T: tab label for side pane
self._widget.show_all()
else:
if self._widget:
self.window.remove(self._widget)
self._widget = None
def _init_widget(self):
index = self.window.ui.notebook.index # XXX
tasksview = TasksView.new_from_index(index)
opener = self.window.get_resource_opener()
uistate = self.window.ui.uistate['TaskListSidePane']
self._widget = TaskListWidget(tasksview, opener, self.plugin.preferences, uistate)
def on_tasklist_changed(o):
self._widget.task_list.refresh()
callback = DelayedCallback(10, on_tasklist_changed)
# Don't really care about the delay, but want to
# make it less blocking - now it is at least on idle
### XXX HACK to get dependency to connect to
### -- no access to plugin, so can;t use get_extension()
## -- duplicat of this snippet in TaskListDialog
for e in self.window.ui.notebook.__zim_extension_objects__:
if hasattr(e, 'indexer') and e.indexer.__class__.__name__ == 'TasksIndexer':
self.connectto(e, 'tasklist-changed', callback)
break
else:
raise AssertionError('Could not find tasklist notebook extension')
def teardown(self):
if self._widget:
self.window.remove(self._widget)
self._widget = None
| gpl-2.0 | 1,262,954,511,106,585,300 | 34.41048 | 105 | 0.702429 | false |
turdusmerula/kipartman | kipartbase/swagger_server/controllers/controller_upload_file.py | 1 | 1796 | import connexion
from swagger_server.models.upload_file import UploadFile
from swagger_server.models.upload_file_data import UploadFileData
from swagger_server.models.error import Error
from datetime import date, datetime
from typing import List, Dict
from six import iteritems
from ..util import deserialize_date, deserialize_datetime
import api.models
import api.file_storage
from os.path import expanduser
home = expanduser("~")
def serialize_UploadFileData(fupload_file, upload_file=None):
if upload_file is None:
upload_file = UploadFileData()
upload_file.source_name = fupload_file.source_name
upload_file.storage_path = fupload_file.storage_path
return upload_file
def serialize_UploadFile(fupload_file, upload_file=None):
if upload_file is None:
upload_file = UploadFile()
upload_file.id = fupload_file.id
serialize_UploadFileData(fupload_file, upload_file)
return upload_file
def add_upload_file(upfile=None, description=None):
"""
add_upload_file
Upload a file.
:param upfile: The file to upload.
:type upfile: werkzeug.datastructures.FileStorage
:param description: The file to upload.
:type description: str
:rtype: UploadFile
"""
storage = api.file_storage.FileStorage()
fupload_file = storage.add_file(upfile)
return serialize_UploadFile(fupload_file)
def find_upload_file(upload_file_id):
"""
find_upload_file
Return a file
:param upload_file_id: File id
:type upload_file_id: int
:rtype: UploadFile
"""
try:
fupload_file = api.models.File.objects.get(id=upload_file_id)
    except Exception:
        return Error(code=1000, message='File %d does not exist' % upload_file_id), 403
return serialize_UploadFile(fupload_file)
| gpl-3.0 | 5,873,611,844,778,625,000 | 27.507937 | 86 | 0.713808 | false |
mikelolasagasti/revelation | src/lib/ui.py | 1 | 48073 | #
# Revelation - a password manager for GNOME 2
# http://oss.codepoet.no/revelation/
# $Id$
#
# Module for UI functionality
#
#
# Copyright (c) 2003-2006 Erik Grinaker
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from . import config, data, dialog, entry, io, util
import gettext
import time
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import GObject, Gtk, Gdk, Gio, Pango # noqa: E402
_ = gettext.gettext
STOCK_CONTINUE = _("_Continue") # "revelation-continue"
STOCK_DISCARD = "revelation-discard"
STOCK_EDIT = "revelation-edit"
STOCK_EXPORT = _("_Export") # "revelation-export"
STOCK_FOLDER = "revelation-folder"
STOCK_GENERATE = _("_Generate") # "revelation-generate"
STOCK_IMPORT = _("_Import") # "revelation-import"
STOCK_GOTO = "revelation-goto"
STOCK_LOCK = "revelation-lock"
STOCK_NEW_ENTRY = _("_Add Entry") # "revelation-new-entry"
STOCK_NEW_FOLDER = _("_Add Folder") # "revelation-new-folder"
STOCK_NEXT = "go-down" # "revelation-next"
STOCK_PASSWORD_CHANGE = _("_Change") # "revelation-password-change"
STOCK_PASSWORD_CHECK = "revelation-password-check" # nosec
STOCK_PASSWORD_STRONG = "security-high" # nosec "revelation-password-strong"
STOCK_PASSWORD_WEAK = "security-low" # nosec "revelation-password-weak"
STOCK_PREVIOUS = "go-up" # "revelation-previous"
STOCK_RELOAD = _("_Reload") # "revelation-reload"
STOCK_REMOVE = "revelation-remove"
STOCK_REPLACE = _("_Replace") # "revelation-replace"
STOCK_UNKNOWN = "dialog-question" # "revelation-unknown"
STOCK_UNLOCK = _("_Unlock") # "revelation-unlock"
STOCK_UPDATE = _("_Update") # "revelation-update"
STOCK_ENTRY_FOLDER = "folder" # "revelation-account-folder"
STOCK_ENTRY_FOLDER_OPEN = "folder-open" # "revelation-account-folder-open"
STOCK_ENTRY_CREDITCARD = "x-office-contact" # "revelation-account-creditcard"
STOCK_ENTRY_CRYPTOKEY = "dialog-password" # "revelation-account-cryptokey"
STOCK_ENTRY_DATABASE = "server-database" # "revelation-account-database"
STOCK_ENTRY_DOOR = "changes-allow" # "revelation-account-door"
STOCK_ENTRY_EMAIL = "emblem-mail" # "revelation-account-email"
STOCK_ENTRY_FTP = "system-file-manager" # "revelation-account-ftp"
STOCK_ENTRY_GENERIC = "document-new" # "revelation-account-generic"
STOCK_ENTRY_PHONE = "phone" # "revelation-account-phone"
STOCK_ENTRY_SHELL = "utilities-terminal" # "revelation-account-shell"
STOCK_ENTRY_REMOTEDESKTOP = "preferences-desktop-remote-desktop" # "revelation-account-remotedesktop"
STOCK_ENTRY_WEBSITE = "web-browser" # "revelation-account-website"
ICON_SIZE_APPLET = Gtk.IconSize.LARGE_TOOLBAR
ICON_SIZE_DATAVIEW = Gtk.IconSize.LARGE_TOOLBAR
ICON_SIZE_DROPDOWN = Gtk.IconSize.SMALL_TOOLBAR
ICON_SIZE_ENTRY = Gtk.IconSize.MENU
ICON_SIZE_FALLBACK = Gtk.IconSize.LARGE_TOOLBAR
ICON_SIZE_HEADLINE = Gtk.IconSize.LARGE_TOOLBAR
ICON_SIZE_LABEL = Gtk.IconSize.MENU
ICON_SIZE_LOGO = Gtk.IconSize.DND
ICON_SIZE_TREEVIEW = Gtk.IconSize.MENU
STOCK_ICONS = (
(STOCK_ENTRY_CREDITCARD, "contact-new", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_CRYPTOKEY, "dialog-password", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_DATABASE, "package_system", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_DOOR, "changes-allow", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_EMAIL, "emblem-mail", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_FTP, "system-file-manager", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_GENERIC, "document-new", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_PHONE, "phone", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_SHELL, "utilities-terminal", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_REMOTEDESKTOP, "preferences-desktop-remote-desktop", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_WEBSITE, "web-browser", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_FOLDER, "folder", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_FOLDER_OPEN, "folder-open", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
)
STOCK_ITEMS = (
(STOCK_CONTINUE, _('_Continue'), "stock_test-mode"),
(STOCK_DISCARD, _('_Discard'), Gtk.STOCK_DELETE),
(STOCK_EDIT, _('_Edit'), Gtk.STOCK_EDIT),
(STOCK_EXPORT, _('_Export'), Gtk.STOCK_EXECUTE),
(STOCK_FOLDER, '', "stock_folder"),
(STOCK_GENERATE, _('_Generate'), Gtk.STOCK_EXECUTE),
(STOCK_GOTO, _('_Go to'), Gtk.STOCK_JUMP_TO),
(STOCK_IMPORT, _('_Import'), Gtk.STOCK_CONVERT),
(STOCK_LOCK, _('_Lock'), "stock_lock"),
(STOCK_NEW_ENTRY, _('_Add Entry'), Gtk.STOCK_ADD),
(STOCK_NEW_FOLDER, _('_Add Folder'), "stock_folder"),
(STOCK_NEXT, _('Next'), Gtk.STOCK_GO_DOWN),
(STOCK_PASSWORD_CHANGE, _('_Change'), "stock_lock-ok"),
(STOCK_PASSWORD_CHECK, _('_Check'), "stock_lock-ok"),
(STOCK_PASSWORD_STRONG, '', "stock_lock-ok"),
(STOCK_PASSWORD_WEAK, '', "stock_lock-broken"),
(STOCK_PREVIOUS, _('Previous'), Gtk.STOCK_GO_UP),
(STOCK_RELOAD, _('_Reload'), Gtk.STOCK_REFRESH),
(STOCK_REMOVE, _('Re_move'), Gtk.STOCK_DELETE),
(STOCK_REPLACE, _('_Replace'), Gtk.STOCK_SAVE_AS),
(STOCK_UNKNOWN, _('Unknown'), "dialog-question"),
(STOCK_UNLOCK, _('_Unlock'), "stock_lock-open"),
(STOCK_UPDATE, _('_Update'), "stock_edit"),
)
# EXCEPTIONS #
class DataError(Exception):
"Exception for invalid data"
pass
# FUNCTIONS #
def generate_field_display_widget(field, cfg = None, userdata = None):
"Generates a widget for displaying a field value"
if field.datatype == entry.DATATYPE_EMAIL:
widget = LinkButton("mailto:%s" % field.value, util.escape_markup(field.value))
elif field.datatype == entry.DATATYPE_PASSWORD:
widget = PasswordLabel(util.escape_markup(field.value), cfg, userdata)
elif field.datatype == entry.DATATYPE_URL:
widget = LinkButton(field.value, util.escape_markup(field.value))
else:
widget = Label(util.escape_markup(field.value))
widget.set_selectable(True)
return widget
def generate_field_edit_widget(field, cfg = None, userdata = None):
"Generates a widget for editing a field"
if type(field) == entry.PasswordField:
widget = PasswordEntryGenerate(None, cfg, userdata)
elif type(field) == entry.UsernameField:
widget = Gtk.ComboBox.new_with_entry()
setup_comboboxentry(widget, userdata)
elif field.datatype == entry.DATATYPE_FILE:
widget = FileEntry()
elif field.datatype == entry.DATATYPE_PASSWORD:
widget = PasswordEntry(None, cfg, userdata)
else:
widget = Entry()
widget.set_text(field.value or "")
return widget
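# Illustrative use of the two generators above (assumed call sites, for clarity):
#   display = generate_field_display_widget(field, cfg, userdata)
#   editor = generate_field_edit_widget(field, cfg, userdata)
# Both dispatch on the field type/datatype; userdata is interpreted per type
# (a clipboard for password fields, a list of previous usernames for username fields).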
def setup_comboboxentry(widget, userdata=None):
widget.entry = widget.get_child()
widget.entry.set_activates_default(True)
widget.set_text = widget.entry.set_text
widget.get_text = widget.entry.get_text
widget.model = Gtk.ListStore(GObject.TYPE_STRING)
widget.set_model(widget.model)
widget.set_entry_text_column(0)
widget.completion = Gtk.EntryCompletion()
widget.completion.set_model(widget.model)
widget.completion.set_text_column(0)
widget.completion.set_minimum_key_length(1)
widget.entry.set_completion(widget.completion)
def set_values(vlist):
"Sets the values for the dropdown"
widget.model.clear()
for item in vlist:
widget.model.append((item,))
widget.set_values = set_values
if userdata is not None:
widget.set_values(userdata)
# CONTAINERS #
class HBox(Gtk.HBox):
"A horizontal container"
def __init__(self, *args):
Gtk.HBox.__init__(self)
self.set_spacing(6)
self.set_border_width(0)
for widget in args:
self.pack_start(widget, True, True, 0)
class HButtonBox(Gtk.HButtonBox):
"A horizontal button box"
def __init__(self, *args):
Gtk.HButtonBox.__init__(self)
self.set_layout(Gtk.ButtonBoxStyle.SPREAD)
self.set_spacing(12)
for button in args:
self.pack_start(button, True, True, 0)
class VBox(Gtk.VBox):
"A vertical container"
def __init__(self, *args):
Gtk.VBox.__init__(self)
self.set_spacing(6)
self.set_border_width(0)
for widget in args:
self.pack_start(widget, True, True, 0)
class Notebook(Gtk.Notebook):
"A notebook (tabbed view)"
def __init__(self):
Gtk.Notebook.__init__(self)
def create_page(self, title):
"Creates a notebook page"
page = NotebookPage()
self.append_page(page, Label(title))
return page
class NotebookPage(VBox):
"A notebook page"
def __init__(self):
VBox.__init__(self)
self.sizegroup = Gtk.SizeGroup(mode=Gtk.SizeGroupMode.HORIZONTAL)
self.set_border_width(12)
self.set_spacing(18)
def add_section(self, title, description = None):
"Adds an input section to the notebook"
section = InputSection(title, description, self.sizegroup)
self.pack_start(section, False, False, 0)
return section
class ScrolledWindow(Gtk.ScrolledWindow):
"A scrolled window for partially displaying a child widget"
def __init__(self, contents = None):
Gtk.ScrolledWindow.__init__(self)
self.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
if contents is not None:
self.add(contents)
class Toolbar(Gtk.Toolbar):
"A Toolbar subclass"
def append_space(self):
"Appends a space to the toolbar"
space = Gtk.SeparatorToolItem()
space.set_draw(False)
self.insert(space, -1)
def append_widget(self, widget, tooltip = None):
"Appends a widget to the toolbar"
toolitem = Gtk.ToolItem()
toolitem.add(widget)
if tooltip != None:
toolitem.set_tooltip_text(tooltip)
self.insert(toolitem, -1)
class InputSection(VBox):
"A section of input fields"
def __init__(self, title = None, description = None, sizegroup = None):
VBox.__init__(self)
self.title = None
self.desc = None
self.sizegroup = sizegroup
if title is not None:
self.title = Label("<span weight=\"bold\">%s</span>" % util.escape_markup(title))
self.pack_start(self.title, False, True, 0)
if description is not None:
self.desc = Label(util.escape_markup(description))
self.pack_start(self.desc, False, True, 0)
if sizegroup is None:
self.sizegroup = Gtk.SizeGroup(mode=Gtk.SizeGroupMode.HORIZONTAL)
def append_widget(self, title, widget, indent = True):
"Adds a widget to the section"
row = HBox()
row.set_spacing(12)
self.pack_start(row, False, False, 0)
if self.title is not None and indent == True:
row.pack_start(Label(""), False, False, 0)
if title is not None:
label = Label("%s:" % util.escape_markup(title))
self.sizegroup.add_widget(label)
row.pack_start(label, False, False, 0)
row.pack_start(widget, True, True, 0)
def clear(self):
"Removes all widgets"
for child in self.get_children():
if child not in (self.title, self.desc):
child.destroy()
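# Illustrative use of the containers above (hypothetical labels and variables,
# not taken from this file):
#   page = notebook.create_page(_('Account'))
#   section = page.add_section(_('Login details'))
#   section.append_widget(_('Username'), Entry())
#   section.append_widget(_('Password'), PasswordEntry(None, cfg, clipboard))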
# DISPLAY WIDGETS #
class EventBox(Gtk.EventBox):
"A container which handles events for a widget (for tooltips etc)"
def __init__(self, widget = None):
Gtk.EventBox.__init__(self)
if widget is not None:
self.add(widget)
class Image(Gtk.Image):
"A widget for displaying an image"
def __init__(self, stock = None, size = None):
Gtk.Image.__init__(self)
if stock is not None:
self.set_from_icon_name(stock, size)
class ImageLabel(HBox):
"A label with an image"
def __init__(self, text = None, stock = None, size = ICON_SIZE_LABEL):
HBox.__init__(self)
self.image = Image()
self.pack_start(self.image, False, True, 0)
self.label = Label(text)
self.pack_start(self.label, True, True, 0)
if text != None:
self.set_text(text)
if stock != None:
self.set_stock(stock, size)
def set_ellipsize(self, ellipsize):
"Sets label ellisization"
self.label.set_ellipsize(ellipsize)
def set_stock(self, stock, size):
"Sets the image"
self.image.set_from_icon_name(stock, size)
def set_text(self, text):
"Sets the label text"
self.label.set_text(text)
class Label(Gtk.Label):
"A text label"
def __init__(self, text = None, justify = Gtk.Justification.LEFT):
Gtk.Label.__init__(self)
self.set_text(text)
self.set_justify(justify)
self.set_use_markup(True)
self.set_line_wrap(True)
self.set_valign(Gtk.Align.CENTER)
if justify == Gtk.Justification.LEFT:
self.set_halign(Gtk.Align.START)
elif justify == Gtk.Justification.CENTER:
self.set_halign(Gtk.Align.CENTER)
elif justify == Gtk.Justification.RIGHT:
self.set_halign(Gtk.Align.END)
def set_text(self, text):
"Sets the text of the label"
if text is None:
Gtk.Label.set_text(self, "")
else:
Gtk.Label.set_markup(self, text)
class PasswordLabel(EventBox):
"A label for displaying passwords"
def __init__(self, password = "", cfg = None, clipboard = None, justify = Gtk.Justification.LEFT): # nosec
EventBox.__init__(self)
self.password = util.unescape_markup(password)
self.config = cfg
self.clipboard = clipboard
self.label = Label(util.escape_markup(self.password), justify)
self.label.set_selectable(True)
self.add(self.label)
self.show_password(cfg.get_boolean("view-passwords"))
self.config.connect('changed::view-passwords', lambda w, k: self.show_password(w.get_boolean(k)))
self.connect("button-press-event", self.__cb_button_press)
self.connect("drag-data-get", self.__cb_drag_data_get)
def __cb_drag_data_get(self, widget, context, selection, info, timestamp, data = None):
"Provides data for a drag operation"
selection.set_text(self.password, -1)
def __cb_button_press(self, widget, data = None):
"Populates the popup menu"
if self.label.get_selectable() == True:
return False
elif data.button == 3:
menu = Menu()
menuitem = ImageMenuItem(Gtk.STOCK_COPY, _('Copy password'))
menuitem.connect("activate", lambda w: self.clipboard.set([self.password], True))
menu.append(menuitem)
menu.show_all()
menu.popup_at_pointer(data)
return True
def set_ellipsize(self, ellipsize):
"Sets ellipsize for the label"
self.label.set_ellipsize(ellipsize)
def show_password(self, show = True):
"Sets whether to display the password"
if show == True:
self.label.set_text(util.escape_markup(self.password))
self.label.set_selectable(True)
self.drag_source_unset()
else:
self.label.set_text(Gtk.Entry().get_invisible_char()*6)
self.label.set_selectable(False)
self.drag_source_set(
Gdk.ModifierType.BUTTON1_MASK,
[
Gtk.TargetEntry.new("text/plain", 0, 0),
Gtk.TargetEntry.new("TEXT", 0, 1),
Gtk.TargetEntry.new("STRING", 0, 2),
Gtk.TargetEntry.new("COMPOUND TEXT", 0, 3),
Gtk.TargetEntry.new("UTF8_STRING", 0, 4)
],
Gdk.DragAction.COPY
)
class EditableTextView(Gtk.ScrolledWindow):
"An editable text view"
def __init__(self, buffer = None, text = None):
Gtk.ScrolledWindow.__init__(self)
self.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
self.set_shadow_type(Gtk.ShadowType.ETCHED_OUT)
self.textview = Gtk.TextView(buffer=buffer)
self.textbuffer = self.textview.get_buffer()
self.add(self.textview)
if text is not None:
self.textview.get_buffer().set_text(text)
def set_text(self, text):
"Sets the entry contents"
if text is None:
self.textbuffer.set_text("")
self.textbuffer.set_text(text)
def get_text(self):
"Returns the text of the entry"
return self.textbuffer.get_text(self.textbuffer.get_start_iter(), self.textbuffer.get_end_iter(), False)
class TextView(Gtk.TextView):
"A text view"
def __init__(self, buffer = None, text = None):
Gtk.TextView.__init__(self)
self.set_buffer(buffer)
self.set_editable(False)
self.set_wrap_mode(Gtk.WrapMode.NONE)
self.set_cursor_visible(False)
self.modify_font(Pango.FontDescription("Monospace"))
if text is not None:
self.get_buffer().set_text(text)
# TEXT ENTRIES #
class Entry(Gtk.Entry):
"A normal text entry"
def __init__(self, text = None):
Gtk.Entry.__init__(self)
self.set_activates_default(True)
self.set_text(text)
def set_text(self, text):
"Sets the entry contents"
if text is None:
text = ""
Gtk.Entry.set_text(self, text)
class FileEntry(HBox):
"A file entry"
def __init__(self, title = None, file = None, type = Gtk.FileChooserAction.OPEN):
HBox.__init__(self)
self.title = title is not None and title or _('Select File')
self.type = type
self.entry = Entry()
self.entry.connect("changed", lambda w: self.emit("changed"))
self.pack_start(self.entry, True, True, 0)
self.button = Button(_('Browse...'), self.__cb_filesel)
self.pack_start(self.button, False, False, 0)
if file is not None:
self.set_filename(file)
def __cb_filesel(self, widget, data = None):
"Displays a file selector when Browse is pressed"
try:
fsel = dialog.FileSelector(None, self.title, self.type)
file = self.get_filename()
if file != None:
fsel.set_filename(file)
self.set_filename(fsel.run())
except dialog.CancelError:
pass
def get_filename(self):
"Gets the current filename"
return io.file_normpath(self.entry.get_text())
def get_text(self):
"Wrapper to emulate Entry"
return self.entry.get_text()
def set_filename(self, filename):
"Sets the current filename"
self.entry.set_text(io.file_normpath(filename))
self.entry.set_position(-1)
def set_text(self, text):
"Wrapper to emulate Entry"
self.entry.set_text(text)
GObject.type_register(FileEntry)
GObject.signal_new("changed", FileEntry, GObject.SignalFlags.ACTION,
GObject.TYPE_BOOLEAN, ())
class PasswordEntry(Gtk.Entry):
"An entry for editing a password (follows the 'show passwords' preference)"
def __init__(self, password = None, cfg = None, clipboard = None):
Gtk.Entry.__init__(self)
self.set_visibility(False)
if password:
self.set_text(password)
self.autocheck = True
self.config = cfg
self.clipboard = clipboard
self.connect("changed", self.__cb_check_password)
self.connect("populate-popup", self.__cb_popup)
if cfg != None:
self.config.bind('view-passwords', self, "visibility", Gio.SettingsBindFlags.DEFAULT)
def __cb_check_password(self, widget, data = None):
"Callback for changed, checks the password"
if self.autocheck == False:
return
password = self.get_text()
if len(password) == 0:
self.set_icon_from_icon_name(Gtk.EntryIconPosition.SECONDARY, None)
else:
try:
util.check_password(password)
except ValueError as reason:
self.set_password_strong(False, _('The password %s') % str(reason))
else:
self.set_password_strong(True, _('The password seems good'))
def __cb_popup(self, widget, menu):
"Populates the popup menu"
if self.clipboard != None:
menuitem = ImageMenuItem(Gtk.STOCK_COPY, _('Copy password'))
menuitem.connect("activate", lambda w: self.clipboard.set([self.get_text()], True))
menu.insert(menuitem, 2)
menu.show_all()
def set_password_strong(self, strong, reason = ""):
"Sets whether the password is strong or not"
self.set_icon_from_icon_name(Gtk.EntryIconPosition.SECONDARY, strong and STOCK_PASSWORD_STRONG or STOCK_PASSWORD_WEAK)
self.set_icon_tooltip_text(Gtk.EntryIconPosition.SECONDARY, reason)
class PasswordEntryGenerate(HBox):
"A password entry with a generator button"
def __init__(self, password = None, cfg = None, clipboard = None):
HBox.__init__(self)
self.config = cfg
self.pwentry = PasswordEntry(password, cfg, clipboard)
self.pack_start(self.pwentry, True, True, 0)
self.button = Button(_('Generate'), lambda w: self.generate())
self.pack_start(self.button, False, False, 0)
self.entry = self.pwentry
def generate(self):
"Generates a password for the entry"
password = util.generate_password(self.config.get_int("passwordgen-length"), self.config.get_boolean("passwordgen-punctuation"))
self.pwentry.set_text(password)
def get_text(self):
"Wrapper for the entry"
return self.pwentry.get_text()
def set_text(self, text):
"Wrapper for the entry"
self.pwentry.set_text(text)
class SpinEntry(Gtk.SpinButton):
"An entry for numbers"
def __init__(self, adjustment = None, climb_rate = 0.0, digits = 0):
Gtk.SpinButton.__init__(self)
self.configure(adjustment, climb_rate, digits)
self.set_increments(1, 5)
self.set_range(0, 100000)
self.set_numeric(True)
# BUTTONS #
class Button(Gtk.Button):
"A normal button"
def __init__(self, label, callback = None):
Gtk.Button.__init__(self, label=label)
if callback is not None:
self.connect("clicked", callback)
class CheckButton(Gtk.CheckButton):
"A checkbutton"
def __init__(self, label = None):
Gtk.CheckButton.__init__(self, label=label)
class DropDown(Gtk.ComboBox):
"A dropdown button"
def __init__(self, icons = False):
Gtk.ComboBox.__init__(self)
self.model = Gtk.ListStore(GObject.TYPE_STRING, GObject.TYPE_STRING, GObject.TYPE_PYOBJECT)
self.set_model(self.model)
if icons == True:
cr = Gtk.CellRendererPixbuf()
cr.set_fixed_size(Gtk.icon_size_lookup(ICON_SIZE_DROPDOWN)[1] + 5, -1)
self.pack_start(cr, False)
self.add_attribute(cr, "icon-name", 1)
cr = Gtk.CellRendererText()
self.pack_start(cr, True)
self.add_attribute(cr, "text", 0)
self.connect("realize", self.__cb_show)
def __cb_show(self, widget, data = None):
"Callback for when widget is shown"
if self.get_active() == -1:
self.set_active(0)
def append_item(self, text, stock = None, data = None):
"Appends an item to the dropdown"
self.model.append((text, stock, data))
def delete_item(self, index):
"Removes an item from the dropdown"
if self.model.iter_n_children(None) > index:
iter = self.model.iter_nth_child(None, index)
self.model.remove(iter)
def get_active_item(self):
"Returns a tuple with data for the current item"
iter = self.model.iter_nth_child(None, self.get_active())
return self.model.get(iter, 0, 1, 2)
def get_item(self, index):
"Returns data for an item"
return self.model.get(self.model.iter_nth_child(None, index), 0, 1, 2)
def get_num_items(self):
"Returns the number of items in the dropdown"
return self.model.iter_n_children(None)
def insert_item(self, index, text, stock = None, data = None):
"Inserts an item in the dropdown"
self.model.insert(index, (text, stock, data))
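# Illustrative use of DropDown (hypothetical items, not taken from this file):
#   combo = DropDown(icons=True)
#   combo.append_item(_('Website'), STOCK_ENTRY_WEBSITE)
#   combo.set_active(0)
#   text, icon, data = combo.get_active_item()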
class EntryDropDown(DropDown):
"An entry type dropdown"
def __init__(self):
DropDown.__init__(self, True)
for e in entry.ENTRYLIST:
if e != entry.FolderEntry:
self.append_item(e().typename, e().icon, e)
def get_active_type(self):
"Get the currently active type"
item = self.get_active_item()
if item is not None:
return item[2]
def set_active_type(self, entrytype):
"Set the active type"
for i in range(self.model.iter_n_children(None)):
iter = self.model.iter_nth_child(None, i)
if self.model.get_value(iter, 2) == entrytype:
self.set_active(i)
class FileButton(Gtk.FileChooserButton):
"A file chooser button"
def __init__(self, title = None, file = None, type = Gtk.FileChooserAction.OPEN):
Gtk.FileChooserButton.__init__(self, title)
self.set_action(type)
self.set_local_only(False)
if file != None:
self.set_filename(file)
def get_filename(self):
"Gets the filename"
return io.file_normpath(self.get_uri())
def set_filename(self, filename):
"Sets the filename"
filename = io.file_normpath(filename)
if filename != io.file_normpath(self.get_filename()):
Gtk.FileChooserButton.set_filename(self, filename)
class LinkButton(Gtk.LinkButton):
"A link button"
def __init__(self, url, label):
Gtk.LinkButton.__init__(self, uri=url, label=label)
self.set_halign(Gtk.Align.START)
self.label = self.get_children()[0]
"If URI is too long reduce it for the label"
if len(label) > 60:
self.label.set_text(label[0:59] + " (...)")
def set_ellipsize(self, ellipsize):
"Sets ellipsize for label"
self.label.set_ellipsize(ellipsize)
def set_justify(self, justify):
"Sets justify for label"
self.label.set_justify(justify)
class RadioButton(Gtk.RadioButton):
"A radio button"
def __init__(self, group, label):
Gtk.RadioButton.__init__(self, group, label)
# MENUS AND MENU ITEMS #
class ImageMenuItem(Gtk.ImageMenuItem):
"A menuitem with a stock icon"
def __init__(self, stock, text = None):
Gtk.ImageMenuItem.__init__(self, stock)
self.label = self.get_children()[0]
self.image = self.get_image()
if text is not None:
self.set_text(text)
def set_stock(self, stock):
"Set the stock item to use as icon"
self.image.set_from_icon_name(stock, Gtk.IconSize.MENU)
def set_text(self, text):
"Set the item text"
self.label.set_text(text)
class Menu(Gtk.Menu):
"A menu"
def __init__(self):
Gtk.Menu.__init__(self)
# MISCELLANEOUS WIDGETS #
class TreeView(Gtk.TreeView):
"A tree display"
def __init__(self, model):
Gtk.TreeView.__init__(self, model=model)
self.set_headers_visible(False)
self.model = model
self.__cbid_drag_motion = None
self.__cbid_drag_end = None
self.selection = self.get_selection()
self.selection.set_mode(Gtk.SelectionMode.MULTIPLE)
self.connect("button-press-event", self.__cb_buttonpress)
self.connect("key-press-event", self.__cb_keypress)
def __cb_buttonpress(self, widget, data):
"Callback for handling mouse clicks"
path = self.get_path_at_pos(int(data.x), int(data.y))
# handle click outside entry
if path is None:
self.unselect_all()
# handle doubleclick
if data.button == 1 and data.type == Gdk.EventType._2BUTTON_PRESS and path != None:
iter = self.model.get_iter(path[0])
self.toggle_expanded(iter)
if iter != None:
self.emit("doubleclick", iter)
# display popup on right-click
elif data.button == 3:
if path != None and self.selection.iter_is_selected(self.model.get_iter(path[0])) == False:
self.set_cursor(path[0], path[1], False)
self.emit("popup", data)
return True
# handle drag-and-drop of multiple rows
elif self.__cbid_drag_motion is None and data.button in (1, 2) and data.type == Gdk.EventType.BUTTON_PRESS and path != None and self.selection.iter_is_selected(self.model.get_iter(path[0])) == True and len(self.get_selected()) > 1:
self.__cbid_drag_motion = self.connect("motion-notify-event", self.__cb_drag_motion, data.copy())
self.__cbid_drag_end = self.connect("button-release-event", self.__cb_button_release, data.copy())
return True
def __cb_button_release(self, widget, data, userdata = None):
"Ends a drag"
self.emit("button-press-event", userdata)
self.__drag_check_end()
def __cb_drag_motion(self, widget, data, userdata = None):
"Monitors drag motion"
if self.drag_check_threshold(int(userdata.x), int(userdata.y), int(data.x), int(data.y)) == True:
self.__drag_check_end()
uritarget = Gtk.TargetEntry.new("revelation/treerow", Gtk.TargetFlags.SAME_APP | Gtk.TargetFlags.SAME_WIDGET, 0)
self.drag_begin_with_coordinates(Gtk.TargetList([uritarget]), Gdk.DragAction.MOVE, userdata.button.button, userdata, userdata.x, userdata.y)
def __cb_keypress(self, widget, data = None):
"Callback for handling key presses"
# expand/collapse node on space
if data.keyval == Gdk.KEY_space:
self.toggle_expanded(self.get_active())
def __drag_check_end(self):
"Ends a drag check"
self.disconnect(self.__cbid_drag_motion)
self.disconnect(self.__cbid_drag_end)
self.__cbid_drag_motion = None
self.__cbid_drag_end = None
def collapse_row(self, iter):
"Collapse a tree row"
Gtk.TreeView.collapse_row(self, self.model.get_path(iter))
def expand_row(self, iter):
"Expand a tree row"
if iter is not None and self.model.iter_n_children(iter) > 0:
Gtk.TreeView.expand_row(self, self.model.get_path(iter), False)
def expand_to_iter(self, iter):
"Expand all items up to and including a given iter"
path = self.model.get_path(iter)
for i in range(len(path)):
iter = self.model.get_iter(path[0:i])
self.expand_row(iter)
def get_active(self):
"Get the currently active row"
if self.model is None:
return None
iter = self.model.get_iter(self.get_cursor()[0])
if iter is None or self.selection.iter_is_selected(iter) == False:
return None
return iter
def get_selected(self):
"Get a list of currently selected rows"
list = []
self.selection.selected_foreach(lambda model, path, iter: list.append(iter))
return list
def select(self, iter):
"Select a particular row"
if iter is None:
self.unselect_all()
else:
self.expand_to_iter(iter)
self.set_cursor(self.model.get_path(iter))
def select_all(self):
"Select all rows in the tree"
self.selection.select_all()
self.selection.emit("changed")
self.emit("cursor_changed")
def set_model(self, model):
"Change the tree model which is being displayed"
Gtk.TreeView.set_model(self, model)
self.model = model
def toggle_expanded(self, iter):
"Toggle the expanded state of a row"
if iter is None:
return
elif self.row_expanded(self.model.get_path(iter)):
self.collapse_row(iter)
else:
self.expand_row(iter)
def unselect_all(self):
"Unselect all rows in the tree"
self.selection.unselect_all()
self.selection.emit("changed")
self.emit("cursor_changed")
self.emit("unselect_all")
GObject.signal_new("doubleclick", TreeView, GObject.SignalFlags.ACTION,
GObject.TYPE_BOOLEAN, (GObject.TYPE_PYOBJECT, ))
GObject.signal_new("popup", TreeView, GObject.SignalFlags.ACTION,
GObject.TYPE_BOOLEAN, (GObject.TYPE_PYOBJECT, ))
class EntryTree(TreeView):
"An entry tree"
def __init__(self, entrystore):
TreeView.__init__(self, entrystore)
column = Gtk.TreeViewColumn()
self.append_column(column)
cr = Gtk.CellRendererPixbuf()
column.pack_start(cr, False)
column.add_attribute(cr, "icon-name", data.COLUMN_ICON)
cr.set_property("stock-size", ICON_SIZE_TREEVIEW)
cr = Gtk.CellRendererText()
column.pack_start(cr, True)
column.add_attribute(cr, "text", data.COLUMN_NAME)
self.connect("doubleclick", self.__cb_doubleclick)
self.connect("row-expanded", self.__cb_row_expanded)
self.connect("row-collapsed", self.__cb_row_collapsed)
def __cb_doubleclick(self, widget, iter):
"Stop doubleclick emission on folder"
if type(self.model.get_entry(iter)) == entry.FolderEntry:
self.stop_emission("doubleclick")
def __cb_row_collapsed(self, object, iter, extra):
"Updates folder icons when collapsed"
self.model.folder_expanded(iter, False)
def __cb_row_expanded(self, object, iter, extra):
"Updates folder icons when expanded"
# make sure all children are collapsed (some may have lingering expand icons)
for i in range(self.model.iter_n_children(iter)):
child = self.model.iter_nth_child(iter, i)
if self.row_expanded(self.model.get_path(child)) == False:
self.model.folder_expanded(child, False)
self.model.folder_expanded(iter, True)
def set_model(self, model):
"Sets the model displayed by the tree view"
TreeView.set_model(self, model)
if model is None:
return
for i in range(model.iter_n_children(None)):
model.folder_expanded(model.iter_nth_child(None, i), False)
class Statusbar(Gtk.Statusbar):
"An application statusbar"
def __init__(self):
Gtk.Statusbar.__init__(self)
self.contextid = self.get_context_id("statusbar")
def clear(self):
"Clears the statusbar"
self.pop(self.contextid)
def set_status(self, text):
"Displays a text in the statusbar"
self.clear()
self.push(self.contextid, text or "")
# ACTION HANDLING #
class Action(Gtk.Action):
"UI Manager Action"
def __init__(self, name, label = None, tooltip = None, stock = "", important = False):
Gtk.Action.__init__(self, name, label, tooltip, stock)
if important == True:
self.set_property("is-important", True)
class ActionGroup(Gtk.ActionGroup):
"UI Manager Actiongroup"
def add_action(self, action, accel = None):
"Adds an action to the actiongroup"
if accel is None:
Gtk.ActionGroup.add_action(self, action)
else:
self.add_action_with_accel(action, accel)
class ToggleAction(Gtk.ToggleAction):
"A toggle action item"
def __init__(self, name, label, tooltip = None, stock = None):
Gtk.ToggleAction.__init__(self, name, label, tooltip, stock)
class UIManager(Gtk.UIManager):
"UI item manager"
def __init__(self):
Gtk.UIManager.__init__(self)
self.connect("connect-proxy", self.__cb_connect_proxy)
def __cb_connect_proxy(self, uimanager, action, widget):
"Callback for connecting proxies to an action"
if type(widget) in (Gtk.MenuItem, Gtk.ImageMenuItem, Gtk.CheckMenuItem):
widget.tooltip = action.get_property("tooltip")
else:
widget.set_property("label", widget.get_property("label").replace("...", ""))
def add_ui_from_file(self, file):
"Loads ui from a file"
try:
Gtk.UIManager.add_ui_from_file(self, file)
except GObject.GError:
raise IOError
def append_action_group(self, actiongroup):
"Appends an action group"
Gtk.UIManager.insert_action_group(self, actiongroup, len(self.get_action_groups()))
def get_action(self, name):
"Looks up an action in the managers actiongroups"
for actiongroup in self.get_action_groups():
action = actiongroup.get_action(name)
if action is not None:
return action
def get_action_group(self, name):
"Returns the named action group"
for actiongroup in self.get_action_groups():
if actiongroup.get_name() == name:
return actiongroup
# APPLICATION COMPONENTS #
class AppWindow(Gtk.ApplicationWindow):
"An application window"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
class App(Gtk.Application):
"An application"
def __init__(self, appname):
Gtk.Application.__init__(self,
application_id='info.olasagasti.revelation')
self.toolbars = {}
def __connect_menu_statusbar(self, menu):
"Connects a menus items to the statusbar"
for item in menu.get_children():
if isinstance(item, Gtk.MenuItem) == True:
item.connect("select", self.cb_menudesc, True)
item.connect("deselect", self.cb_menudesc, False)
def cb_menudesc(self, item, show):
"Displays menu descriptions in the statusbar"
if show == True:
self.statusbar.set_status(item.get_label())
else:
self.statusbar.clear()
def __cb_toolbar_hide(self, widget, name):
"Hides the toolbar dock when the toolbar is hidden"
if name in self.toolbars:
self.toolbars[name].hide()
def __cb_toolbar_show(self, widget, name):
"Shows the toolbar dock when the toolbar is shown"
if name in self.toolbars:
self.toolbars[name].show()
def add_toolbar(self, toolbar, name, band):
"Adds a toolbar"
self.toolbars[name] = toolbar
self.main_vbox.pack_start(toolbar, False, True, 0)
toolbar.connect("show", self.__cb_toolbar_show, name)
toolbar.connect("hide", self.__cb_toolbar_hide, name)
toolbar.show_all()
def get_title(self):
"Returns the app title"
title = Gtk.Window.get_title(self.window)
return title.replace(" - " + config.APPNAME, "")
def popup(self, menu, button, time):
"Displays a popup menu"
# get Gtk.Menu
gmenu = Gtk.Menu.new_from_model(menu)
gmenu.attach_to_widget(self.window, None)
# transfer the tooltips from Gio.Menu to Gtk.Menu
menu_item_index = 0
menu_items = gmenu.get_children()
for sect in range(menu.get_n_items()):
for item in range(menu.get_item_link(sect, 'section').get_n_items()):
tooltip_text = menu.get_item_link(sect, 'section').get_item_attribute_value(item, 'tooltip', None)
if tooltip_text:
tooltip_text = tooltip_text.unpack()
menu_items[menu_item_index].set_tooltip_text(tooltip_text)
menu_item_index += 1
# skip section separator
menu_item_index += 1
self.__connect_menu_statusbar(gmenu)
gmenu.popup_at_pointer()
def set_menus(self, menubar):
"Sets the menubar for the application"
for item in menubar.get_children():
self.__connect_menu_statusbar(item.get_submenu())
self.main_vbox.pack_start(menubar, False, True, 0)
def set_title(self, title):
"Sets the window title"
Gtk.Window.set_title(self.window, title + " - " + config.APPNAME)
def set_toolbar(self, toolbar):
"Sets the application toolbar"
self.main_vbox.pack_start(toolbar, False, True, 0)
toolbar.connect("show", self.__cb_toolbar_show, "Toolbar")
toolbar.connect("hide", self.__cb_toolbar_hide, "Toolbar")
def set_contents(self, widget):
self.main_vbox.pack_start(widget, True, True, 0)
class EntryView(VBox):
"A component for displaying an entry"
def __init__(self, cfg = None, clipboard = None):
VBox.__init__(self)
self.set_spacing(12)
self.set_border_width(12)
self.config = cfg
self.clipboard = clipboard
self.entry = None
def clear(self, force = False):
"Clears the data view"
self.entry = None
for child in self.get_children():
child.destroy()
def display_entry(self, e):
"Displays info about an entry"
self.clear()
self.entry = e
if self.entry is None:
return
# set up metadata display
metabox = VBox()
self.pack_start(metabox)
label = ImageLabel(
"<span size=\"large\" weight=\"bold\">%s</span>" % util.escape_markup(e.name),
e.icon, ICON_SIZE_DATAVIEW
)
label.set_halign(Gtk.Align.CENTER)
label.set_valign(Gtk.Align.CENTER)
metabox.pack_start(label, True, True, 0)
label = Label("<span weight=\"bold\">%s</span>%s" % (e.typename + (e.description != "" and ": " or ""), util.escape_markup(e.description)), Gtk.Justification.CENTER)
metabox.pack_start(label, True, True, 0)
# set up field list
fields = [field for field in e.fields if field.value != ""]
if len(fields) > 0:
table = Gtk.Grid()
self.pack_start(table)
table.set_column_spacing(10)
table.set_row_spacing(5)
for rowindex, field in zip(range(len(fields)), fields):
label = Label("<span weight=\"bold\">%s: </span>" % util.escape_markup(field.name))
label.set_hexpand(True)
table.attach(label, 0, rowindex, 1, 1)
widget = generate_field_display_widget(field, self.config, self.clipboard)
widget.set_hexpand(True)
table.attach(widget, 1, rowindex, 1, 1)
# notes
label = Label("<span weight=\"bold\">%s</span>%s" % ((e.notes != "" and _("Notes: ") or ""),
util.escape_markup(e.notes)), Gtk.Justification.LEFT)
self.pack_start(label)
# display updatetime
if type(e) != entry.FolderEntry:
label = Label((_('Updated %s ago') + "\n%s") % (util.time_period_rough(e.updated, time.time()), time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(e.updated))), Gtk.Justification.CENTER)
self.pack_start(label)
self.show_all()
def pack_start(self, widget):
"Adds a widget to the data view"
widget.set_halign(Gtk.Align.CENTER)
widget.set_valign(Gtk.Align.CENTER)
VBox.pack_start(self, widget, False, False, 0)
class Searchbar(Toolbar):
"A toolbar for easy searching"
def __init__(self):
Toolbar.__init__(self)
self.entry = Gtk.SearchEntry()
self.entry.set_tooltip_text(_('Text to search for'))
self.dropdown = EntryDropDown()
self.dropdown.insert_item(0, _('Any type'), "help-about")
box = Gtk.Box.new(Gtk.Orientation.HORIZONTAL, 0)
Gtk.StyleContext.add_class(box.get_style_context(), "linked")
self.button_prev = Gtk.Button.new_from_icon_name(STOCK_PREVIOUS,
Gtk.IconSize.BUTTON)
self.button_prev.set_tooltip_text(_('Find the previous match'))
self.button_next = Gtk.Button.new_from_icon_name(STOCK_NEXT,
Gtk.IconSize.BUTTON)
self.button_next.set_tooltip_text(_('Find the next match'))
box.add(self.entry)
box.add(self.button_prev)
box.add(self.button_next)
box.add(self.dropdown)
self.append_widget(box)
self.connect("show", self.__cb_show)
self.entry.connect("changed", self.__cb_entry_changed)
self.entry.connect("key-press-event", self.__cb_key_press)
self.button_next.set_sensitive(False)
self.button_prev.set_sensitive(False)
def __cb_entry_changed(self, widget, data = None):
"Callback for entry changes"
s = self.entry.get_text() != ""
self.button_next.set_sensitive(s)
self.button_prev.set_sensitive(s)
def __cb_key_press(self, widget, data = None):
"Callback for key presses"
# return
if data.keyval == Gdk.KEY_Return and widget.get_text() != "":
if (data.state & Gdk.ModifierType.SHIFT_MASK) == Gdk.ModifierType.SHIFT_MASK:
self.button_prev.activate()
else:
self.button_next.activate()
return True
def __cb_show(self, widget, data = None):
"Callback for widget display"
self.entry.select_region(0, -1)
self.entry.grab_focus()
| gpl-2.0 | 1,417,248,840,331,954,200 | 30.297526 | 239 | 0.599339 | false |
conklinbd/MovementAnalysis | TemplateInstall/PortalDeploy/arcrest/ags/featureservice.py | 1 | 15911 | """
Contains information regarding an ArcGIS Server Feature Server
"""
from re import search
from .._abstract.abstract import BaseAGSServer, BaseSecurityHandler
from ..security import security
import layer
import json
from ..common.geometry import SpatialReference
from ..common.general import FeatureSet
from ..common.filters import LayerDefinitionFilter, GeometryFilter, TimeFilter
########################################################################
class FeatureService(BaseAGSServer):
""" contains information about a feature service """
_url = None
_currentVersion = None
_serviceDescription = None
_hasVersionedData = None
_supportsDisconnectedEditing = None
_hasStaticData = None
_maxRecordCount = None
_supportedQueryFormats = None
_capabilities = None
_description = None
_copyrightText = None
_spatialReference = None
_initialExtent = None
_fullExtent = None
_allowGeometryUpdates = None
_units = None
_syncEnabled = None
_syncCapabilities = None
_editorTrackingInfo = None
_documentInfo = None
_layers = None
_tables = None
_enableZDefaults = None
_zDefault = None
_proxy_url = None
_proxy_port = None
_securityHandler = None
_json = None
_json_dict = None
#----------------------------------------------------------------------
def __init__(self, url, securityHandler=None,
initialize=False, proxy_url=None, proxy_port=None):
"""Constructor"""
self._proxy_url = proxy_url
self._proxy_port = proxy_port
self._url = url
if securityHandler is not None:
self._securityHandler = securityHandler
elif securityHandler is None:
pass
else:
raise AttributeError("Invalid Security Handler")
if not securityHandler is None and \
hasattr(securityHandler, 'referer_url'):
self._referer_url = securityHandler.referer_url
if initialize:
self.__init()
#----------------------------------------------------------------------
def __init(self):
""" loads the data into the class """
params = {"f": "json"}
json_dict = self._do_get(self._url, params,
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
self._json_dict = json_dict
self._json = json.dumps(self._json_dict)
attributes = [attr for attr in dir(self)
if not attr.startswith('__') and \
not attr.startswith('_')]
for k,v in json_dict.iteritems():
if k in attributes:
setattr(self, "_"+ k, v)
else:
print k, " - attribute not implemented for Feature Service."
#----------------------------------------------------------------------
@property
def administration(self):
"""returns the service admin object (if accessible)"""
from ..manageags._services import AGSService
url = self._url
res = search("/rest/", url).span()
addText = "/admin/"
part1 = url[:res[1]].lower().replace('/rest/', '')
part2 = url[res[1]:].lower().replace('/featureserver', ".mapserver")
adminURL = "%s%s%s" % (part1, addText, part2)
return AGSService(url=adminURL,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port,
initialize=False)
#----------------------------------------------------------------------
@property
def itemInfo(self):
"""gets the item's info"""
params = {"f" : "json"}
url = self._url + "/info/iteminfo"
return self._do_get(url=url, param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def downloadThumbnail(self, outPath):
"""downloads the items's thumbnail"""
url = self._url + "/info/thumbnail"
params = {}
return self._download_file(url=url,
save_path=outPath,
securityHandler=self._securityHandler,
file_name=None,
param_dict=params,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def downloadMetadataFile(self, outPath):
"""downloads the metadata file to a given path"""
fileName = "metadata.xml"
url = self._url + "/info/metadata"
params = {}
return self._download_file(url=url,
save_path=outPath,
file_name=fileName,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def __str__(self):
"""returns object as a string"""
if self._json is None:
self.__init()
return self._json
#----------------------------------------------------------------------
def __iter__(self):
"""returns the JSON response in key/value pairs"""
if self._json_dict is None:
self.__init()
for k,v in self._json_dict.iteritems():
yield [k,v]
#----------------------------------------------------------------------
@property
def securityHandler(self):
""" gets the security handler """
return self._securityHandler
#----------------------------------------------------------------------
@securityHandler.setter
def securityHandler(self, value):
""" sets the security handler """
if isinstance(value, BaseSecurityHandler):
if isinstance(value, security.AGSTokenSecurityHandler):
self._securityHandler = value
else:
pass
elif value is None:
self._securityHandler = None
self._token = None
#----------------------------------------------------------------------
@property
def maxRecordCount(self):
"""returns the max record count"""
if self._maxRecordCount is None:
self.__init()
return self._maxRecordCount
#----------------------------------------------------------------------
@property
    def supportedQueryFormats(self):
        """returns the supported query formats"""
if self._supportedQueryFormats is None:
self.__init()
return self._supportedQueryFormats
#----------------------------------------------------------------------
@property
def capabilities(self):
""" returns a list of capabilities """
if self._capabilities is None:
self.__init()
return self._capabilities
#----------------------------------------------------------------------
@property
def description(self):
""" returns the service description """
if self._description is None:
self.__init()
return self._description
#----------------------------------------------------------------------
@property
def copyrightText(self):
""" returns the copyright text """
if self._copyrightText is None:
self.__init()
return self._copyrightText
#----------------------------------------------------------------------
@property
def spatialReference(self):
""" returns the spatial reference """
if self._spatialReference is None:
self.__init()
return self._spatialReference
#----------------------------------------------------------------------
@property
def initialExtent(self):
""" returns the initial extent of the feature service """
if self._initialExtent is None:
self.__init()
return self._initialExtent
#----------------------------------------------------------------------
@property
def fullExtent(self):
""" returns the full extent of the feature service """
if self._fullExtent is None:
self.__init()
return self._fullExtent
#----------------------------------------------------------------------
@property
def allowGeometryUpdates(self):
""" informs the user if the data allows geometry updates """
if self._allowGeometryUpdates is None:
self.__init()
return self._allowGeometryUpdates
#----------------------------------------------------------------------
@property
def units(self):
""" returns the measurement unit """
if self._units is None:
self.__init()
return self._units
#----------------------------------------------------------------------
@property
def syncEnabled(self):
""" informs the user if sync of data can be performed """
if self._syncEnabled is None:
self.__init()
return self._syncEnabled
#----------------------------------------------------------------------
@property
def syncCapabilities(self):
""" type of sync that can be performed """
if self._syncCapabilities is None:
self.__init()
return self._syncCapabilities
#----------------------------------------------------------------------
@property
    def editorTrackingInfo(self):
        """returns the editor tracking information"""
if self._editorTrackingInfo is None:
self.__init()
return self._editorTrackingInfo
#----------------------------------------------------------------------
@property
    def documentInfo(self):
        """returns the document info"""
if self._documentInfo is None:
self.__init()
return self._documentInfo
#----------------------------------------------------------------------
@property
def layers(self):
""" gets the layers for the feature service """
if self._layers is None:
self.__init()
self._getLayers()
return self._layers
#----------------------------------------------------------------------
    def _getLayers(self):
        """ gets layers for the feature service """
params = {"f": "json"}
json_dict = self._do_get(self._url, params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
self._layers = []
if json_dict.has_key("layers"):
for l in json_dict["layers"]:
self._layers.append(
layer.FeatureLayer(url=self._url + "/%s" % l['id'],
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
)
#----------------------------------------------------------------------
@property
def tables(self):
"""lists the tables on the feature service"""
if self._tables is None:
self.__init()
return self._tables
#----------------------------------------------------------------------
@property
    def enableZDefaults(self):
        """returns whether Z default values are enabled"""
if self._enableZDefaults is None:
self.__init()
return self._enableZDefaults
#----------------------------------------------------------------------
@property
    def zDefault(self):
        """returns the default Z value"""
if self._zDefault is None:
self.__init()
return self._zDefault
#----------------------------------------------------------------------
@property
    def hasStaticData(self):
        """returns whether the service data is static"""
if self._hasStaticData is None:
self.__init()
return self._hasStaticData
#----------------------------------------------------------------------
@property
def currentVersion(self):
""" returns the map service current version """
if self._currentVersion is None:
self.__init()
return self._currentVersion
#----------------------------------------------------------------------
@property
def serviceDescription(self):
""" returns the serviceDescription of the map service """
if self._serviceDescription is None:
self.__init()
return self._serviceDescription
#----------------------------------------------------------------------
@property
def hasVersionedData(self):
""" returns boolean for versioned data """
if self._hasVersionedData is None:
self.__init()
return self._hasVersionedData
#----------------------------------------------------------------------
@property
def supportsDisconnectedEditing(self):
""" returns boolean is disconnecting editted supported """
if self._supportsDisconnectedEditing is None:
self.__init()
return self._supportsDisconnectedEditing
#----------------------------------------------------------------------
def query(self,
layerDefsFilter=None,
geometryFilter=None,
timeFilter=None,
returnGeometry=True,
returnIdsOnly=False,
returnCountOnly=False,
returnZ=False,
returnM=False,
outSR=None
):
"""
The Query operation is performed on a feature service resource
"""
qurl = self._url + "/query"
params = {"f": "json",
"returnGeometry": returnGeometry,
"returnIdsOnly": returnIdsOnly,
"returnCountOnly": returnCountOnly,
"returnZ": returnZ,
"returnM" : returnM}
if not layerDefsFilter is None and \
isinstance(layerDefsFilter, LayerDefinitionFilter):
params['layerDefs'] = layerDefsFilter.filter
if not geometryFilter is None and \
isinstance(geometryFilter, GeometryFilter):
gf = geometryFilter.filter
params['geometryType'] = gf['geometryType']
params['spatialRel'] = gf['spatialRel']
params['geometry'] = gf['geometry']
params['inSR'] = gf['inSR']
if not outSR is None and \
isinstance(outSR, SpatialReference):
params['outSR'] = outSR.asDictionary
if not timeFilter is None and \
isinstance(timeFilter, TimeFilter):
params['time'] = timeFilter.filter
res = self._do_get(url=qurl,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
if returnIdsOnly == False and returnCountOnly == False:
if isinstance(res, str):
jd = json.loads(res)
return [FeatureSet.fromJSON(json.dumps(lyr)) for lyr in jd['layers']]
elif isinstance(res, dict):
return [FeatureSet.fromJSON(json.dumps(lyr)) for lyr in res['layers']]
else:
return res
return res
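# Illustrative usage sketch (editor's addition, not part of the original module).
# The service URL below is a placeholder for any ArcGIS Server "FeatureServer"
# REST endpoint:
#
#   fs = FeatureService(url="http://example.com/arcgis/rest/services/Demo/FeatureServer")
#   print fs.layers          # lazily loads the FeatureLayer objects of the service
#   results = fs.query()     # with default arguments returns a FeatureSet per layer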
| apache-2.0 | -8,352,142,267,933,846,000 | 39.281013 | 86 | 0.452831 | false |
wbonnet/lffs | toolkit/dft/build_firmware_update.py | 1 | 8151 | #
# The contents of this file are subject to the Apache 2.0 license you may not
# use this file except in compliance with the License.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
#
# Copyright 2016 DFT project (http://www.debianfirmwaretoolkit.org).
# All rights reserved. Use is subject to license terms.
#
# Debian Firmware Toolkit is the new name of Linux Firmware From Scratch
# Copyright 2014 LFFS project (http://www.linuxfirmwarefromscratch.org).
#
#
# Contributors list :
#
# William Bonnet [email protected], [email protected]
#
#
""" This modules implements the functionnalities used to create the initramfs in charge of
setting up the firmware in memory at system boot.
"""
import logging
import os
import tarfile
from dft.cli_command import CliCommand
from dft.enumkey import Key
#
# Class BuildFirmwareUpdate
#
class BuildFirmwareUpdate(CliCommand):
"""This class implements method needed to create the archives containing
firmware update, and all the scripts needed at deployment.
"""
# -------------------------------------------------------------------------
#
# __init__
#
# -------------------------------------------------------------------------
def __init__(self, dft, project):
"""Default constructor
"""
# Initialize ancestor
CliCommand.__init__(self, dft, project)
# -------------------------------------------------------------------------
#
# build_update_archive
#
# -------------------------------------------------------------------------
def build_update_archive(self):
"""This method generates the final archive containing the elements of the
firmware. The main steps :
    . Creating a manifest describing the content items (hash value)
    . Creating a tar file containing all the data from the content subdirectory
    . Creating a detached signature using either gnupg or openssl
    The two generated files are stored under firmware (same level as content)
"""
# Check that there is a firmware configuration file first
if self.project.firmware is None:
self.project.logging.critical("The firmware configuration file is not defined in \
project file")
exit(1)
# Check that the target files and directories exists
if not os.path.isdir(self.project.get_firmware_content_directory()):
self.project.logging.critical("The firmware directory does not exist. Did you forget to run \
assemble_firmware command before ? Expected directory is " + \
self.project.get_firmware_content_directory())
exit(1)
# Create the tar archive
self.create_main_archive()
# Sign the main archive
self.sign_main_archive()
# And we are done
return
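  # Illustrative usage sketch (editor's addition, not part of the original module).
  # The 'dft' and 'project' objects are normally built by the surrounding CLI
  # tooling and are placeholders here:
  #
  #   builder = BuildFirmwareUpdate(dft, project)
  #   builder.build_update_archive()   # writes the archive and its detached
  #                                    # signature under the firmware output directory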
# -------------------------------------------------------------------------
#
# create_main_archive
#
# -------------------------------------------------------------------------
  def create_main_archive(self):
    """This method creates the manifest of the archive (a file listing all the
    files with their checksums). Then it creates the archive to be signed.
    All the files are stored under the firmware directory. In the end only two
    files should be produced: the archive, created by this method, and the
    detached signature, created by the next method.
"""
# Output current task to logs
logging.info("Creating the main archive")
# Creating the manifest
# Creating the archive
dest_archive = self.project.get_firmware_output_directory()
dest_archive += "/" + self.project.firmware[Key.CONFIGURATION.value][Key.FILENAME.value]
# Create the tar itself
tar = tarfile.open(name=dest_archive, mode='w')
# Iterate firmware content directory
for name in os.listdir(self.project.get_firmware_content_directory()):
# And add each and every file
filename = self.project.get_firmware_content_directory() + "/" + name
tar.add(filename, name, recursive=True)
    # Let's close the tar to flush it
tar.close()
logging.debug("Archive " + dest_archive + " has been created")
# -------------------------------------------------------------------------
#
# sign_main_archive
#
# -------------------------------------------------------------------------
  def sign_main_archive(self):
    """This method creates a digital signature of the archive, or a hash (which
    should not be used). Depending on the configuration, it can use either a hash
    function such as sha1sum, or signature software such as gnupg or openssl.
"""
# Output current task to logs
logging.info("Signing the main archive")
# Check if signature is activated
if Key.SECURITY.value in self.project.firmware:
if Key.SIGNATURE.value in self.project.firmware[Key.SECURITY.value]:
# Retrieve the signature tool to use
signing_tool = self.project.firmware[Key.SECURITY.value][Key.SIGNATURE.value]
# Generate the path to the archive and detached signature file
dest_archive = self.project.get_firmware_output_directory()
dest_archive += "/" + self.project.firmware[Key.CONFIGURATION.value][Key.FILENAME.value]
dest_sign = dest_archive + ".sig"
        # Remove any existing signature
if os.path.isfile(dest_sign):
os.remove(dest_sign)
self.project.logging.info("Existing " + dest_archive + " has been removed")
# Expected values are empty (means deactivated), gpg2 (or gnupg2), or openssl
if len(signing_tool) == 0:
self.project.logging.info("Signature is not activated in the security section of the \
firmware definition file")
# Are we using a known tool
elif signing_tool not in [Key.GPG.value, Key.GPG2.value, Key.OPENSSL.value]:
self.project.logging.critical("Unknown signing tool : " + signing_tool)
self.project.logging.critical("Valid values are gpg, gpg2, openssl or empty string to \
deactivate signature")
exit(1)
# Signing tool is valid, now let's generate the command to do it
# First case, are we using GnuPG 1 or 2
if signing_tool == Key.GPG.value or signing_tool == Key.GPG2.value:
# Now let's prepare the signing command
command = signing_tool
# Are we using armor format export ?
if Key.GPG_ARMOR_SIGNATURE.value in self.project.firmware[Key.SECURITY.value] and \
self.project.firmware[Key.SECURITY.value][Key.GPG_ARMOR_SIGNATURE.value]:
# Yes, let's append --armor to the command
command += " --armor"
command += " --output " + dest_sign + " --detach-sig " + dest_archive
self.execute_command(command)
self.project.logging.info(dest_archive + " has been created and signed successfully")
# Update archive has been signed, let's verify signature before finishing
command = signing_tool + " --verify " + dest_sign + " " + dest_archive
self.execute_command(command)
#TODO : add test case
          self.project.logging.info(dest_sign + " has been verified successfully")
# Or is it OpenSSL ?
elif signing_tool == Key.OPENSSL.value:
# TODO OpenSSL support
self.project.logging.critical("OpenSSL is not yet supported for firmware signature")
self.project.logging.critical("Please use GnuPG until support is available")
exit(1)
else:
self.project.logging.info("Signature is not activated in the security section of the \
firmware definition file")
else:
self.project.logging.error("The firmware definition file does not include a security section")
self.project.logging.error("Unable to create signature file. You should add security.")
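  # Editor's note: the detached signature produced by sign_main_archive() can also
  # be checked by hand, outside this tool, with standard GnuPG:
  #
  #   gpg --verify <archive>.sig <archive>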
| apache-2.0 | 8,113,699,216,175,973,000 | 38 | 100 | 0.617961 | false |
freesmartphone/framework | framework/subsystems/testing/testing.py | 1 | 4159 | #!/usr/bin/env python
"""
Dummy Subsystem for Testing Purposes
(C) 2008-2009 Michael 'Mickey' Lauer <[email protected]>
(C) 2008 Openmoko, Inc.
GPLv2 or later
Package: testing
Module: testing
"""
MODULE_NAME = "testing"
__version__ = "0.0.0"
from framework import resource
import dbus
import dbus.service
import gobject
import logging
logger = logging.getLogger( MODULE_NAME )
import time
DBUS_INTERFACE = "org.freesmartphone.Testing"
DBUS_OBJECT_PATH = "/org/freesmartphone/Testing"
#============================================================================#
class Resource( resource.Resource ):
#============================================================================#
def __init__( self, bus ):
self.path = DBUS_OBJECT_PATH
self.bus = bus
self.virgin = True
dbus.service.Object.__init__( self, bus, self.path )
resource.Resource.__init__( self, bus, "TEST" )
logger.info("%s %s at %s initialized.", self.__class__.__name__, __version__, self.path )
# default behaviour: everything works
self.catmap = { "enabling":"ok",
"disabling":"ok",
"suspending":"ok",
"resuming":"ok" }
#
# framework.Resource
#
def _enable( self, on_ok, on_error ):
logger.info( "enabling" )
time.sleep( 5.0 )
self._doit( "enabling", on_ok, on_error )
def _disable( self, on_ok, on_error ):
logger.info( "disabling" )
if self.virgin == True:
self.virgin = False
else:
time.sleep( 5.0 )
self._doit( "disabling", on_ok, on_error )
def _suspend( self, on_ok, on_error ):
logger.info( "suspending" )
time.sleep( 5.0 )
self._doit( "suspending", on_ok, on_error )
def _resume( self, on_ok, on_error ):
logger.info("resuming")
time.sleep( 5.0 )
self._doit( "resuming", on_ok, on_error )
def _doit( self, category, on_ok, on_error ):
action = self.catmap[ category ]
if action == "ok":
on_ok()
elif action == "error":
on_error( "unspecified" )
elif action == "veto":
on_error( resource.SuspendVeto( "not allowed to suspend this resource" ) )
else:
            raise ValueError( "unknown action '%s' for category '%s'" % ( action, category ) )
#
# dbus interface
#
@dbus.service.method( DBUS_INTERFACE, "", "",
async_callbacks=( "dbus_ok", "dbus_error" ) )
@resource.checkedmethod
def SetResourceBehaviour( self, category, behaviour, dbus_ok, dbus_error ):
try:
value = self.catmap[category]
except KeyError:
dbus_error( "unknown category, valid categories are: %s" % self.catmap.keys() )
else:
if behaviour not in "ok error veto".split():
                dbus_error( "unknown behaviour. valid behaviours are: ok error veto" )
                return
self.catmap[category] = str( behaviour )
dbus_ok()
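    # Illustrative client-side sketch (editor's addition, not part of the original
    # module). The well-known bus name below is an assumption and depends on how
    # the framework is configured:
    #
    #   import dbus
    #   bus = dbus.SystemBus()
    #   proxy = bus.get_object( "org.freesmartphone.frameworkd", DBUS_OBJECT_PATH )
    #   iface = dbus.Interface( proxy, DBUS_INTERFACE )
    #   iface.SetResourceBehaviour( "suspending", "veto" )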
@dbus.service.method( DBUS_INTERFACE, "", "aa{sv}",
async_callbacks=( "dbus_ok", "dbus_error" ) )
@resource.checkedmethod
def ReturnTest( self, dbus_ok, dbus_error ):
d = {"foo":"bar"}
dbus_ok( [d,d] )
@dbus.service.method( DBUS_INTERFACE, "", "",
async_callbacks=( "dbus_ok", "dbus_error" ) )
@resource.checkedmethod
def SignalTest( self, dbus_ok, dbus_error ):
self.Test( dict(yo="kurt") )
dbus_ok()
@dbus.service.signal( DBUS_INTERFACE, "a{sv}" )
def Test( self, asv ):
logger.info( "emitting signal" )
#============================================================================#
def factory(prefix, controller):
#============================================================================#
"""This is the magic function that will be called by the framework module manager"""
return [ Resource( controller.bus ) ]
#============================================================================#
if __name__ == "__main__":
#============================================================================#
pass
| gpl-2.0 | -4,311,006,182,056,977,400 | 31.492188 | 97 | 0.498678 | false |
ben-e-whitney/the-points-chart | utilities/views.py | 1 | 2100 | from django.shortcuts import render
import decimal
class TableElement:
"""
Represent an individual cell of an HTML table.
"""
def __init__(self, title=None, CSS_classes=None, content=None):
self.title = title
self.CSS_classes = CSS_classes
self.content = content
class TableParent(TableElement):
"""
Represent a collection of table elements.
The table elements could be TableElements or TableParents. Display is left
up to the template.
"""
def __init__(self, **kwargs):
self.children = kwargs.pop('children')
super().__init__(**kwargs)
def format_balance(balance=None, load=None,
endpoints=(-float('inf'), -0.35, -0.15, 0.15, 0.35, float('inf')),
possible_CSS_classes=('very_low_balance', 'low_balance', 'OK_balance',
'high_balance', 'very_high_balance')):
"""
Format the balance with styling according to the balance:load ratio.
"""
if (len(endpoints) != 1+len(possible_CSS_classes)):
        raise ValueError('endpoints must have exactly one more element than possible_CSS_classes')
def sign_int(balance):
"""
Return input with a sign character prepended.
"""
balance = int(balance.to_integral_value())
if balance >= 0:
return '+{bal}'.format(bal=balance)
else:
#Note that '−' is Unicode character U+2212, not a hyphen.
return '−{bal}'.format(bal=abs(balance))
try:
ratio = balance/load
except decimal.DivisionByZero:
ratio = endpoints[-1]+1 if balance >= 0 else endpoints[0]-1
except decimal.InvalidOperation:
ratio = 0
for i, CSS_class in enumerate(possible_CSS_classes):
if endpoints[i] <= ratio < endpoints[i+1]:
# We will use the value of `CSS_class`. If we never make it to this
            # block, `CSS_class` will end up `possible_CSS_classes[-1]`.
break
return {
'value': float(balance),
'formatted_value': sign_int(balance),
'html_title': 'Exact value: {val}'.format(val=balance),
'CSS_class': ' '.join(('balance', CSS_class)),
}
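# Illustrative usage sketch (editor's addition, not part of the original module).
# A balance of -2 against a load of 10 gives a ratio of -0.2, which falls in the
# [-0.35, -0.15) bucket and is therefore styled as 'low_balance':
#
#   import decimal
#   info = format_balance(balance=decimal.Decimal('-2'), load=decimal.Decimal('10'))
#   # info['formatted_value'] == '−2' (Unicode minus)
#   # info['CSS_class'] == 'balance low_balance'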
| gpl-3.0 | 7,983,015,587,877,793,000 | 30.757576 | 79 | 0.594943 | false |
OpenBeta/beta | apiserver/model.py | 1 | 9792 | from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.dialects import postgresql
from geoalchemy2 import Geometry
from sqlalchemy import func, ForeignKey, PrimaryKeyConstraint, event, Sequence
from sqlalchemy.schema import DropTable
from sqlalchemy.ext.compiler import compiles
import flask_login
from datetime import datetime
import json
import collections
from key_helper import *
db = SQLAlchemy()
FeatureSet = collections.namedtuple('FeatureSet', 'route, boundary', verbose=True)
class Route(db.Model):
__tablename__ = 'routes'
id = db.Column(db.Integer, primary_key=True)
geo = db.Column(Geometry(geometry_type='POINT', srid=4326), unique=True)
name = db.Column(db.Text, index=True)
grade = db.Column(db.Text)
grade_type = db.Column(db.Text, ForeignKey('grade_types.id'))
properties_json = db.Column(postgresql.JSONB)
def __init__(self, geojson):
self.geo = func.ST_SetSRID(func.ST_GeomFromGeoJSON(json.dumps(geojson['geometry'])), 4326)
self.name = geojson['properties']['name']
if 'grade' in geojson['properties']:
grade = geojson['properties']['grade']
self.grade = grade['value']
self.grade_type = grade['type']
else:
self.grade = ''
            self.grade_type = 'unknown'
self.properties_json = geojson['properties'] # store raw data
def __repr__(self):
return '<Route %r>' % self.name
def to_json(self):
return {
"type": "Feature",
"id": "route/{}".format(self.id),
"geometry": json.loads(db.session.scalar(func.ST_AsGeoJSON(self.geo))),
"properties": self.properties_json
}
def __eq__(self, other):
"""Override the default Equals behavior"""
if isinstance(other, self.__class__):
lhs = json.loads(db.session.scalar(func.ST_AsGeoJSON(self.geo)))
rhs = json.loads(db.session.scalar(func.ST_AsGeoJSON(other.geo)))
return lhs == rhs
return NotImplemented
def __ne__(self, other):
"""Define a non-equality test"""
return not self.__eq__(other)
def __hash__(self):
"""Override the default hash behavior (that returns the id or the object)"""
return hash(self.geo)
class GradeType(db.Model):
__tablename__ = 'grade_types'
id = db.Column(db.Text, primary_key=True, unique=True)
full_name = db.Column(db.Text)
def __init__(self, id, full_name):
self.id = id
self.full_name = full_name
@event.listens_for(GradeType.__table__, 'after_create')
def insert_initial_values(*args, **kwargs):
db.session.add(GradeType(id='unknown', full_name='Type Unknown'))
db.session.add(GradeType(id='yds', full_name='Yosemite Decimal System'))
db.session.add(GradeType(id='v', full_name='Hueco V-scale'))
db.session.commit()
event.listen(GradeType.__table__, 'after_create', insert_initial_values)
class GradeDetail(db.Model):
__tablename__ = 'grade_details'
id = db.Column(db.Text, ForeignKey('grade_types.id'))
value = db.Column(db.Text)
weight = db.Column(db.Integer)
__table_args__ = (PrimaryKeyConstraint(id, weight),)
class Boundary(db.Model):
__tablename__ = 'boundaries'
BOUNDARY_ID_SEQ = Sequence('boundary_id_seq', metadata=db.Model.metadata) # define sequence explicitly
boundary_id = db.Column(db.Integer, primary_key=True, server_default=BOUNDARY_ID_SEQ.next_value())
name = db.Column(db.Text, index=True)
is_top_level = db.Column(db.Boolean)
geo = db.Column(Geometry(geometry_type='POLYGON', srid=4326), unique=True)
properties_json = db.Column(postgresql.JSONB)
sys_period = db.Column(postgresql.TSTZRANGE, nullable=False)
def __init__(self, geojson):
props = geojson['properties']
self.name = props.get('name')
self.is_top_level = props.get('topLevel', False)
self.geo = func.ST_SetSRID(func.ST_GeomFromGeoJSON(json.dumps(geojson['geometry'])), 4326)
self.properties_json = props
def to_json(self):
return {
"type": "Feature",
"id": "area/{}".format(self.boundary_id),
"geometry": json.loads(db.session.scalar(func.ST_AsGeoJSON(self.geo))),
"properties": self.properties_json
}
class BoundaryHistory(db.Model):
__tablename__ = 'boundaries_history'
history_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
boundary_id =db.Column(db.Integer)
name = db.Column(db.Text)
is_top_level = db.Column(db.Boolean)
geo = db.Column(Geometry(geometry_type='POLYGON', srid=4326))
properties_json = db.Column(postgresql.JSONB)
sys_period = db.Column(postgresql.TSTZRANGE, nullable=False)
class APIUser(db.Model, flask_login.UserMixin):
__tablename__ = 'api_users'
uid = db.Column(db.Integer, primary_key=True, autoincrement=True)
email = db.Column(db.Text, primary_key=True, unique=True)
api_key = db.Column(db.Text, primary_key=True, unique=True)
active = db.Column(db.Boolean)
created_ts = db.Column(db.DateTime(timezone=True))
mod_ts = db.Column(db.DateTime(timezone=True))
def __init__(self, **kwargs):
self.active = kwargs['active']
self.email = kwargs['email']
now = datetime.utcnow()
self.created_ts = now
self.mpd_ts = now
self.api_key = genkey(userKeySigner)
@property
def is_active(self):
return self.is_active
@property
def is_authenticated(self):
return True
@property
def apikey(self):
return self.api_key
class AuditLog(db.Model):
__tablename__ = 'audit_log'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
op = db.Column(db.CHAR)
row_id = db.Column(db.Integer)
table_name = db.Column(db.VARCHAR(50))
user_id = db.Column(db.VARCHAR(30), nullable=False)
ip = db.Column(postgresql.INET)
ts = db.Column(db.DateTime(timezone=True))
def get_boundary_by_id(boundary_id):
row = db.session.query(Boundary).filter(Boundary.boundary_id == boundary_id).first()
if row is None:
return None
return row.to_json()
def search_within_boundary_by_id(boundary_id):
rows = db.session.query(Route, Boundary)\
.filter("ST_WITHIN(routes.geo, boundaries.geo)")\
.filter("boundaries.boundary_id=:id")\
.params(id=boundary_id).all()
return {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), rows)
}
def search_within_radius_in_meters(location, radius, route=True, boundary=False):
coordinates = location.split(",")
route_rows = list()
boundary_rows = list()
if route:
route_rows = db.session.query(Route).\
filter('ST_DistanceSphere(geo, ST_MakePoint(:lng,:lat))<=:r').\
params(lng=coordinates[0], lat=coordinates[1], r=radius).all()
if boundary:
boundary_rows = db.session.query(Boundary).\
filter('ST_DistanceSphere(geo, ST_MakePoint(:lng,:lat))<=:r').\
params(lng=coordinates[0], lat=coordinates[1], r=radius).all()
route_json = {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), route_rows)
}
boundary_json = {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), boundary_rows)
}
return FeatureSet(route=route_json, boundary=boundary_json)
def recent_activities(count, route=True, boundary=False):
hard_limit = 10;
route_rows = list()
boundary_rows = list()
if count > hard_limit:
count = hard_limit
if route:
route_rows = db.session.query(Route).\
order_by(Route.id.desc()).\
limit(count);
if boundary:
boundary_rows = db.session.query(Boundary).\
order_by(Boundary.boundary_id.desc()).\
limit(count);
route_json = {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), route_rows)
}
boundary_json = {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), boundary_rows)
}
return FeatureSet(route=route_json, boundary=boundary_json)
def setup_temporal_tables():
sql = ("CREATE TRIGGER boundary_history BEFORE INSERT OR UPDATE OR DELETE ON Boundaries "
"FOR EACH ROW EXECUTE PROCEDURE versioning('sys_period', 'boundaries_history', true)")
db.session.execute(sql)
sql = ("create or replace function trxn_history() returns trigger as $$ "
"BEGIN"
" IF (TG_OP = 'DELETE') THEN"
" INSERT INTO audit_log (op, row_id, table_name, user_id, ts) "
" VALUES('D', OLD.boundary_id, TG_TABLE_NAME, current_setting('vars.edited_by'),now());"
" ELSEIF (TG_OP='UPDATE') THEN"
" INSERT INTO audit_log (op, row_id, table_name, user_id, ts) "
" VALUES('U', OLD.boundary_id, TG_TABLE_NAME, NEW.properties_json->>'editedBy', now());"
" ELSEIF (TG_OP='INSERT') THEN"
" INSERT INTO audit_log (op, row_id, table_name, user_id, ts) "
" VALUES('I', NEW.boundary_id, TG_TABLE_NAME, NEW.properties_json->>'editedBy', now());"
" END IF;"
" RETURN null;"
"END;"
"$$ language plpgsql;")
db.session.execute(sql)
sql = ("CREATE TRIGGER audit AFTER INSERT OR UPDATE OR DELETE ON boundaries "
"FOR EACH ROW EXECUTE procedure trxn_history();")
db.session.execute(sql)
db.session.commit()
@compiles(DropTable, "postgresql")
def _compile_drop_table(element, compiler, **kwargs):
return compiler.visit_drop_table(element) + " CASCADE"
| gpl-3.0 | -8,655,785,475,700,712,000 | 33.478873 | 107 | 0.625613 | false |
SINGROUP/pycp2k | pycp2k/classes/_each304.py | 1 | 1114 | from pycp2k.inputsection import InputSection
class _each304(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Just_energy = None
self.Powell_opt = None
self.Qs_scf = None
self.Xas_scf = None
self.Md = None
self.Pint = None
self.Metadynamics = None
self.Geo_opt = None
self.Rot_opt = None
self.Cell_opt = None
self.Band = None
self.Ep_lin_solver = None
self.Spline_find_coeffs = None
self.Replica_eval = None
self.Bsse = None
self.Shell_opt = None
self.Tddft_scf = None
self._name = "EACH"
self._keywords = {'Bsse': 'BSSE', 'Cell_opt': 'CELL_OPT', 'Just_energy': 'JUST_ENERGY', 'Band': 'BAND', 'Xas_scf': 'XAS_SCF', 'Rot_opt': 'ROT_OPT', 'Replica_eval': 'REPLICA_EVAL', 'Tddft_scf': 'TDDFT_SCF', 'Shell_opt': 'SHELL_OPT', 'Md': 'MD', 'Pint': 'PINT', 'Metadynamics': 'METADYNAMICS', 'Geo_opt': 'GEO_OPT', 'Spline_find_coeffs': 'SPLINE_FIND_COEFFS', 'Powell_opt': 'POWELL_OPT', 'Qs_scf': 'QS_SCF', 'Ep_lin_solver': 'EP_LIN_SOLVER'}
| lgpl-3.0 | -3,167,931,075,788,382,700 | 41.846154 | 447 | 0.576302 | false |
xia2/xia2 | src/xia2/Wrappers/Dials/EstimateGain.py | 1 | 1399 | from xia2.Driver.DriverFactory import DriverFactory
from xia2.Schema.Interfaces.FrameProcessor import FrameProcessor
def EstimateGain(DriverType=None):
"""A factory for EstimateGainWrapper classes."""
DriverInstance = DriverFactory.Driver(DriverType)
class EstimateGainWrapper(DriverInstance.__class__, FrameProcessor):
def __init__(self):
super().__init__()
self.set_executable("dials.estimate_gain")
self._sweep_filename = None
self._kernel_size = None
self._gain = None
def set_sweep_filename(self, sweep_filename):
self._sweep_filename = sweep_filename
def set_kernel_size(self, kernel_size):
self._kernel_size = kernel_size
def get_gain(self):
return self._gain
def run(self):
self.clear_command_line()
assert self._sweep_filename is not None
self.add_command_line(self._sweep_filename)
if self._kernel_size is not None:
self.add_command_line("kernel_size=%i,%i" % self._kernel_size)
self.start()
self.close_wait()
self.check_for_errors()
for line in self.get_all_output():
if "Estimated gain:" in line:
self._gain = float(line.split(":")[-1].strip())
return EstimateGainWrapper()
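# Illustrative usage sketch (editor's addition, not part of the original module);
# the sweep filename is a placeholder for whatever sweep argument
# dials.estimate_gain accepts on the command line:
#
#   estimator = EstimateGain()
#   estimator.set_sweep_filename("imported.expt")
#   estimator.run()
#   gain = estimator.get_gain()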
| bsd-3-clause | 5,109,846,030,004,419,000 | 30.795455 | 78 | 0.591851 | false |
yueyongyue/saltshaker | shaker/highstate.py | 1 | 1467 | import os
class HighState(object):
def __init__(self):
if os.path.isfile('/etc/salt/master.d/file_roots.conf') == True:
os.system("mkdir -p /srv/salt")
else:
file_roots = file("/etc/salt/master.d/file_roots.conf", "w+")
add = ["file_roots:\n", " base:\n", " - /srv/salt\n"]
file_roots.writelines(add)
file_roots.close()
def list_sls(self, dir):
all_sls = {}
list_filename = os.listdir(dir)
for filename in list_filename:
print filename.split('.')
if os.path.isfile("/srv/salt/"+filename):
content = open(dir+filename).readlines()
name = filename.split('.')[0]
dic_sls = {name: content}
all_sls.update(dic_sls)
return all_sls
def add_sls(self, filename, content):
files = file("/srv/salt/"+filename+".sls", "w")
files.writelines(content)
files.close()
def del_sls(self, filename):
path = r"/srv/salt/" + filename + ".sls"
if os.path.exists(path):
os.remove(path)
else:
return "file not exit"
def main():
highstate = HighState()
a = highstate.list_sls("/srv/salt/")
#b = ['dfgdfgfgfdg\n',' fgfgfdgfgfgfg\n']
#a = highstate.add_sls("tomcat", b)
#print a
#filename = "test"
#a = highstate.del_sls(filename)
if __name__ == '__main__':
main()
| apache-2.0 | 7,973,569,440,805,450,000 | 28.34 | 73 | 0.521472 | false |
mkollaro/destroystack | destroystack/tools/server_manager.py | 1 | 7151 | # Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import destroystack.tools.state_restoration.metaopenstack as metaopenstack
import destroystack.tools.state_restoration.vagrant as vagrant
import destroystack.tools.state_restoration.manual as manual_restoration
import destroystack.tools.common as common
import destroystack.tools.servers as server_tools
# Possible roles that a server can have, depending what services are installed
# on it. It can have more than one role.
ROLES = set(['keystone', 'swift_proxy', 'swift_data', 'controller', 'compute',
'glance', 'cinder', 'neutron'])
MANAGEMENT_TYPES = ['none', 'manual', 'metaopenstack', 'vagrant']
LOG = logging.getLogger(__name__)
class Singleton(object):
_instance = None
def __new__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(Singleton, cls).__new__(cls, *args, **kwargs)
return cls._instance
class ServerManager(Singleton):
def __init__(self):
self._servers = server_tools.create_servers(common.CONFIG['servers'])
self._workaround_single_swift_disk()
def servers(self, role=None, roles=None):
"""Generator that gets a server by its parameters.
If no parameters are given, it will just return any of them.
:param role: get a server that has this role, choose from `ROLES`
:param roles: get a server that has all of these roles, see param
`role`
"""
if role:
assert role in ROLES
assert not roles # cannot use both
if roles:
roles = set(roles)
assert roles.issubset(ROLES)
for server in self._servers:
if not role and not roles:
# no conditions, return any
yield server
elif role in server.roles \
or (roles and roles.issubset(server.roles)):
yield server
def get(self, role=None, roles=None):
"""Get the first server that matches the parameters.
        For more info, look at the `ServerManager.servers()` generator - it uses
the same parameters.
:returns: the server in question or None
"""
try:
return self.servers(role, roles).next()
except StopIteration:
return None
def get_all(self, role=None, roles=None):
"""Same as `get`, but returns a list of all the matching servers."""
return list(self.servers(role, roles))
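    # Illustrative usage sketch (editor's addition, not part of the original
    # module); assumes common.CONFIG has already been loaded:
    #
    #   manager = ServerManager()
    #   proxy = manager.get(role='swift_proxy')            # any one proxy node
    #   data_nodes = manager.get_all(role='swift_data')    # every storage node
    #   combo = manager.get(roles=['keystone', 'swift_proxy'])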
def save_state(self, tag=''):
"""Create a snapshot of all the servers
Depending on what is in the configuration in "management.type":
* manual - Just create some backup of the files and maybe
databases. Unsupported and not recommended.
* none - Do nothing
* metaopenstack - Create a snapshot of all the servers
If it's being created, the name of the snapshots (if created) will be
"config.management.snapshot_prefix" + name of the VM + tag, where the
prefix is "destroystack-snapshot" by default. The VMs have to have
unique names (at least among each other) and snapshots/images with that
name cannot already exist.
:param tag: will be appended to the name of the snapshots
"""
self._choose_state_restoration_action('save', tag)
def load_state(self, tag=''):
"""Restore all the servers from their snapshots.
For more information, see the function ``save``.
Depending on what is in the configuration in "management.type":
* manual - Restore backups, mount disks that got umounted, start up
services again. Unsupported, might not work - it's just a best
effort.
* none - Do nothing
* metaopenstack - Rebuild the VMs with the snapshot images, which
are going to be found by the name as described in the `save`
function.
"""
self._choose_state_restoration_action('load', tag)
self.connect()
# workaround for the fact that the extra disk might not get snapshotted
self._restore_swift_disks()
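    # Illustrative sketch (editor's addition, not part of the original module):
    # with management.type set to "metaopenstack", a destructive test is typically
    # bracketed like this (load_state() also re-creates the ssh connections):
    #
    #   manager.save_state(tag='-before-disk-failure')
    #   ...                                   # break something on purpose
    #   manager.load_state(tag='-before-disk-failure')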
def connect(self):
"""Create ssh connections to all the servers.
Will re-create them if called a second time.
"""
for server in self._servers:
server.connect()
def disconnect(self):
for server in self._servers:
server.disconnect()
def _choose_state_restoration_action(self, action, tag):
"""Choose which function to use, based on "management.type" in config.
:param action: save or load
"""
assert action in ['save', 'load']
man_type = common.CONFIG['management']['type']
if man_type == 'metaopenstack':
if action == 'save':
metaopenstack.create_snapshots(tag)
else:
metaopenstack.restore_snapshots(tag)
elif man_type == 'vagrant':
if action == 'save':
vagrant.create_snapshots(tag)
else:
vagrant.restore_snapshots(tag)
elif man_type == 'manual':
if action == 'save':
manual_restoration.create_backup(self)
else:
manual_restoration.restore_backup(self)
elif man_type == 'none':
LOG.info("State save and restoration has been turned off")
else:
            raise Exception("This type of server management, '%s', is not "
"supported, choose among: %s"
% (man_type, MANAGEMENT_TYPES))
def _restore_swift_disks(self):
"""These disks might not have been snapshotted.
Since the extra disk is currently maybe not being snapshotted (it is
just some ephemeral storage or cinder volume), format them and restore
their flags.
Additionally, if the user provided only one disk, we create 3
partitions on it and use them as "disks" to simplify things for the
user.
"""
data_servers = list(self.servers(role='swift_data'))
server_tools.prepare_swift_disks(data_servers)
for server in data_servers:
for disk in server.disks:
server.restore_disk(disk)
def _workaround_single_swift_disk(self):
for server in list(self.servers(role='swift_data')):
if len(server.disks) == 1:
disk = server.disks[0]
server.disks = [disk + "1", disk + "2", disk + "3"]
| apache-2.0 | 3,963,596,485,430,058,500 | 37.240642 | 79 | 0.61418 | false |
Som-Energia/invoice-janitor | invoicing/f1fixing/import_error/models.py | 1 | 24323 | # -*- coding: utf-8 -*-
from lxml import etree, objectify
import base64
import re
import os
from datetime import datetime
import dateutil.parser
import xmlformatter
## OCSUM codes - F1
# Period code
codigoPeriodo_to_P = {
    1: 'P1', 3: 'P2', 10: 'P1', 21: 'P1', 22: 'P2', 31: 'P1', 32: 'P2', 33: 'P3', 41: 'P1',
    42: 'P2', 43: 'P3', 51: 'P1', 52: 'P2', 53: 'P3', 61: 'P1', 62: 'P2', 63: 'P3', 64: 'P4',
    65: 'P5', 66: 'P6', 71: 'P1', 72: 'P2', 73: 'P3', 74: 'P4', 75: 'P5', 76: 'P6', 77: 'P7'
}
# Reading origin code
codigoOrigen_to_O = {
'10': 'Telemedida',
'11': 'Telemedida corregida',
'20': 'TPL',
'21': 'TPL corregida',
'30': 'Visual',
'31': 'Visual corregida',
'40': 'Estimada',
'50': 'Autolectura',
'99': 'Sin Lectura'
}
O_to_codigoOrigen =\
{
'Telemedida':1,
'Telemedida corregida':2,
'TPL':3,
'TPL corregida':4,
'Visual':5,
'Visual corregida':6,
'Estimada':7,
'Autolectura':8,
'Sin Lectura':9,
'Sense Lectura':9
}
class OpenObject(object):
O = None
def __init__(self, O):
self.O = O
class F1(object):
root = None
raw = None
def __init__(self, xml=None, filename=None):
if not xml and not filename:
            raise ValueError('Either xml or filename must be provided')
if filename:
with open(filename) as f:
xml = f.read()
self.root = objectify.fromstring(xml)
@property
def raw(self):
objectify.deannotate(self.root, xsi_nil=True)
etree.cleanup_namespaces(self.root)
return etree.tostring(self.root,
encoding="ISO-8859-1",
xml_declaration=True)
def dump(self, filename):
formatter = xmlformatter.Formatter(indent="1",
indent_char="\t",
encoding_output="ISO-8859-1",
preserve=["literal"])
raw = formatter.format_string(self.raw)
with open(filename, "w") as f:
f.write(raw)
def update_xml_value(self, comptador, data, periode, tipus, attribute, value):
if attribute not in ['FechaHora', 'Procedencia', 'Lectura']:
raise Exception('Attribute not supported')
root = self.root
if not hasattr(root, 'Facturas'):
raise Exception('F1 format failed')
Facturas = root.Facturas
if not hasattr(Facturas, 'FacturaATR'):
raise Exception('F1 format failed')
FacturaATR = Facturas.FacturaATR
if not hasattr(FacturaATR, '__iter__'):
FacturaATR = [FacturaATR]
for FacturaATR_ in FacturaATR:
if not hasattr(FacturaATR_, 'Medidas'):
raise Exception('F1 format failed')
Medidas = FacturaATR_.Medidas
if not hasattr(Medidas, '__iter__'):
Medidas = [Medidas]
for Medidas_ in Medidas:
if not hasattr(Medidas_, 'Aparato'):
raise Exception('F1 format failed')
Aparato = Medidas_.Aparato
if not hasattr(Aparato, '__iter__'):
Aparato = [Aparato]
for Aparato_ in Aparato:
if not hasattr(Aparato_, 'NumeroSerie'):
raise Exception('F1 format failed')
try:
                        if not ((str(Aparato_.NumeroSerie) == str(comptador)) or
                                (int(Aparato_.NumeroSerie) == int(comptador))):
continue
except Exception, e:
continue
if not hasattr(Aparato_,'Integrador'):
raise Exception('F1 format failed')
Integrador = Aparato_.Integrador
if not hasattr(Integrador, '__iter__'):
Integrador = [Integrador]
for Integrador_ in Integrador:
if not hasattr(Integrador_,'Magnitud'):
raise Exception('F1 format failed')
if (tipus == 'A') and not (str(Integrador_.Magnitud) == 'AE'):
continue
if (tipus == 'R') and not (str(Integrador_.Magnitud).startswith('R')):
continue
if not Integrador_.CodigoPeriodo:
continue
if codigoPeriodo_to_P[Integrador_.CodigoPeriodo] == periode:
if not hasattr(Integrador_, 'LecturaDesde'):
raise Exception('F1 format failed')
if not hasattr(Integrador_, 'LecturaHasta'):
raise Exception('F1 format failed')
if dateutil.parser.parse(str(Integrador_.LecturaDesde.FechaHora)) == dateutil.parser.parse(data):
setattr(Integrador_.LecturaDesde, attribute, value)
elif dateutil.parser.parse(str(Integrador_.LecturaHasta.FechaHora)) == dateutil.parser.parse(data):
setattr(Integrador_.LecturaHasta, attribute, value)
def get_xml_value(self, comptador, data, periode, tipus, attribute):
if attribute not in ['FechaHora', 'Procedencia', 'Lectura']:
raise Exception('Attribute not supported')
root = self.root
if not hasattr(root, 'Facturas'):
raise Exception('F1 format failed')
Facturas = root.Facturas
if not hasattr(Facturas, 'FacturaATR'):
raise Exception('F1 format failed')
FacturaATR = Facturas.FacturaATR
if not hasattr(FacturaATR, '__iter__'):
FacturaATR = [FacturaATR]
for FacturaATR_ in FacturaATR:
if not hasattr(FacturaATR_, 'Medidas'):
raise Exception('F1 format failed')
Medidas = FacturaATR_.Medidas
if not hasattr(Medidas, '__iter__'):
Medidas = [Medidas]
for Medidas_ in Medidas:
if not hasattr(Medidas_, 'Aparato'):
raise Exception('F1 format failed')
Aparato = Medidas_.Aparato
if not hasattr(Aparato, '__iter__'):
Aparato = [Aparato]
for Aparato_ in Aparato:
if not hasattr(Aparato_, 'NumeroSerie'):
raise Exception('F1 format failed')
try:
if comptador.isdigit():
if not int(Aparato_.NumeroSerie) == int(comptador):
continue
else:
if not Aparato_.NumeroSerie == comptador:
continue
except Exception, e:
continue
if not hasattr(Aparato_,'Integrador'):
raise Exception('F1 format failed')
Integrador = Aparato_.Integrador
if not hasattr(Integrador, '__iter__'):
Integrador = [Integrador]
for Integrador_ in Integrador:
if not hasattr(Integrador_,'Magnitud'):
raise Exception('F1 format failed')
if (tipus == 'A') and not (str(Integrador_.Magnitud) == 'AE'):
continue
if (tipus == 'R') and not (str(Integrador_.Magnitud).startswith('R')):
continue
if not Integrador_.CodigoPeriodo:
continue
if codigoPeriodo_to_P[Integrador_.CodigoPeriodo] == periode:
if not hasattr(Integrador_, 'LecturaDesde'):
raise Exception('F1 format failed')
if not hasattr(Integrador_, 'LecturaHasta'):
raise Exception('F1 format failed')
if dateutil.parser.parse(str(Integrador_.LecturaDesde.FechaHora)) == dateutil.parser.parse(data):
return getattr(Integrador_.LecturaDesde, attribute)
elif dateutil.parser.parse(str(Integrador_.LecturaHasta.FechaHora)) == dateutil.parser.parse(data):
                                return getattr(Integrador_.LecturaHasta, attribute)
raise Exception('F1 error')
def is_abonadora(self):
Facturas = self.root.Facturas
if not hasattr(Facturas, 'FacturaATR'):
raise Exception('F1 format failed')
FacturaATR = Facturas.FacturaATR
if not hasattr(FacturaATR, '__iter__'):
FacturaATR = [FacturaATR]
        return FacturaATR[0].DatosGeneralesFacturaATR.DatosGeneralesFactura.IndicativoFacturaRectificadora in ['A', 'B']
def is_rectificadora(self):
Facturas = self.root.Facturas
if not hasattr(Facturas, 'FacturaATR'):
raise Exception('F1 format failed')
FacturaATR = Facturas.FacturaATR
if not hasattr(FacturaATR, '__iter__'):
FacturaATR = [FacturaATR]
        return FacturaATR[0].DatosGeneralesFacturaATR.DatosGeneralesFactura.IndicativoFacturaRectificadora == 'R'
class LectPool(OpenObject):
def __init__(self,O):
super(LectPool, self).__init__(O)
class Comptador(OpenObject):
id = None
def __init__(self, O, id):
super(Comptador,self).__init__(O)
self.id = id
class Polissa(OpenObject):
id = None
def __init__(self, O, id):
super(Polissa,self).__init__(O)
self.id = id
fields_to_read = ['name', 'cups', 'tarifa', 'state', 'comptador', 'distribuidora', 'data_alta', 'data_baixa']
data = self.O.GiscedataPolissa.read(self.id, fields_to_read)[0]
self.name = data['name']
self.tarifa = data['tarifa'][1]
self.state = data['state']
self.comptador = Comptador(self.O, data['comptador'])
self.distribuidora = data['distribuidora']
self.data_alta = data['data_alta']
self.data_baixa = data['data_baixa']
def daily_consumption(self):
return self.O.GiscedataPolissa.consum_diari(self.id)
def monthly_consumption(self, period):
return self.daily_consumption()[period]*30
class LectBase(object):
id = None
data = None
tarifa = None
periode_id = None
periode = None
lectura = None
origen_comer = None
origen = None
tipus = None
observacions = None
obj = None
def __init__(self, obj, id):
self.obj = obj
self.id = id
fields_to_read = ['name', 'lectura', 'origen_comer_id', 'origen_id', 'periode', 'tipus', 'observacions']
lect_read = self.obj.read(self.id, fields_to_read)
lect_perm_read = self.obj.perm_read([self.id])[0]
(tarifa,periode) = lect_read['periode'][1].split(' ')
periode_id = lect_read['periode'][0]
periode = periode[1:3]
self.write_date = lect_perm_read['write_date']
self.date = lect_read['name']
self.tarifa = tarifa
self.periode_id = periode_id
self.periode = periode
self.lectura = lect_read['lectura']
self.origen_comer = lect_read['origen_comer_id'][1]
self.origen = lect_read['origen_id'][1]
self.tipus = lect_read['tipus']
self.observacions = lect_read['observacions']
def update_lectura(self, old, new, origen, update_observacions, observacions='', observacions_date='-'):
write_values = {'lectura': int(new), 'origen_id': int(origen)}
if update_observacions:
obs = self.observacions
txt = 'R. {observacions} {old} [{observacions_date}] (ABr)\n'.format(**locals())
if not obs:
obs = ''
obs = txt + obs
write_values.update({'observacions':obs})
self.obj.write([self.id], write_values)
def update_observacions(self, value=None):
if value:
obs = self.observacions
today = datetime.strftime(datetime.today(),'%Y-%m-%d')
txt = 'R. {value} [{today}] (ABr)\n'.format(**locals())
if not obs:
obs = ''
obs = txt + ' ' + obs
self.obj.write([self.id], {'observacions': obs})
class LectPool(LectBase):
def __init__(self, O, id):
super(LectPool, self).__init__(O.GiscedataLecturesLecturaPool, id)
class Lect(LectBase):
def __init__(self, O, id):
super(Lect, self).__init__(O.GiscedataLecturesLectura, id)
class Error(OpenObject):
raw = None
factura = None
comptador = None
data = None
periode = None
tipus = None
valor_xml = None
valor_db = None
lects_pool = {}
last_lects_pool = {}
last_lects_invoice = {}
def __init__(self, O, polissa_id, raw):
super(Error, self).__init__(O)
        # use per-instance dicts so readings from one error do not leak into another
        self.lects_pool = {}
        self.last_lects_pool = {}
        self.last_lects_invoice = {}
        self.parse(raw)
# LectPool
fields_to_search = [('polissa', '=', polissa_id), ('name', '=', self.comptador)]
comptador_ids = O.GiscedataLecturesComptador.search(fields_to_search, 0, 0, False, {'active_test': False})
if len(comptador_ids) == 0:
raise Exception('Comptador missing')
comptador_id = comptador_ids[0]
fields_to_search = [('comptador', '=', comptador_id), ('name', '=', self.data)]
lect_pool_ids = O.GiscedataLecturesLecturaPool.search(fields_to_search)
if not len(lect_pool_ids) > 0:
raise Exception('Lectpool missing')
for lect_pool_id in lect_pool_ids:
lect_pool = LectPool(self.O, lect_pool_id)
self.lects_pool[lect_pool.periode] = lect_pool
fields_to_search = [('comptador', '=', comptador_id),
('origen_id', 'in',
[O_to_codigoOrigen['Telemedida'],
O_to_codigoOrigen['Telemedida corregida'],
O_to_codigoOrigen['TPL'],
O_to_codigoOrigen['TPL corregida'],
O_to_codigoOrigen['Visual'],
O_to_codigoOrigen['Visual corregida']])]
last_lects_pool_ids = O.GiscedataLecturesLecturaPool.search(fields_to_search)
if not len(last_lects_pool_ids) > 0:
raise Exception('Lectpool missing')
last_lects_pool_id = last_lects_pool_ids[0]
fields_to_read = ['name']
last_lects_pool_date = O.GiscedataLecturesLecturaPool.read(last_lects_pool_id, fields_to_read)['name']
fields_to_search = [('comptador', '=', comptador_id),
('name', '=', last_lects_pool_date)]
last_lects_pool_ids = O.GiscedataLecturesLecturaPool.search(fields_to_search)
if not len(last_lects_pool_ids) > 0:
raise Exception('Lectpool missing')
for last_lects_pool_id in last_lects_pool_ids:
last_lects_pool = LectPool(self.O, last_lects_pool_id)
self.last_lects_pool[last_lects_pool.periode] = last_lects_pool
fields_to_search = [('comptador', '=', comptador_id)]
last_lects_invoice_id = O.GiscedataLecturesLectura.search(fields_to_search)[0]
fields_to_read = ['name']
last_lects_invoice_date = O.GiscedataLecturesLectura.read(last_lects_invoice_id, fields_to_read)['name']
fields_to_search = [('comptador', '=', comptador_id),
('name', '=', last_lects_invoice_date)]
last_lects_invoice_ids = O.GiscedataLecturesLectura.search(fields_to_search)
if not len(last_lects_invoice_ids) > 0:
raise Exception('Lect invoice missing')
last_lects_invoice_id = last_lects_invoice_ids[0]
if not len(last_lects_invoice_ids) > 0:
raise Exception('Lect missing')
for last_lects_invoice_id in last_lects_invoice_ids:
last_lects_invoice = Lect(self.O, last_lects_invoice_id)
self.last_lects_invoice[last_lects_invoice.periode] = last_lects_invoice
@property
def FechaHora(self):
return self.data
@property
def Lectura(self):
return self.valor_db
def parse(self,raw):
self.raw = raw
try:
# Format descripció divergència (GISCEMaster/giscedata_lectures_switching/giscedata_lectures.py
# _msg = _(u"Divergència en el valor de lectura existent."
# u" Comptador: %s Data: %s. Període: %s. Tipus: %s"
# u" valor: XML: %s BBDD:%s") \
# % (c_obj.name,
# valor, lect_bw.lectura)
m = re.match(u'Factura (.+): Divergència en el valor de lectura existent. Comptador: (\w+).*Data: ([0-9\-]+).+Període: (\w+)\. Tipus: (\w+) valor: XML: (\d*[.]?\d*).+BBDD:(\d*[.]?\d*)',raw)
if not m:
                raise Exception('Error not matching')
if not len(m.groups()) == 7:
                raise Exception('Error not matching')
self.factura = m.groups()[0]
self.comptador = m.groups()[1]
self.data = m.groups()[2]
self.periode = m.groups()[3]
self.tipus = m.groups()[4]
self.valor_xml = float(m.groups()[5])
self.valor_db = float(m.groups()[6])
except Exception, e:
raise e
class F1ImportError(OpenObject):
id = None
def __init__(self, O, id):
super(F1ImportError, self).__init__(O)
self.id = id
fields_to_read = ['name', 'cups_id', 'info']
data = O.GiscedataFacturacioImportacioLinia.read(self.id, fields_to_read)
self.name = data['name']
self.cups_id = data['cups_id'][0]
perm_data = O.GiscedataFacturacioImportacioLinia.perm_read([self.id])[0]
self.write_date = perm_data['write_date']
self.create_date = perm_data['create_date']
polissa_id = self.O.GiscedataPolissa.search([('cups', '=', self.cups_id)], 0, 0, False, {'active_test': False})
if not polissa_id:
            raise Exception('No contract information available')
self.polissa = Polissa(self.O, polissa_id)
# error
self.error = Error(self.O, polissa_id, data['info'])
# F1
attach_id = self.O.IrAttachment.search([
('res_model', '=', 'giscedata.facturacio.importacio.linia'), ('res_id', '=', self.id)])[0]
if not attach_id:
raise ValueError('Resource id not found')
xml_ = O.IrAttachment.read(attach_id, ['name', 'datas'])
xml = base64.b64decode(xml_["datas"])
self.F1 = F1(xml)
self.request_date = dateutil.parser.parse(str(self.F1.root.Cabecera.FechaSolicitud))
def reload(self, update=False):
if update:
(filename_,extension_) = os.path.splitext(self.name)
self.name = filename_ + '_A' + extension_
filename = os.path.join('/tmp', self.name)
self.F1.dump(filename)
with open(filename, 'rb') as file_:
encoded_string = base64.b64encode(file_.read())
ctx = {'active_id': self.id,
'fitxer_xml': True}
wizard_id = self.O.GiscedataFacturacioSwitchingWizard.create({}, ctx)
wizard = self.O.GiscedataFacturacioSwitchingWizard.get(wizard_id)
vals = {
'origen':'nou',
'filename': self.name,
'file':encoded_string
}
wizard.write(vals)
wizard.action_importar_f1(ctx)
else:
ctx = {'active_id': self.id, 'fitxer_xml': True}
wizard_id = self.O.GiscedataFacturacioSwitchingWizard.create({}, ctx)
wizard = self.O.GiscedataFacturacioSwitchingWizard.get(wizard_id)
wizard.action_importar_f1(ctx)
def update_xml_attribute(self, attribute):
if not hasattr(self.error, attribute):
raise Exception('Attribute %s not supported' % attribute)
self.F1.update_xml_value(self.error.comptador,
self.error.data,
self.error.periode,
self.error.tipus,
attribute,
getattr(self.error, attribute))
def get_xml_attribute(self, attribute):
return self.F1.get_xml_value(self.error.comptador,
self.error.data,
self.error.periode,
self.error.tipus,
attribute)
def dump(self, fmt='txt'):
vars = []
vars.append(('Error_id', self.id))
vars.append(('Polissa', self.polissa.name))
vars.append(('Tarifa', self.polissa.tarifa))
vars.append(('Distribuidora', self.polissa.distribuidora))
vars.append(('Data', self.error.data))
vars.append(('Periode', self.error.periode))
vars.append(('Tipus', self.error.tipus))
if self.F1.is_abonadora():
vars.append(('IndicativoFactura', 'Abonadora'))
elif self.F1.is_rectificadora():
vars.append(('IndicativoFactura', 'Rectificadora'))
else:
vars.append(('IndicativoFactura', 'Normal'))
procedencia = str(self.get_xml_attribute('Procedencia'))
vars.append(('Valor_XML', '%0.2f (%s)' % (self.error.valor_xml, codigoOrigen_to_O[procedencia])))
vars.append(('Valor_DB', '%0.2f' % self.error.valor_db))
vars.append(('Data DB', self.error.lects_pool[self.error.periode].write_date))
fields_to_search = [('comptador.polissa', '=', self.polissa.id[0])]
lect_pool_ids = self.O.GiscedataLecturesLecturaPool.search(fields_to_search)
lect_ids = self.O.GiscedataLecturesLectura.search(fields_to_search)
fields_to_read = ['name', 'periode', 'lectura', 'origen_id', 'observacions']
lect_pools = self.O.GiscedataLecturesLecturaPool.read(lect_pool_ids, fields_to_read)
lects = self.O.GiscedataLecturesLectura.read(lect_ids, fields_to_read)
lect_n = max(len(lects), len(lect_pools))
from tabulate import tabulate
table = []
for lect_idx in range(lect_n):
row = []
if lect_idx < len(lects):
observacions_ = ''
if lects[lect_idx]['observacions']:
observacions = lects[lect_idx]['observacions'].split('\n')
for o in observacions:
if o.startswith(u'From') or \
o.startswith(u'Lectura') or \
o.startswith(u'Tenim') or \
o.startswith(u'Data') or \
o.startswith(u'Limitació') or \
o.startswith(u'Consum'):
continue
observacions_ += o
row += [lects[lect_idx]['name'],
lects[lect_idx]['periode'][1],
lects[lect_idx]['lectura'],
lects[lect_idx]['origen_id'][1],
observacions_]
else:
row += [None, None, None, None, None]
if lect_idx < len(lect_pools):
row += [lect_pools[lect_idx]['name'],
lect_pools[lect_idx]['periode'][1],
lect_pools[lect_idx]['lectura'],
lect_pools[lect_idx]['origen_id'][1],
lect_pools[lect_idx]['observacions']]
else:
row += [None, None, None, None, None]
table.append(row)
for var in vars:
(var_name, var_value) = var
txt = '{var_name}:{var_value}'.format(**locals())
txt = txt.rstrip()
print txt
print tabulate(table, tablefmt=fmt) | agpl-3.0 | -3,406,983,610,301,352,000 | 36.642415 | 201 | 0.525827 | false |
zstackorg/zstack-woodpecker | integrationtest/vm/virt_plus/qos/test_del_data_vol_rw_qos.py | 1 | 4158 | '''
This case can not execute parallelly
@author: Legion
'''
import os
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.operations.host_operations as host_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.operations.vm_operations as vm_ops
import zstackwoodpecker.operations.volume_operations as vol_ops
_config_ = {
'timeout' : 1000,
'noparallel' : True
}
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
volume_offering_uuid = None
def test():
global volume_offering_uuid
test_util.test_dsc('Test VM disk bandwidth QoS by 20MB')
#unit is KB
read_bandwidth = 10*1024*1024
write_bandwidth = 5*1024*1024
new_volume_offering = test_lib.lib_create_disk_offering(read_bandwidth=read_bandwidth, write_bandwidth=write_bandwidth)
volume_offering_uuid = new_volume_offering.uuid
vm = test_stub.create_vm(vm_name = 'vm_volume_qos', disk_offering_uuids = [volume_offering_uuid])
vm.check()
test_obj_dict.add_vm(vm)
vm_inv = vm.get_vm()
cond = res_ops.gen_query_conditions("vmInstanceUuid", '=', vm_inv.uuid)
cond = res_ops.gen_query_conditions("type", '=', 'Data', cond)
volume_uuid = res_ops.query_resource(res_ops.VOLUME, cond)[0].uuid
test_lib.lib_mkfs_for_volume(volume_uuid, vm_inv)
path = '/mnt'
user_name = 'root'
user_password = 'password'
os.system("sshpass -p '%s' ssh %s@%s 'mount /dev/vdb1 %s'"%(user_password, user_name, vm_inv.vmNics[0].ip, path))
test_stub.make_ssh_no_password(vm_inv)
test_stub.install_fio(vm_inv)
vm_ops.set_vm_disk_qos(test_lib.lib_get_data_volumes(vm_inv)[0].uuid, read_bandwidth*2)
if vm_ops.get_vm_disk_qos(test_lib.lib_get_data_volumes(vm_inv)[0].uuid).volumeBandwidth != read_bandwidth*2:
test_util.test_fail('Retrieved disk qos not match')
if vm_ops.get_vm_disk_qos(test_lib.lib_get_data_volumes(vm_inv)[0].uuid).volumeBandwidthRead == read_bandwidth:
test_util.test_fail('read qos must be cleared after set total qos')
if vm_ops.get_vm_disk_qos(test_lib.lib_get_data_volumes(vm_inv)[0].uuid).volumeBandwidthWrite == write_bandwidth:
test_util.test_fail('write qos must be cleared after set total qos')
# test_stub.test_fio_bandwidth(vm_inv, read_bandwidth, '/dev/vdb')
# check read bw
vm_ops.set_vm_disk_qos(test_lib.lib_get_data_volumes(vm_inv)[0].uuid, read_bandwidth*2, 'read')
if vm_ops.get_vm_disk_qos(test_lib.lib_get_data_volumes(vm_inv)[0].uuid).volumeBandwidthRead != read_bandwidth*2:
test_util.test_fail('Retrieved disk qos not match')
test_stub.test_fio_bandwidth(vm_inv, read_bandwidth*2, '/dev/vdb')
vm_ops.del_vm_disk_qos(test_lib.lib_get_data_volumes(vm_inv)[0].uuid, mode='all')
if test_stub.test_fio_bandwidth(vm_inv, read_bandwidth, '/dev/vdb', raise_exception=False):
test_util.test_fail('disk qos is not expected to have limit after qos setting is deleted')
# check write bw
vm_ops.set_vm_disk_qos(test_lib.lib_get_data_volumes(vm_inv)[0].uuid, write_bandwidth*2, 'write')
if vm_ops.get_vm_disk_qos(test_lib.lib_get_data_volumes(vm_inv)[0].uuid).volumeBandwidthWrite != write_bandwidth*2:
test_util.test_fail('Retrieved disk qos not match')
test_stub.test_fio_bandwidth(vm_inv, write_bandwidth*2, path)
vm_ops.del_vm_disk_qos(test_lib.lib_get_data_volumes(vm_inv)[0].uuid, mode='all')
if test_stub.test_fio_bandwidth(vm_inv, write_bandwidth, path, raise_exception=False):
test_util.test_fail('disk qos is not expected to have limit after qos setting is deleted')
vol_ops.delete_disk_offering(volume_offering_uuid)
test_lib.lib_robot_cleanup(test_obj_dict)
test_util.test_pass('VM data volume QoS Test Pass')
#Will be called only if exception happens in test().
def error_cleanup():
global volume_offering_uuid
test_lib.lib_error_cleanup(test_obj_dict)
try:
vol_ops.delete_disk_offering(volume_offering_uuid)
except:
pass
| apache-2.0 | -6,563,108,078,337,826,000 | 46.25 | 123 | 0.703463 | false |
Kayoku/iotari | work-area/wireless-sensor/sensor.py | 1 | 2978 | import datetime
import json
from pprint import pprint
import requests
class Sensor():
"""Abstract sensor class."""
def __init__(self, uuid):
"""Initialisation."""
# find a way to get a stable name
self.uuid = uuid
def save_measure(self):
"""How to save a new measure."""
raise NotImplementedError
class APISensor(Sensor):
"""Save a sensor value using a remote API."""
HTTP_STATUS_CREATED = 201
HTTP_STATUS_SUCCESS = 200
def __init__(self, uuid, baseurl):
"""Initialize."""
super().__init__(uuid)
self.baseurl = baseurl
self.uuid = uuid
self.get_id()
def get_id(self):
"""Get the database id for the sensor.
If the sensor doesn't exist, it creates it.
"""
filters = [dict(name='location', op='equals', val=self.uuid)]
params = dict(q=json.dumps(dict(filters=filters)))
r = requests.get(self.baseurl + '/api/sensor',
params=params,
headers={'content-type': 'application/json'})
if r.status_code == self.HTTP_STATUS_SUCCESS:
json_content = json.loads(r.text)
if json_content["num_results"] == 1:
self.id_ = json_content["objects"][0]["id"]
elif json_content["num_results"] == 0:
# add a new sensor in db with the UUID
                r = requests.post(self.baseurl + '/api/sensor',
data=json.dumps({"location": self.uuid}),
headers={'content-type': 'application/json'})
if r.status_code == self.HTTP_STATUS_CREATED:
self.id_ = json.loads(r.text)["id"]
else:
raise Exception("impossible to add new sensor")
else:
raise Exception("mulltiple sensors with same id")
def save_measure(self, measure, time_stamp):
new_mesure = {'value': measure,
'sensor_id': self.id_,
'time_stamp': time_stamp}
try:
r = requests.post(self.baseurl + '/api/measure',
data=json.dumps(new_mesure),
headers={'content-type': 'application/json'})
except requests.exceptions.ConnectionError:
return False
return r.status_code == self.HTTP_STATUS_CREATED
if __name__ == "__main__":
baseurl = 'http://localhost:5000'
sensor = APISensor("salon", baseurl)
for _ in range(50):
sensor.save_measure(_, datetime.datetime.now().isoformat())
r = requests.get(baseurl + '/api/sensor',
headers={'content-type': 'application/json'})
print("Sensors: ")
pprint({"status": r.status_code, "headers": r.headers['content-type'], "content": json.loads(str(r.text))})
| mit | 6,726,347,953,359,607,000 | 33.879518 | 111 | 0.520484 | false |
ContinuumIO/dask | dask/dataframe/accessor.py | 2 | 5362 | import numpy as np
import pandas as pd
from functools import partial
from ..utils import derived_from
def maybe_wrap_pandas(obj, x):
if isinstance(x, np.ndarray):
if isinstance(obj, pd.Series):
return pd.Series(x, index=obj.index, dtype=x.dtype)
return pd.Index(x)
return x
class Accessor(object):
"""
Base class for pandas Accessor objects cat, dt, and str.
Notes
-----
Subclasses should define ``_accessor_name``
"""
_not_implemented = set()
def __init__(self, series):
from .core import Series
if not isinstance(series, Series):
raise ValueError("Accessor cannot be initialized")
series_meta = series._meta
if hasattr(series_meta, "to_series"): # is index-like
series_meta = series_meta.to_series()
meta = getattr(series_meta, self._accessor_name)
self._meta = meta
self._series = series
@staticmethod
def _delegate_property(obj, accessor, attr):
out = getattr(getattr(obj, accessor, obj), attr)
return maybe_wrap_pandas(obj, out)
@staticmethod
def _delegate_method(obj, accessor, attr, args, kwargs):
out = getattr(getattr(obj, accessor, obj), attr)(*args, **kwargs)
return maybe_wrap_pandas(obj, out)
def _property_map(self, attr):
meta = self._delegate_property(self._series._meta, self._accessor_name, attr)
token = "%s-%s" % (self._accessor_name, attr)
return self._series.map_partitions(
self._delegate_property, self._accessor_name, attr, token=token, meta=meta
)
def _function_map(self, attr, *args, **kwargs):
if "meta" in kwargs:
meta = kwargs.pop("meta")
else:
meta = self._delegate_method(
self._series._meta_nonempty, self._accessor_name, attr, args, kwargs
)
token = "%s-%s" % (self._accessor_name, attr)
return self._series.map_partitions(
self._delegate_method,
self._accessor_name,
attr,
args,
kwargs,
meta=meta,
token=token,
)
@property
def _delegates(self):
return set(dir(self._meta)).difference(self._not_implemented)
def __dir__(self):
o = self._delegates
o.update(self.__dict__)
o.update(dir(type(self)))
return list(o)
def __getattr__(self, key):
if key in self._delegates:
if callable(getattr(self._meta, key)):
return partial(self._function_map, key)
else:
return self._property_map(key)
else:
raise AttributeError(key)
class DatetimeAccessor(Accessor):
""" Accessor object for datetimelike properties of the Series values.
Examples
--------
>>> s.dt.microsecond # doctest: +SKIP
"""
_accessor_name = "dt"
class StringAccessor(Accessor):
""" Accessor object for string properties of the Series values.
Examples
--------
>>> s.str.lower() # doctest: +SKIP
"""
_accessor_name = "str"
_not_implemented = {"get_dummies"}
@derived_from(pd.core.strings.StringMethods)
def split(self, pat=None, n=-1, expand=False):
if expand:
if n == -1:
raise NotImplementedError(
"To use the expand parameter you must specify the number of "
"expected splits with the n= parameter. Usually n splits result in n+1 output columns."
)
else:
delimiter = " " if pat is None else pat
meta = type(self._series._meta)([delimiter.join(["a"] * (n + 1))])
meta = meta.str.split(n=n, expand=expand, pat=pat)
else:
meta = (self._series.name, object)
return self._function_map("split", pat=pat, n=n, expand=expand, meta=meta)
@derived_from(pd.core.strings.StringMethods)
def cat(self, others=None, sep=None, na_rep=None):
from .core import Series, Index
if others is None:
raise NotImplementedError("x.str.cat() with `others == None`")
valid_types = (Series, Index, pd.Series, pd.Index)
if isinstance(others, valid_types):
others = [others]
elif not all(isinstance(a, valid_types) for a in others):
raise TypeError("others must be Series/Index")
return self._series.map_partitions(
str_cat, *others, sep=sep, na_rep=na_rep, meta=self._series._meta
)
@derived_from(pd.core.strings.StringMethods)
def extractall(self, pat, flags=0):
# TODO: metadata inference here won't be necessary for pandas >= 0.23.0
meta = self._series._meta.str.extractall(pat, flags=flags)
return self._series.map_partitions(
str_extractall, pat, flags, meta=meta, token="str-extractall"
)
def __getitem__(self, index):
return self._series.map_partitions(str_get, index, meta=self._series._meta)
def str_extractall(series, pat, flags):
return series.str.extractall(pat, flags=flags)
def str_get(series, index):
""" Implements series.str[index] """
return series.str[index]
def str_cat(self, *others, **kwargs):
return self.str.cat(others=others, **kwargs)
| bsd-3-clause | -6,140,759,249,159,344,000 | 29.64 | 107 | 0.586162 | false |
fakdora/flaksy-upto-login | app/main/views.py | 1 | 3865 | from flask import render_template, redirect, url_for, abort, flash, request,\
current_app
from flask.ext.login import login_required, current_user
from . import main
from .forms import EditProfileForm, EditProfileAdminForm, PostForm
from .. import db
from ..models import Permission, Role, User, Post
from ..decorators import admin_required
@main.route('/', methods=['GET', 'POST'])
def index():
form = PostForm()
if current_user.can(Permission.WRITE_ARTICLES) and \
form.validate_on_submit():
post = Post(body=form.body.data,
author=current_user._get_current_object())
db.session.add(post)
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
pagination = Post.query.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts = pagination.items
return render_template('index.html', form=form, posts=posts,
pagination=pagination)
@main.route('/user/<username>')
def user(username):
user = User.query.filter_by(username=username).first_or_404()
page = request.args.get('page', 1, type=int)
pagination = user.posts.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts = pagination.items
return render_template('user.html', user=user, posts=posts,
pagination=pagination)
@main.route('/edit-profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.about_me = form.about_me.data
db.session.add(current_user)
flash('Your profile has been updated.')
return redirect(url_for('.user', username=current_user.username))
form.name.data = current_user.name
form.location.data = current_user.location
form.about_me.data = current_user.about_me
return render_template('edit_profile.html', form=form)
@main.route('/edit-profile/<int:id>', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_profile_admin(id):
user = User.query.get_or_404(id)
form = EditProfileAdminForm(user=user)
if form.validate_on_submit():
user.email = form.email.data
user.username = form.username.data
user.confirmed = form.confirmed.data
user.role = Role.query.get(form.role.data)
user.name = form.name.data
user.location = form.location.data
user.about_me = form.about_me.data
db.session.add(user)
flash('The profile has been updated.')
return redirect(url_for('.user', username=user.username))
form.email.data = user.email
form.username.data = user.username
form.confirmed.data = user.confirmed
form.role.data = user.role_id
form.name.data = user.name
form.location.data = user.location
form.about_me.data = user.about_me
return render_template('edit_profile.html', form=form, user=user)
@main.route('/post/<int:id>')
def post(id):
post = Post.query.get_or_404(id)
return render_template('post.html', posts=[post])
@main.route('/edit/<int:id>', methods=['GET', 'POST'])
@login_required
def edit(id):
post = Post.query.get_or_404(id)
if current_user != post.author and \
not current_user.can(Permission.ADMINISTER):
abort(403)
form = PostForm()
if form.validate_on_submit():
post.body = form.body.data
db.session.add(post)
flash('The post has been updated.')
return redirect(url_for('.post', id=post.id))
form.body.data = post.body
return render_template('edit_post.html', form=form)
| mit | -6,375,092,692,908,061,000 | 35.809524 | 77 | 0.656145 | false |
quentinhardy/odat | ExternalTable.py | 1 | 7140 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from DirectoryManagement import DirectoryManagement
import logging, random, string
from Utils import checkOptionsGivenByTheUser
from Constants import *
class ExternalTable (DirectoryManagement):
'''
	Allow the user to read files thanks to external tables
'''
def __init__(self,args):
'''
Constructor
'''
logging.debug("ExternalTable object created")
DirectoryManagement.__init__(self,args)
self.tableName = self.__generateRandomString__()
self.__setDirectoryName__()
self.ERROR_EXTERNAL_TABLE_WITH_WRITE = "ORA-30653: "
self.ERROR_EXTERNAL_TABLE_READ ="ORA-29400: "
self.ERROR_ODCIEXTTABLEOPEN="ORA-29913: "
def __createTableForReadFile__(self,remoteNameFile):
'''
Create table name with, for exemple:
CREATE TABLE rf1 (id NUMBER PRIMARY KEY, path VARCHAR(255) UNIQUE, ot_format VARCHAR(6));
'''
logging.info('Create the table: {0}'.format(self.tableName))
query = "CREATE TABLE {0} (line varchar2(256)) ORGANIZATION EXTERNAL (TYPE oracle_loader DEFAULT DIRECTORY {1} ACCESS PARAMETERS ( RECORDS DELIMITED BY NEWLINE BADFILE 'bad_data.bad' NOLOGFILE FIELDS TERMINATED BY ',' MISSING FIELD VALUES ARE NULL REJECT ROWS WITH ALL NULL FIELDS (line)) LOCATION ('{2}')) PARALLEL REJECT LIMIT 0 NOMONITORING".format(self.tableName, self.directoryName, remoteNameFile)
response = self.__execThisQuery__(query=query,isquery=False)
if isinstance(response,Exception) :
logging.info('Error with the SQL request {0}: {1}'.format(query,str(response)))
return response
else : return True
def __createTableForExec__(self,remoteNameFile):
'''
Create a table in order to execute a command
'''
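		# Note: the PREPROCESSOR clause in the DDL below tells Oracle to run the
		# named file before loading data, so selecting from this external table
		# effectively executes that file on the database server. This comment is
		# explanatory only; the exact statement is built from self.tableName,
		# self.directoryName and remoteNameFile.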
logging.info('Create the table: {0}'.format(self.tableName))
query = """CREATE TABLE {0} ( line NUMBER , text VARCHAR2(4000)) ORGANIZATION EXTERNAL ( TYPE ORACLE_LOADER DEFAULT DIRECTORY {1} ACCESS PARAMETERS ( RECORDS DELIMITED BY NEWLINE NOLOGFILE PREPROCESSOR {1}: '{2}' FIELDS TERMINATED BY WHITESPACE ( line RECNUM , text POSITION(1:4000)) ) LOCATION ('{2}') ) REJECT LIMIT UNLIMITED""".format(self.tableName, self.directoryName, remoteNameFile)
response = self.__execThisQuery__(query=query,isquery=False)
if isinstance(response,Exception) :
logging.info('Error with the SQL request {0}: {1}'.format(query,str(response)))
return response
else : return True
def __dropTable__(self):
'''
		Drop the table with, for example
DROP TABLE my_table PURGE;
'''
logging.info('Drop the table: {0}'.format(self.tableName))
query = "DROP TABLE {0} PURGE".format(self.tableName)
response = self.__execThisQuery__(query=query,isquery=False)
if isinstance(response,Exception) :
logging.info('Error with the SQL request {0}: {1}'.format(query,str(response)))
return response
else : return True
def getFile (self,remotePath, remoteNameFile, localFile):
'''
Create the localFile file containing data stored on the remoteNameFile (stored in the remotePath)
'''
data = ""
logging.info("Copy the {0} remote file (stored in {1}) to {2}".format(remoteNameFile,remotePath,localFile))
status = self.__createOrRemplaceDirectory__(remotePath)
if isinstance(status,Exception): return status
status = self.__createTableForReadFile__(remoteNameFile)
if isinstance(status,Exception): return status
request = "select line from {0}".format(self.tableName)
response = self.__execThisQuery__(query=request,ld=['line'])
if isinstance(response,Exception):
logging.info('Error with the SQL request {0}: {1}'.format(request,response))
status = self.__dropDirectory__()
status = self.__dropTable__()
return response
else :
for l in response:
data += l['line']+'\n'
status = self.__dropDirectory__()
status = self.__dropTable__()
return data
def execute (self, remotePath, remoteNameFile):
'''
Execute a command
'''
logging.info("Execute the {0} command stored stored in {1}".format(remoteNameFile,remotePath))
status = self.__createOrRemplaceDirectory__(remotePath)
if isinstance(status,Exception): return status
status = self.__createTableForExec__(remoteNameFile)
if isinstance(status,Exception): return status
request = "select line from {0}".format(self.tableName)
response = self.__execThisQuery__(query=request, ld=['line'])
if isinstance(response,Exception):
logging.info('Error with the SQL request {0}: {1}'.format(request,response))
status = self.__dropDirectory__()
status = self.__dropTable__()
return response
else :
logging.info("{0} command executed without errors".format(remoteNameFile))
status = self.__dropDirectory__()
status = self.__dropTable__()
return response
def testAll(self):
'''
Test all functions
'''
folder = self.__generateRandomString__()
self.args['print'].subtitle("External table to read files ?")
logging.info("Simulate the file reading in the {0} folder thanks to an external table".format(folder))
status = self.getFile(remotePath=folder, remoteNameFile='data.txt', localFile="test.txt")
if (status == True or self.ERROR_EXTERNAL_TABLE_WITH_WRITE in str(status) or self.ERROR_EXTERNAL_TABLE_READ in str(status)):
self.args['print'].goodNews("OK")
else :
self.args['print'].badNews("KO")
self.args['print'].subtitle("External table to execute system commands ?")
logging.info("Simulate the file execution thanks to an external table")
status = self.execute (remotePath=folder, remoteNameFile='test')
if (status == True or self.ERROR_EXTERNAL_TABLE_WITH_WRITE in str(status) or self.ERROR_EXTERNAL_TABLE_READ in str(status)):
self.args['print'].goodNews("OK")
else :
self.args['print'].badNews("KO")
def runExternalTableModule (args):
'''
Run the External Table module
'''
status = True
if checkOptionsGivenByTheUser(args,["test-module","getFile","exec"]) == False : return EXIT_MISS_ARGUMENT
externalTable = ExternalTable(args)
status = externalTable.connection(stopIfError=True)
if args['test-module'] == True :
args['print'].title("Test if the External Table module can be used")
status = externalTable.testAll()
#Option 1: getFile
if args['getFile'] != None:
args['print'].title("Read the {0} file stored in the {1} path".format(args['getFile'][1],args['getFile'][0]))
data = externalTable.getFile (remotePath=args['getFile'][0], remoteNameFile=args['getFile'][1], localFile=args['getFile'][2])
if isinstance(data,Exception):
args['print'].badNews("There is an error: {0}".format(data))
else:
args['print'].goodNews("Data stored in the remote file {0} stored in {1}".format(args['getFile'][1],args['getFile'][0]))
print(data)
#Option 2: exec a script or command
if args['exec'] != None:
args['print'].title("Execute the {0} command stored in the {1} path".format(args['exec'][1],args['exec'][0]))
data = externalTable.execute (remotePath=args['exec'][0], remoteNameFile=args['exec'][1])
if isinstance(data,Exception):
args['print'].badNews("There is an error: {0}".format(data))
else:
args['print'].goodNews("The {0} command stored in {1} has been executed (normally)".format(args['exec'][1],args['exec'][0]))
| lgpl-3.0 | 8,598,786,521,398,740,000 | 43.886792 | 405 | 0.711223 | false |
Azure/azure-sdk-for-python | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_06_01/aio/operations/_deleted_web_apps_operations.py | 1 | 11568 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DeletedWebAppsOperations:
"""DeletedWebAppsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.web.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs: Any
) -> AsyncIterable["_models.DeletedWebAppCollection"]:
"""Get all deleted apps for a subscription.
Description for Get all deleted apps for a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DeletedWebAppCollection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_06_01.models.DeletedWebAppCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedWebAppCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('DeletedWebAppCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/deletedSites'} # type: ignore
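    # A minimal usage sketch (illustrative only; assumes an already configured
    # service client exposing this operation group as `client.deleted_web_apps`):
    #
    #     async for deleted_site in client.deleted_web_apps.list():
    #         ...  # each item is a DeletedSite model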
def list_by_location(
self,
location: str,
**kwargs: Any
) -> AsyncIterable["_models.DeletedWebAppCollection"]:
"""Get all deleted apps for a subscription at location.
Description for Get all deleted apps for a subscription at location.
:param location:
:type location: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DeletedWebAppCollection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_06_01.models.DeletedWebAppCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedWebAppCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_location.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('DeletedWebAppCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_location.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/locations/{location}/deletedSites'} # type: ignore
async def get_deleted_web_app_by_location(
self,
location: str,
deleted_site_id: str,
**kwargs: Any
) -> "_models.DeletedSite":
"""Get deleted app for a subscription at location.
Description for Get deleted app for a subscription at location.
:param location:
:type location: str
:param deleted_site_id: The numeric ID of the deleted app, e.g. 12345.
:type deleted_site_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeletedSite, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_06_01.models.DeletedSite
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedSite"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self.get_deleted_web_app_by_location.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'deletedSiteId': self._serialize.url("deleted_site_id", deleted_site_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('DeletedSite', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_deleted_web_app_by_location.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/locations/{location}/deletedSites/{deletedSiteId}'} # type: ignore
| mit | -4,093,610,424,113,834,000 | 46.216327 | 179 | 0.635979 | false |
werehuman/cocaine-tools | cocaine/tools/actions/crashlog.py | 1 | 7060 | #
# Copyright (c) 2013+ Anton Tyurin <[email protected]>
# Copyright (c) 2013+ Evgeny Safronov <[email protected]>
# Copyright (c) 2011-2014 Other contributors as noted in the AUTHORS file.
#
# This file is part of Cocaine-tools.
#
# Cocaine is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Cocaine is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import datetime
import itertools
import time
from tornado import gen
from cocaine.tools import actions, log
from cocaine.decorators import coroutine
from cocaine.tools.actions import app
__author__ = 'Evgeny Safronov <[email protected]>'
def parse_crashlog_day_format(day_string):
index_format = 'cocaine-%Y-%m-%d'
if not day_string:
return day_string
if 'today'.startswith(day_string):
return datetime.date.today().strftime(index_format)
elif 'yesterday'.startswith(day_string):
yesterday = datetime.date.today() - datetime.timedelta(days=1)
return yesterday.strftime(index_format)
else:
values_count = day_string.count("-")
if values_count == 0: # only day specified
today = datetime.date.today()
day = datetime.datetime.strptime(day_string, "%d").replace(year=today.year,
month=today.month)
return day.strftime(index_format)
elif values_count == 1: # day and month
day = datetime.datetime.strptime(day_string,
"%d-%m").replace(year=datetime.date.today().year)
return day.strftime(index_format)
elif values_count == 2: # the whole date
return datetime.datetime.strptime(day_string, "%d-%m-%Y").strftime(index_format)
raise ValueError("Invalid day format %s. Must be day-month-year|today|yesterday" % day_string)
class List(actions.Storage):
def __init__(self, storage, name, day_string=''):
super(List, self).__init__(storage)
self.name = name
if not self.name:
raise ValueError('Please specify a crashlog name')
self.day = parse_crashlog_day_format(day_string)
@coroutine
def execute(self):
indexes = [self.name]
if self.day:
indexes.append(self.day)
channel = yield self.storage.find('crashlogs', indexes)
listing = yield channel.rx.get()
raise gen.Return(listing)
def _parseCrashlogs(crashlogs, timestamp=None):
def is_filter(arg):
return arg == timestamp if timestamp else True
_list = (log.split(':', 1) for log in crashlogs)
return [(ts, time.ctime(float(ts) / 1000000), name) for ts, name in _list if is_filter(ts)]
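# Crashlog keys in storage look like "<timestamp>:<name>", with the timestamp in
# microseconds (hence the division by 1000000 above). An illustrative, made-up
# example of what _parseCrashlogs returns for such a key:
#   _parseCrashlogs(['1426251914836739:app_uuid'])
#   -> [('1426251914836739', 'Fri Mar 13 ... 2015', 'app_uuid')]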
class Specific(actions.Storage):
def __init__(self, storage, name, timestamp=None):
super(Specific, self).__init__(storage)
self.name = name
self.timestamp = timestamp
if not self.name:
raise ValueError('Please specify application name')
class View(Specific):
@coroutine
def execute(self):
channel = yield self.storage.find('crashlogs', [self.name])
crashlogs = yield channel.rx.get()
parsed_crashlogs = _parseCrashlogs(crashlogs, timestamp=self.timestamp)
contents = []
for crashlog in parsed_crashlogs:
key = '%s:%s' % (crashlog[0], crashlog[2])
channel = yield self.storage.read('crashlogs', key)
content = yield channel.rx.get()
contents.append(content)
raise gen.Return(''.join(contents))
class Remove(Specific):
@coroutine
def execute(self):
channel = yield self.storage.find('crashlogs', [self.name])
crashlogs = yield channel.rx.get()
parsed_crashlogs = _parseCrashlogs(crashlogs, timestamp=self.timestamp)
for crashlog in parsed_crashlogs:
try:
key = '%s:%s' % (crashlog[0], crashlog[2])
channel = yield self.storage.remove('crashlogs', key)
yield channel.rx.get()
except Exception as err:
log.error("unable to delete crashlog %s: %s", str(crashlog), err)
raise gen.Return('Done')
class RemoveAll(Remove):
def __init__(self, storage, name):
super(RemoveAll, self).__init__(storage, name, timestamp=None)
class Status(actions.Storage):
@coroutine
def execute(self):
applications = yield app.List(self.storage).execute()
crashed = []
for application in applications:
crashlogs = yield List(self.storage, application).execute()
if crashlogs:
last = max(_parseCrashlogs(crashlogs), key=lambda (timestamp, time, uuid): timestamp)
crashed.append((application, last, len(crashlogs)))
raise gen.Return(crashed)
def splitted(collection, sep=None, maxsplit=None):
for item in collection:
yield item.split(sep, maxsplit)
def filtered(crashlogs):
for (ts, uuid) in splitted(crashlogs, ':', 1):
yield int(ts), uuid
class Clean(Specific):
def __init__(self, storage, name, size, timestamp=None):
super(Clean, self).__init__(storage, name, timestamp)
self.size = int(size)
@coroutine
def execute(self):
if not self.name:
apps = yield app.List(self.storage).execute()
else:
apps = [self.name]
result = []
if self.timestamp:
try:
dt = datetime.datetime.strptime(self.timestamp, '%Y-%m-%dT%H:%M:%S')
timestamp = int(time.mktime(dt.timetuple())) * 1000000 + dt.microsecond
except ValueError:
timestamp = int(self.timestamp)
for app_name in apps:
channel = yield self.storage.find('crashlogs', [app_name])
crashlogs = yield channel.rx.get()
result = filter(lambda (ts, uuid): ts < timestamp, filtered(crashlogs))
elif self.size > 0:
for app_name in apps:
channel = yield self.storage.find('crashlogs', [app_name])
crashlogs = yield channel.rx.get()
result = itertools.islice(
sorted(filtered(crashlogs[0]), key=lambda (ts, uuid): ts, reverse=True), self.size, None)
for crashlog in result:
print('removing', '%d:%s' % crashlog)
channel = yield self.storage.remove('crashlogs', '%d:%s' % crashlog)
yield channel.rx.get()
raise gen.Return('Done')
| lgpl-3.0 | -3,078,409,603,253,153,300 | 36.157895 | 109 | 0.616997 | false |
yeti-platform/yeti | core/web/api/export.py | 1 | 3616 | from __future__ import unicode_literals
import os
from flask import send_from_directory, make_response
from flask_classy import route
from mongoengine.errors import DoesNotExist
from core.web.api.crud import CrudApi
from core import exports
from core.web.api.api import render
from core.helpers import string_to_timedelta
from core.observables import Tag
from core.web.helpers import requires_permissions
class ExportTemplate(CrudApi):
template = "export_template_api.html"
objectmanager = exports.ExportTemplate
class Export(CrudApi):
template = "export_api.html"
template_single = "export_api_single.html"
objectmanager = exports.Export
@route("/<string:id>/content")
@requires_permissions("read")
def content(self, id):
"""Return export content
Returns a given export's content.
:query ObjectID id: Export ID
:resheader X-Yeti-Export-MD5: The MD5 hash of the exported content. Use it to check the export's integrity
"""
try:
e = self.objectmanager.objects.get(id=id)
except DoesNotExist:
return render({"error": "No Export found for id {}".format(id)}), 404
if e.output_dir.startswith("/"):
d = e.output_dir
else:
d = os.path.join(
os.path.dirname(
os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
)
),
e.output_dir,
)
response = make_response(
send_from_directory(
d, e.name, as_attachment=True, attachment_filename=e.name
)
)
response.headers["X-Yeti-Export-MD5"] = e.hash_md5
return response
@route("/<string:id>/refresh", methods=["POST"])
@requires_permissions("refresh")
def refresh(self, id):
"""Refresh an export
Manually executes an export if it is not already exporting.
:query ObjectID id: Export ID
:>json ObjectID id: The export's ObjectID
"""
exports.execute_export.delay(id)
return render({"id": id})
@route("/<string:id>/toggle", methods=["POST"])
@requires_permissions("toggle")
def toggle(self, id):
"""Toggle an export
Toggles an export. A deactivated export will not execute when called (manually or scheduled)
:query ObjectID id: Export ID
:>json ObjectID id: The export's ObjectID
:>json boolean status: The result of the toggle operation (``true`` means the export has been enabled, ``false`` means it has been disabled)
"""
e = self.objectmanager.objects.get(id=id)
e.enabled = not e.enabled
e.save()
return render({"id": id, "status": e.enabled})
def _parse_request(self, json):
params = json
params["frequency"] = string_to_timedelta(params.get("frequency", "1:00:00"))
params["ignore_tags"] = [
Tag.objects.get(name=name.strip())
for name in params["ignore_tags"].split(",")
if name.strip()
]
params["include_tags"] = [
Tag.objects.get(name=name.strip())
for name in params["include_tags"].split(",")
if name.strip()
]
params["exclude_tags"] = [
Tag.objects.get(name=name.strip())
for name in params["exclude_tags"].split(",")
if name.strip()
]
params["template"] = exports.ExportTemplate.objects.get(name=params["template"])
return params
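    # An illustrative JSON payload for _parse_request (all values are made up;
    # tag and template names must already exist, since they are resolved with
    # .objects.get() above):
    #
    #     {"frequency": "1:00:00", "ignore_tags": "tag1,tag2",
    #      "include_tags": "tag3", "exclude_tags": "", "template": "csv"}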
| apache-2.0 | 2,652,748,986,496,160,000 | 32.174312 | 148 | 0.595409 | false |
igemsoftware2017/USTC-Software-2017 | tests/notices/test_send.py | 1 | 1024 | from rest_framework.test import APITestCase
from biohub.notices import tool
from biohub.accounts.models import User
class Test(APITestCase):
def setUp(self):
self.me = User.objects.create_test_user('me')
self.you = User.objects.create_test_user('you')
self.dispatcher = tool.Dispatcher('test')
def test_basic_send(self):
notice = self.dispatcher.send(
self.me,
'User {{user.username}} {{category}}')
self.assertEqual(notice.message, 'User %s test' % self.me.username)
self.assertEqual('test', notice.category)
def test_url(self):
notice = self.dispatcher.send(
self.me,
'{{"title"|url:user}}')
self.assertEqual('[[title]]((user))((%s))' % self.me.username, notice.message)
def test_group_send(self):
notices = self.dispatcher.group_send(
[self.me, self.you],
'{{user.username}}')
self.assertListEqual(['me', 'you'], [x.message for x in notices])
| gpl-3.0 | -2,783,810,677,243,379,700 | 29.117647 | 86 | 0.602539 | false |
Joni-Aaltonen/pebble-owner-info | .waf-1.7.0-9334f7e963bee5410f4fa28728feffdd/waflib/Tools/c_tests.py | 1 | 4186 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
from waflib import Task
from waflib.Configure import conf
from waflib.TaskGen import feature,before_method,after_method
import sys
LIB_CODE='''
#ifdef _MSC_VER
#define testEXPORT __declspec(dllexport)
#else
#define testEXPORT
#endif
testEXPORT int lib_func(void) { return 9; }
'''
MAIN_CODE='''
#ifdef _MSC_VER
#define testEXPORT __declspec(dllimport)
#else
#define testEXPORT
#endif
testEXPORT int lib_func(void);
int main(void) {return !(lib_func() == 9);}
'''
@feature('link_lib_test')
@before_method('process_source')
def link_lib_test_fun(self):
def write_test_file(task):
task.outputs[0].write(task.generator.code)
rpath=[]
if getattr(self,'add_rpath',False):
rpath=[self.bld.path.get_bld().abspath()]
mode=self.mode
m='%s %s'%(mode,mode)
ex=self.test_exec and'test_exec'or''
bld=self.bld
bld(rule=write_test_file,target='test.'+mode,code=LIB_CODE)
bld(rule=write_test_file,target='main.'+mode,code=MAIN_CODE)
bld(features='%sshlib'%m,source='test.'+mode,target='test')
bld(features='%sprogram %s'%(m,ex),source='main.'+mode,target='app',use='test',rpath=rpath)
@conf
def check_library(self,mode=None,test_exec=True):
if not mode:
mode='c'
if self.env.CXX:
mode='cxx'
self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec,)
INLINE_CODE='''
typedef int foo_t;
static %s foo_t static_foo () {return 0; }
%s foo_t foo () {
return 0;
}
'''
INLINE_VALUES=['inline','__inline__','__inline']
@conf
def check_inline(self,**kw):
self.start_msg('Checking for inline')
if not'define_name'in kw:
kw['define_name']='INLINE_MACRO'
if not'features'in kw:
if self.env.CXX:
kw['features']=['cxx']
else:
kw['features']=['c']
for x in INLINE_VALUES:
kw['fragment']=INLINE_CODE%(x,x)
try:
self.check(**kw)
except self.errors.ConfigurationError:
continue
else:
self.end_msg(x)
if x!='inline':
self.define('inline',x,quote=False)
return x
self.fatal('could not use inline functions')
LARGE_FRAGMENT='#include <unistd.h>\nint main() { return !(sizeof(off_t) >= 8); }\n'
@conf
def check_large_file(self,**kw):
if not'define_name'in kw:
kw['define_name']='HAVE_LARGEFILE'
if not'execute'in kw:
kw['execute']=True
if not'features'in kw:
if self.env.CXX:
kw['features']=['cxx','cxxprogram']
else:
kw['features']=['c','cprogram']
kw['fragment']=LARGE_FRAGMENT
kw['msg']='Checking for large file support'
ret=True
try:
if self.env.DEST_BINFMT!='pe':
ret=self.check(**kw)
except self.errors.ConfigurationError:
pass
else:
if ret:
return True
kw['msg']='Checking for -D_FILE_OFFSET_BITS=64'
kw['defines']=['_FILE_OFFSET_BITS=64']
try:
ret=self.check(**kw)
except self.errors.ConfigurationError:
pass
else:
self.define('_FILE_OFFSET_BITS',64)
return ret
self.fatal('There is no support for large files')
ENDIAN_FRAGMENT='''
short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
int use_ascii (int i) {
return ascii_mm[i] + ascii_ii[i];
}
short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
int use_ebcdic (int i) {
return ebcdic_mm[i] + ebcdic_ii[i];
}
extern int foo;
'''
class grep_for_endianness(Task.Task):
color='PINK'
def run(self):
txt=self.inputs[0].read(flags='rb').decode('iso8859-1')
if txt.find('LiTTleEnDian')>-1:
self.generator.tmp.append('little')
elif txt.find('BIGenDianSyS')>-1:
self.generator.tmp.append('big')
else:
return-1
@feature('grep_for_endianness')
@after_method('process_source')
def grep_for_endianness_fun(self):
self.create_task('grep_for_endianness',self.compiled_tasks[0].outputs[0])
@conf
def check_endianness(self):
tmp=[]
def check_msg(self):
return tmp[0]
self.check(fragment=ENDIAN_FRAGMENT,features='c grep_for_endianness',msg="Checking for endianness",define='ENDIANNESS',tmp=tmp,okmsg=check_msg)
return tmp[0]
| gpl-2.0 | -6,602,674,087,827,309,000 | 27.868966 | 144 | 0.689202 | false |
Alexoner/mooc | coursera/nlpintro-001/Assignment2/solutionsA.py | 1 | 9462 | import math
import nltk
import time
import sys
# Constants to be used by you when you fill the functions
START_SYMBOL = '*'
STOP_SYMBOL = 'STOP'
MINUS_INFINITY_SENTENCE_LOG_PROB = -1000
log2 = lambda x: math.log(x, 2)
# TODO: IMPLEMENT THIS FUNCTION
# Calculates unigram, bigram, and trigram probabilities given a training corpus
# training_corpus: is a list of the sentences. Each sentence is a string with tokens separated by spaces, ending in a newline character.
# This function outputs three python dictionaries, where the keys are
# tuples expressing the ngram and the value is the log probability of that
# ngram
def calc_probabilities(training_corpus):
"""
this is docstring
"""
# unigram_tuples = []
# bigram_tuples = []
# trigram_tuples = []
unigram_count = {}
bigram_count = {}
trigram_count = {}
unigram_count_pnodes = {}
bigram_count_pnodes = {}
trigram_count_pnodes = {}
unigram_total = 0
bigram_total = 0
trigram_total = 0
print 'total {} sentences'.format(len(training_corpus))
for i in xrange(0, len(training_corpus)):
if i % 3000 == 0:
print 'processing ', i, 'th sentence...'
training_corpus[i] = START_SYMBOL + ' ' + training_corpus[i]
training_corpus[i] = training_corpus[i] + ' ' + STOP_SYMBOL
# training_corpus[i].replace('.',' ' + STOP_SYMBOL)
tokens = training_corpus[i].split()
unigram_tuples_i = list((token,) for token in tokens)
bigram_tuples_i = list(nltk.bigrams(tokens))
trigram_tuples_i = list(nltk.trigrams(tokens))
unigram_total += len(unigram_tuples_i)
bigram_total += len(bigram_tuples_i)
trigram_total += len(trigram_tuples_i)
for item in unigram_tuples_i:
if item in [(START_SYMBOL,)]:
continue
unigram_count.setdefault(item, 0)
unigram_count_pnodes.setdefault(item[0:-1], 0)
unigram_count[item] = unigram_count[item] + 1
unigram_count_pnodes[
item[0:-1]] = unigram_count_pnodes[item[0:-1]] + 1
for item in bigram_tuples_i:
bigram_count.setdefault(item, 0)
bigram_count_pnodes.setdefault(item[0:-1], 0)
bigram_count[item] = bigram_count[item] + 1
bigram_count_pnodes[
item[0:-1]] = bigram_count_pnodes[item[0:-1]] + 1
for item in trigram_tuples_i:
trigram_count.setdefault(item, 0)
trigram_count_pnodes.setdefault(item[0:-1], 0)
trigram_count[item] = trigram_count[item] + 1
trigram_count_pnodes[
item[0:-1]] = trigram_count_pnodes[item[0:-1]] + 1
    unigram_p = {
        item: math.log(unigram_count[item], 2) -
        math.log(unigram_count_pnodes[item[0:-1]], 2)
        for item in set(unigram_count)}
    bigram_p = {
        item: math.log(bigram_count[item], 2) -
        math.log(bigram_count_pnodes[item[0:-1]], 2)
        for item in set(bigram_count)}
    trigram_p = {
        item: math.log(trigram_count[item], 2) -
        math.log(trigram_count_pnodes[item[0:-1]], 2)
        for item in set(trigram_count)}
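    # Each entry is the maximum-likelihood estimate in log space:
    # log2(count(ngram)) - log2(count(prefix)) == log2(count(ngram) / count(prefix)).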
print "calc_probabilities finished!"
return unigram_p, bigram_p, trigram_p
# Prints the output for q1
# Each input is a python dictionary where keys are a tuple expressing the
# ngram, and the value is the log probability of that ngram
def q1_output(unigrams, bigrams, trigrams, filename):
# output probabilities
outfile = open(filename, 'w')
unigrams_keys = sorted(unigrams.keys())
for unigram in unigrams_keys:
outfile.write('UNIGRAM ' +
unigram[0] +
' ' +
str(unigrams[unigram]) +
'\n')
outfile.flush()
bigrams_keys = sorted(bigrams.keys())
for bigram in bigrams_keys:
outfile.write('BIGRAM ' +
bigram[0] +
' ' +
bigram[1] +
' ' +
str(bigrams[bigram]) +
'\n')
outfile.flush()
trigrams_keys = sorted(trigrams.keys())
for trigram in trigrams_keys:
outfile.write('TRIGRAM ' +
trigram[0] +
' ' +
trigram[1] +
' ' +
trigram[2] +
' ' +
str(trigrams[trigram]) +
'\n')
outfile.flush()
outfile.close()
# TODO: IMPLEMENT THIS FUNCTION
# Calculates scores (log probabilities) for every sentence
# ngram_p: python dictionary of probabilities of uni-, bi- and trigrams.
# n: size of the ngram you want to use to compute probabilities
# corpus: list of sentences to score. Each sentence is a string with tokens separated by spaces, ending in a newline character.
# This function must return a python list of scores, where the first
# element is the score of the first sentence, etc.
def score(ngram_p, n, corpus):
print "scoring corpus for ", n, "-grams"
scores = []
for i, sentence in enumerate(corpus):
ngram_tuples = None
score_i = 0
if i % 10000 == 0:
print 'scoring ', i, 'th sentence...'
tokens = sentence.split()
if n == 1:
ngram_tuples = list([(token,) for token in tokens])
elif n == 2:
ngram_tuples = list(nltk.bigrams(tokens))
elif n == 3:
ngram_tuples = list(nltk.trigrams(tokens))
try:
score_i = sum([ngram_p[gram] for gram in ngram_tuples
if gram not in [(START_SYMBOL,)]])
except KeyError as error:
score_i = MINUS_INFINITY_SENTENCE_LOG_PROB
print 'ngram_tuple ', gram, ' not in dict ', error.message
scores.append(score_i)
return scores
# Outputs a score to a file
# scores: list of scores
# filename: is the output file name
def score_output(scores, filename):
outfile = open(filename, 'w')
for score in scores:
outfile.write(str(score) + '\n')
outfile.close()
# TODO: IMPLEMENT THIS FUNCTION
# Calculates scores (log probabilities) for every sentence with a linearly interpolated model
# Each ngram argument is a python dictionary where the keys are tuples that express an ngram and the value is the log probability of that ngram
# Like score(), this function returns a python list of scores
# TODO: `EM` algorithm to find the optimal weights.
def linearscore(unigrams, bigrams, trigrams, corpus):
scores = []
weights = (1. / 3, 1. / 3, 1. / 3,)
for i, sentence in enumerate(corpus):
if i % 3000 == 0:
print 'linearscore ', i, 'th sentence...'
score_i = 0
tokens = sentence.split()
trigram_tuples = list(nltk.trigrams(tokens))
try:
for trigram in trigram_tuples:
score_i += log2(sum([weights[0] * 2 ** trigrams[trigram[0:]],
weights[1] * 2 ** bigrams[trigram[1:]],
weights[2] * 2 ** unigrams[trigram[2:]],
]))
except KeyError as e:
score_i = MINUS_INFINITY_SENTENCE_LOG_PROB
print i, 'th sentence', 'ngram ', trigram, ' not in dict', e.message
scores.append(score_i)
return scores
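# The equal 1/3 weights used in linearscore() are a placeholder; as the TODO above
# notes, they could instead be tuned on held-out data (e.g. with deleted
# interpolation / EM, re-estimating each lambda from how much its model contributes
# to the interpolated probability of every held-out trigram).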
DATA_PATH = 'data/'
OUTPUT_PATH = 'output/'
# DO NOT MODIFY THE MAIN FUNCTION
def main():
# start timer
time.clock()
# get data
infile = open(DATA_PATH + 'Brown_train.txt', 'r')
corpus = infile.readlines()
infile.close()
# calculate ngram probabilities (question 1)
unigrams, bigrams, trigrams = calc_probabilities(corpus)
# question 1 output
q1_output(unigrams, bigrams, trigrams, OUTPUT_PATH + 'A1.txt')
# score sentences (question 2)
uniscores = score(unigrams, 1, corpus)
biscores = score(bigrams, 2, corpus)
triscores = score(trigrams, 3, corpus)
# question 2 output
score_output(uniscores, OUTPUT_PATH + 'A2.uni.txt')
score_output(biscores, OUTPUT_PATH + 'A2.bi.txt')
score_output(triscores, OUTPUT_PATH + 'A2.tri.txt')
# linear interpolation (question 3)
linearscores = linearscore(unigrams, bigrams, trigrams, corpus)
# question 3 output
score_output(linearscores, OUTPUT_PATH + 'A3.txt')
# open Sample1 and Sample2 (question 5)
infile = open(DATA_PATH + 'Sample1.txt', 'r')
sample1 = infile.readlines()
infile.close()
infile = open(DATA_PATH + 'Sample2.txt', 'r')
sample2 = infile.readlines()
infile.close()
# score the samples
sample1scores = linearscore(unigrams, bigrams, trigrams, sample1)
sample2scores = linearscore(unigrams, bigrams, trigrams, sample2)
# question 5 output
score_output(sample1scores, OUTPUT_PATH + 'Sample1_scored.txt')
score_output(sample2scores, OUTPUT_PATH + 'Sample2_scored.txt')
# print total time to run Part A
print("Part A time: " + str(time.clock()) + ' sec')
if __name__ == "__main__":
main()
| apache-2.0 | 5,338,700,109,794,599,000 | 32.792857 | 143 | 0.570915 | false |
mozilla/kitsune | kitsune/questions/tests/test_api.py | 1 | 28190 | import json
from datetime import datetime, timedelta
from unittest import mock
import actstream.actions
from actstream.models import Follow
from nose.tools import eq_, ok_, raises
from rest_framework.test import APIClient
from rest_framework.exceptions import APIException
from taggit.models import Tag
from kitsune.sumo.tests import TestCase
from kitsune.questions import api
from kitsune.questions.models import Question, Answer
from kitsune.questions.tests import (
tags_eq,
QuestionFactory,
AnswerFactory,
QuestionVoteFactory,
AnswerVoteFactory,
)
from kitsune.products.tests import ProductFactory, TopicFactory
from kitsune.sumo.urlresolvers import reverse
from kitsune.tags.tests import TagFactory
from kitsune.users.templatetags.jinja_helpers import profile_avatar
from kitsune.users.models import Profile
from kitsune.users.tests import UserFactory, add_permission
class TestQuestionSerializerDeserialization(TestCase):
def setUp(self):
self.user = UserFactory()
self.product = ProductFactory()
self.topic = TopicFactory(product=self.product)
self.request = mock.Mock()
self.request.user = self.user
self.context = {
"request": self.request,
}
self.data = {
"creator": self.user.profile,
"title": "How do I test programs?",
"content": "Help, I don't know what to do.",
"product": self.product.slug,
"topic": self.topic.slug,
}
def test_it_works(self):
serializer = api.QuestionSerializer(context=self.context, data=self.data)
serializer.is_valid(raise_exception=True)
def test_automatic_creator(self):
del self.data["creator"]
serializer = api.QuestionSerializer(context=self.context, data=self.data)
serializer.is_valid(raise_exception=True)
obj = serializer.save()
eq_(obj.creator, self.user)
def test_product_required(self):
del self.data["product"]
serializer = api.QuestionSerializer(context=self.context, data=self.data)
ok_(not serializer.is_valid())
eq_(
serializer.errors,
{
"product": ["This field is required."],
"topic": ["A product must be specified to select a topic."],
},
)
def test_topic_required(self):
del self.data["topic"]
serializer = api.QuestionSerializer(context=self.context, data=self.data)
ok_(not serializer.is_valid())
eq_(
serializer.errors,
{
"topic": ["This field is required."],
},
)
def test_topic_disambiguation(self):
# First make another product, and a colliding topic.
# It has the same slug, but a different product.
new_product = ProductFactory()
TopicFactory(product=new_product, slug=self.topic.slug)
serializer = api.QuestionSerializer(context=self.context, data=self.data)
serializer.is_valid(raise_exception=True)
obj = serializer.save()
eq_(obj.topic, self.topic)
def test_solution_is_readonly(self):
q = QuestionFactory()
a = AnswerFactory(question=q)
self.data["solution"] = a.id
serializer = api.QuestionSerializer(context=self.context, data=self.data, instance=q)
serializer.is_valid(raise_exception=True)
serializer.save()
eq_(q.solution, None)
class TestQuestionSerializerSerialization(TestCase):
def setUp(self):
self.asker = UserFactory()
self.helper1 = UserFactory()
self.helper2 = UserFactory()
self.question = QuestionFactory(creator=self.asker)
def _names(self, *users):
return sorted(
(
{
"username": u.username,
"display_name": Profile.objects.get(user=u).name,
"avatar": profile_avatar(u),
}
for u in users
),
key=lambda d: d["username"],
)
def _answer(self, user):
return AnswerFactory(question=self.question, creator=user)
def test_no_votes(self):
serializer = api.QuestionSerializer(instance=self.question)
eq_(serializer.data["num_votes"], 0)
def test_with_votes(self):
QuestionVoteFactory(question=self.question)
QuestionVoteFactory(question=self.question)
QuestionVoteFactory()
serializer = api.QuestionSerializer(instance=self.question)
eq_(serializer.data["num_votes"], 2)
def test_just_asker(self):
serializer = api.QuestionSerializer(instance=self.question)
eq_(serializer.data["involved"], self._names(self.asker))
def test_one_answer(self):
self._answer(self.helper1)
serializer = api.QuestionSerializer(instance=self.question)
eq_(
sorted(serializer.data["involved"], key=lambda d: d["username"]),
self._names(self.asker, self.helper1),
)
def test_asker_and_response(self):
self._answer(self.helper1)
self._answer(self.asker)
serializer = api.QuestionSerializer(instance=self.question)
eq_(
sorted(serializer.data["involved"], key=lambda d: d["username"]),
self._names(self.asker, self.helper1),
)
def test_asker_and_two_answers(self):
self._answer(self.helper1)
self._answer(self.asker)
self._answer(self.helper2)
serializer = api.QuestionSerializer(instance=self.question)
eq_(
sorted(serializer.data["involved"], key=lambda d: d["username"]),
self._names(self.asker, self.helper1, self.helper2),
)
def test_solution_is_id(self):
a = self._answer(self.helper1)
self.question.solution = a
self.question.save()
serializer = api.QuestionSerializer(instance=self.question)
eq_(serializer.data["solution"], a.id)
def test_creator_is_object(self):
serializer = api.QuestionSerializer(instance=self.question)
eq_(
serializer.data["creator"],
{
"username": self.question.creator.username,
"display_name": Profile.objects.get(user=self.question.creator).display_name,
"avatar": profile_avatar(self.question.creator),
},
)
def test_with_tags(self):
self.question.tags.add("tag1")
self.question.tags.add("tag2")
serializer = api.QuestionSerializer(instance=self.question)
eq_(
serializer.data["tags"],
[
{"name": "tag1", "slug": "tag1"},
{"name": "tag2", "slug": "tag2"},
],
)
class TestQuestionViewSet(TestCase):
def setUp(self):
self.client = APIClient()
def test_create(self):
u = UserFactory()
p = ProductFactory()
t = TopicFactory(product=p)
self.client.force_authenticate(user=u)
data = {
"title": "How do I start Firefox?",
"content": "Seriously, what do I do?",
"product": p.slug,
"topic": t.slug,
}
eq_(Question.objects.count(), 0)
res = self.client.post(reverse("question-list"), data)
eq_(res.status_code, 201)
eq_(Question.objects.count(), 1)
q = Question.objects.all()[0]
eq_(q.title, data["title"])
eq_(q.content, data["content"])
eq_(q.content_parsed, res.data["content"])
def test_delete_permissions(self):
u1 = UserFactory()
u2 = UserFactory()
q = QuestionFactory(creator=u1)
# Anonymous user can't delete
self.client.force_authenticate(user=None)
res = self.client.delete(reverse("question-detail", args=[q.id]))
eq_(res.status_code, 401) # Unauthorized
# Non-owner can't delete
self.client.force_authenticate(user=u2)
res = self.client.delete(reverse("question-detail", args=[q.id]))
eq_(res.status_code, 403) # Forbidden
# Owner can delete
self.client.force_authenticate(user=u1)
res = self.client.delete(reverse("question-detail", args=[q.id]))
eq_(res.status_code, 204) # No content
def test_solve(self):
q = QuestionFactory()
a = AnswerFactory(question=q)
self.client.force_authenticate(user=q.creator)
res = self.client.post(reverse("question-solve", args=[q.id]), data={"answer": a.id})
eq_(res.status_code, 204)
q = Question.objects.get(id=q.id)
eq_(q.solution, a)
def test_filter_is_taken_true(self):
q1 = QuestionFactory()
q2 = QuestionFactory()
q2.take(q1.creator)
url = reverse("question-list") + "?is_taken=1"
res = self.client.get(url)
eq_(res.status_code, 200)
eq_(res.data["count"], 1)
eq_(res.data["results"][0]["id"], q2.id)
def test_filter_is_taken_false(self):
q1 = QuestionFactory()
q2 = QuestionFactory()
q2.take(q1.creator)
url = reverse("question-list") + "?is_taken=0"
res = self.client.get(url)
eq_(res.status_code, 200)
eq_(res.data["count"], 1)
eq_(res.data["results"][0]["id"], q1.id)
def test_filter_is_taken_expired(self):
q = QuestionFactory()
# "take" the question, but with an expired timer.
q.taken_by = UserFactory()
q.taken_until = datetime.now() - timedelta(seconds=60)
url = reverse("question-list") + "?is_taken=1"
res = self.client.get(url)
eq_(res.status_code, 200)
eq_(res.data["count"], 0)
def test_filter_taken_by_username(self):
q1 = QuestionFactory()
q2 = QuestionFactory()
q2.take(q1.creator)
url = reverse("question-list") + "?taken_by=" + q1.creator.username
res = self.client.get(url)
eq_(res.status_code, 200)
eq_(res.data["count"], 1)
eq_(res.data["results"][0]["id"], q2.id)
def test_helpful(self):
q = QuestionFactory()
u = UserFactory()
self.client.force_authenticate(user=u)
res = self.client.post(reverse("question-helpful", args=[q.id]))
eq_(res.status_code, 200)
eq_(res.data, {"num_votes": 1})
eq_(Question.objects.get(id=q.id).num_votes, 1)
def test_helpful_double_vote(self):
q = QuestionFactory()
u = UserFactory()
QuestionVoteFactory(question=q, creator=u)
self.client.force_authenticate(user=u)
res = self.client.post(reverse("question-helpful", args=[q.id]))
eq_(res.status_code, 409)
        # It's 1, not 0, because one vote was created above. The failure case
        # would be 2 votes: one from above and one from the API call.
eq_(Question.objects.get(id=q.id).num_votes, 1)
def test_helpful_question_not_editable(self):
q = QuestionFactory(is_locked=True)
u = UserFactory()
self.client.force_authenticate(user=u)
res = self.client.post(reverse("question-helpful", args=[q.id]))
eq_(res.status_code, 403)
eq_(Question.objects.get(id=q.id).num_votes, 0)
def test_ordering(self):
q1 = QuestionFactory()
q2 = QuestionFactory()
res = self.client.get(reverse("question-list"))
eq_(res.data["results"][0]["id"], q2.id)
eq_(res.data["results"][1]["id"], q1.id)
res = self.client.get(reverse("question-list") + "?ordering=id")
eq_(res.data["results"][0]["id"], q1.id)
eq_(res.data["results"][1]["id"], q2.id)
res = self.client.get(reverse("question-list") + "?ordering=-id")
eq_(res.data["results"][0]["id"], q2.id)
eq_(res.data["results"][1]["id"], q1.id)
def test_filter_product_with_slug(self):
p1 = ProductFactory()
p2 = ProductFactory()
q1 = QuestionFactory(product=p1)
QuestionFactory(product=p2)
querystring = "?product={0}".format(p1.slug)
res = self.client.get(reverse("question-list") + querystring)
eq_(len(res.data["results"]), 1)
eq_(res.data["results"][0]["id"], q1.id)
def test_filter_creator_with_username(self):
q1 = QuestionFactory()
QuestionFactory()
querystring = "?creator={0}".format(q1.creator.username)
res = self.client.get(reverse("question-list") + querystring)
eq_(res.status_code, 200)
eq_(len(res.data["results"]), 1)
eq_(res.data["results"][0]["id"], q1.id)
def test_filter_involved(self):
q1 = QuestionFactory()
a1 = AnswerFactory(question=q1)
q2 = QuestionFactory(creator=a1.creator)
querystring = "?involved={0}".format(q1.creator.username)
res = self.client.get(reverse("question-list") + querystring)
eq_(res.status_code, 200)
eq_(len(res.data["results"]), 1)
eq_(res.data["results"][0]["id"], q1.id)
querystring = "?involved={0}".format(q2.creator.username)
res = self.client.get(reverse("question-list") + querystring)
eq_(res.status_code, 200)
eq_(len(res.data["results"]), 2)
# The API has a default sort, so ordering will be consistent.
eq_(res.data["results"][0]["id"], q2.id)
eq_(res.data["results"][1]["id"], q1.id)
def test_is_taken(self):
q = QuestionFactory()
u = UserFactory()
q.take(u)
url = reverse("question-detail", args=[q.id])
res = self.client.get(url)
eq_(res.status_code, 200)
eq_(res.data["taken_by"]["username"], u.username)
def test_take(self):
q = QuestionFactory()
u = UserFactory()
self.client.force_authenticate(user=u)
res = self.client.post(reverse("question-take", args=[q.id]))
eq_(res.status_code, 204)
q = Question.objects.get(id=q.id)
eq_(q.taken_by, u)
def test_take_by_owner(self):
q = QuestionFactory()
self.client.force_authenticate(user=q.creator)
res = self.client.post(reverse("question-take", args=[q.id]))
eq_(res.status_code, 400)
q = Question.objects.get(id=q.id)
eq_(q.taken_by, None)
def test_take_conflict(self):
u1 = UserFactory()
u2 = UserFactory()
taken_until = datetime.now() + timedelta(seconds=30)
q = QuestionFactory(taken_until=taken_until, taken_by=u1)
self.client.force_authenticate(user=u2)
res = self.client.post(reverse("question-take", args=[q.id]))
eq_(res.status_code, 409)
q = Question.objects.get(id=q.id)
eq_(q.taken_by, u1)
def test_follow(self):
q = QuestionFactory()
u = UserFactory()
self.client.force_authenticate(user=u)
res = self.client.post(reverse("question-follow", args=[q.id]))
eq_(res.status_code, 204)
f = Follow.objects.get(user=u)
eq_(f.follow_object, q)
eq_(f.actor_only, False)
def test_unfollow(self):
q = QuestionFactory()
u = UserFactory()
actstream.actions.follow(u, q, actor_only=False)
eq_(Follow.objects.filter(user=u).count(), 1) # pre-condition
self.client.force_authenticate(user=u)
res = self.client.post(reverse("question-unfollow", args=[q.id]))
eq_(res.status_code, 204)
eq_(Follow.objects.filter(user=u).count(), 0)
def test_add_tags(self):
q = QuestionFactory()
eq_(0, q.tags.count())
u = UserFactory()
add_permission(u, Tag, "add_tag")
self.client.force_authenticate(user=u)
res = self.client.post(
reverse("question-add-tags", args=[q.id]),
content_type="application/json",
data=json.dumps({"tags": ["test", "more", "tags"]}),
)
eq_(res.status_code, 200)
eq_(3, q.tags.count())
def test_remove_tags(self):
q = QuestionFactory()
q.tags.add("test")
q.tags.add("more")
q.tags.add("tags")
eq_(3, q.tags.count())
u = UserFactory()
self.client.force_authenticate(user=u)
res = self.client.post(
reverse("question-remove-tags", args=[q.id]),
content_type="application/json",
data=json.dumps({"tags": ["more", "tags"]}),
)
eq_(res.status_code, 204)
eq_(1, q.tags.count())
def test_bleaching(self):
"""Tests whether question content is bleached."""
q = QuestionFactory(content="<unbleached>Cupcakes are the best</unbleached>")
url = reverse("question-detail", args=[q.id])
res = self.client.get(url)
eq_(res.status_code, 200)
assert "<unbleached>" not in res.data["content"]
def test_auto_tagging(self):
"""Test that questions created via the API are auto-tagged."""
TagFactory(name="desktop")
q = QuestionFactory()
self.client.force_authenticate(user=q.creator)
tags_eq(q, [])
res = self.client.post(
reverse("question-set-metadata", args=[q.id]),
content_type="application/json",
data=json.dumps({"name": "product", "value": "desktop"}),
)
eq_(res.status_code, 200)
tags_eq(q, [])
res = self.client.post(
reverse("question-auto-tag", args=[q.id]), content_type="application/json"
)
eq_(res.status_code, 204)
tags_eq(q, ["desktop"])
class TestAnswerSerializerDeserialization(TestCase):
def test_no_votes(self):
a = AnswerFactory()
serializer = api.AnswerSerializer(instance=a)
eq_(serializer.data["num_helpful_votes"], 0)
eq_(serializer.data["num_unhelpful_votes"], 0)
def test_with_votes(self):
a = AnswerFactory()
AnswerVoteFactory(answer=a, helpful=True)
AnswerVoteFactory(answer=a, helpful=True)
AnswerVoteFactory(answer=a, helpful=False)
AnswerVoteFactory()
serializer = api.AnswerSerializer(instance=a)
eq_(serializer.data["num_helpful_votes"], 2)
eq_(serializer.data["num_unhelpful_votes"], 1)
class TestAnswerViewSet(TestCase):
def setUp(self):
self.client = APIClient()
def test_create(self):
q = QuestionFactory()
u = UserFactory()
self.client.force_authenticate(user=u)
data = {
"question": q.id,
"content": "You just need to click the fox.",
}
eq_(Answer.objects.count(), 0)
res = self.client.post(reverse("answer-list"), data)
eq_(res.status_code, 201)
eq_(Answer.objects.count(), 1)
a = Answer.objects.all()[0]
eq_(a.content, data["content"])
eq_(a.content_parsed, res.data["content"])
eq_(a.question, q)
def test_delete_permissions(self):
u1 = UserFactory()
u2 = UserFactory()
a = AnswerFactory(creator=u1)
# Anonymous user can't delete
self.client.force_authenticate(user=None)
res = self.client.delete(reverse("answer-detail", args=[a.id]))
eq_(res.status_code, 401) # Unauthorized
        # Non-owner can't delete
self.client.force_authenticate(user=u2)
res = self.client.delete(reverse("answer-detail", args=[a.id]))
eq_(res.status_code, 403) # Forbidden
# Owner can delete
self.client.force_authenticate(user=u1)
res = self.client.delete(reverse("answer-detail", args=[a.id]))
eq_(res.status_code, 204) # No content
def test_ordering(self):
a1 = AnswerFactory()
a2 = AnswerFactory()
res = self.client.get(reverse("answer-list"))
eq_(res.data["results"][0]["id"], a2.id)
eq_(res.data["results"][1]["id"], a1.id)
res = self.client.get(reverse("answer-list") + "?ordering=id")
eq_(res.data["results"][0]["id"], a1.id)
eq_(res.data["results"][1]["id"], a2.id)
res = self.client.get(reverse("answer-list") + "?ordering=-id")
eq_(res.data["results"][0]["id"], a2.id)
eq_(res.data["results"][1]["id"], a1.id)
def test_helpful(self):
a = AnswerFactory()
u = UserFactory()
self.client.force_authenticate(user=u)
res = self.client.post(reverse("answer-helpful", args=[a.id]))
eq_(res.status_code, 200)
eq_(res.data, {"num_helpful_votes": 1, "num_unhelpful_votes": 0})
eq_(Answer.objects.get(id=a.id).num_votes, 1)
def test_helpful_double_vote(self):
a = AnswerFactory()
u = UserFactory()
AnswerVoteFactory(answer=a, creator=u)
self.client.force_authenticate(user=u)
res = self.client.post(reverse("answer-helpful", args=[a.id]))
eq_(res.status_code, 409)
        # It's 1, not 0, because one vote was created above. The failure case
        # would be 2 votes: one from above and one from the API call.
eq_(Answer.objects.get(id=a.id).num_votes, 1)
def test_helpful_answer_not_editable(self):
q = QuestionFactory(is_locked=True)
a = AnswerFactory(question=q)
u = UserFactory()
self.client.force_authenticate(user=u)
res = self.client.post(reverse("answer-helpful", args=[a.id]))
eq_(res.status_code, 403)
eq_(Answer.objects.get(id=a.id).num_votes, 0)
def test_follow(self):
a = AnswerFactory()
u = UserFactory()
self.client.force_authenticate(user=u)
eq_(Follow.objects.filter(user=u).count(), 0) # pre-condition
res = self.client.post(reverse("answer-follow", args=[a.id]))
eq_(res.status_code, 204)
f = Follow.objects.get(user=u)
eq_(f.follow_object, a)
eq_(f.actor_only, False)
def test_unfollow(self):
a = AnswerFactory()
u = UserFactory()
actstream.actions.follow(u, a, actor_only=False)
eq_(Follow.objects.filter(user=u).count(), 1) # pre-condition
self.client.force_authenticate(user=u)
res = self.client.post(reverse("answer-unfollow", args=[a.id]))
eq_(res.status_code, 204)
eq_(Follow.objects.filter(user=u).count(), 0)
def test_bleaching(self):
"""Tests whether answer content is bleached."""
a = AnswerFactory(content="<unbleached>Cupcakes are the best</unbleached>")
url = reverse("answer-detail", args=[a.id])
res = self.client.get(url)
eq_(res.status_code, 200)
assert "<unbleached>" not in res.data["content"]
class TestQuestionFilter(TestCase):
def setUp(self):
self.filter_instance = api.QuestionFilter()
self.queryset = Question.objects.all()
def filter(self, filter_data):
return self.filter_instance.filter_metadata(
self.queryset, "metadata", json.dumps(filter_data)
)
def test_filter_involved(self):
q1 = QuestionFactory()
a1 = AnswerFactory(question=q1)
q2 = QuestionFactory(creator=a1.creator)
qs = self.filter_instance.filter_involved(
self.queryset, "filter_involved", q1.creator.username
)
eq_(list(qs), [q1])
qs = self.filter_instance.filter_involved(
self.queryset, "filter_involved", q2.creator.username
)
# The filter does not have a strong order.
qs = sorted(qs, key=lambda q: q.id)
eq_(qs, [q1, q2])
def test_filter_is_solved(self):
q1 = QuestionFactory()
a1 = AnswerFactory(question=q1)
q1.solution = a1
q1.save()
q2 = QuestionFactory()
qs = self.filter_instance.filter_is_solved(self.queryset, "is_solved", True)
eq_(list(qs), [q1])
qs = self.filter_instance.filter_is_solved(self.queryset, "is_solved", False)
eq_(list(qs), [q2])
def test_filter_solved_by(self):
q1 = QuestionFactory()
a1 = AnswerFactory(question=q1)
q1.solution = a1
q1.save()
q2 = QuestionFactory()
AnswerFactory(question=q2, creator=a1.creator)
q3 = QuestionFactory()
a3 = AnswerFactory(question=q3)
q3.solution = a3
q3.save()
qs = self.filter_instance.filter_solved_by(self.queryset, "solved_by", a1.creator.username)
eq_(list(qs), [q1])
qs = self.filter_instance.filter_solved_by(self.queryset, "solved_by", a3.creator.username)
eq_(list(qs), [q3])
@raises(APIException)
def test_metadata_not_json(self):
self.filter_instance.filter_metadata(self.queryset, "metadata", "not json")
@raises(APIException)
def test_metadata_bad_json(self):
self.filter_instance.filter_metadata(self.queryset, "metadata", "not json")
def test_single_filter_match(self):
q1 = QuestionFactory(metadata={"os": "Linux"})
QuestionFactory(metadata={"os": "OSX"})
res = self.filter({"os": "Linux"})
eq_(list(res), [q1])
def test_single_filter_no_match(self):
QuestionFactory(metadata={"os": "Linux"})
QuestionFactory(metadata={"os": "OSX"})
res = self.filter({"os": "Windows 8"})
eq_(list(res), [])
def test_multi_filter_is_and(self):
q1 = QuestionFactory(metadata={"os": "Linux", "category": "troubleshooting"})
QuestionFactory(metadata={"os": "OSX", "category": "troubleshooting"})
res = self.filter({"os": "Linux", "category": "troubleshooting"})
eq_(list(res), [q1])
def test_list_value_is_or(self):
q1 = QuestionFactory(metadata={"os": "Linux"})
q2 = QuestionFactory(metadata={"os": "OSX"})
QuestionFactory(metadata={"os": "Windows 7"})
res = self.filter({"os": ["Linux", "OSX"]})
eq_(sorted(res, key=lambda q: q.id), [q1, q2])
def test_none_value_is_missing(self):
q1 = QuestionFactory(metadata={})
QuestionFactory(metadata={"os": "Linux"})
res = self.filter({"os": None})
eq_(list(res), [q1])
def test_list_value_with_none(self):
q1 = QuestionFactory(metadata={"os": "Linux"})
q2 = QuestionFactory(metadata={})
QuestionFactory(metadata={"os": "Windows 7"})
res = self.filter({"os": ["Linux", None]})
eq_(sorted(res, key=lambda q: q.id), [q1, q2])
def test_is_taken(self):
u = UserFactory()
taken_until = datetime.now() + timedelta(seconds=30)
q = QuestionFactory(taken_by=u, taken_until=taken_until)
QuestionFactory()
res = self.filter_instance.filter_is_taken(self.queryset, "is_taken", True)
eq_(list(res), [q])
def test_is_not_taken(self):
u = UserFactory()
taken_until = datetime.now() + timedelta(seconds=30)
QuestionFactory(taken_by=u, taken_until=taken_until)
q = QuestionFactory()
res = self.filter_instance.filter_is_taken(self.queryset, "is_taken", False)
eq_(list(res), [q])
def test_is_taken_expired(self):
u = UserFactory()
taken_until = datetime.now() - timedelta(seconds=30)
QuestionFactory(taken_by=u, taken_until=taken_until)
res = self.filter_instance.filter_is_taken(self.queryset, "is_taken", True)
eq_(list(res), [])
def test_is_not_taken_expired(self):
u = UserFactory()
taken_until = datetime.now() - timedelta(seconds=30)
q = QuestionFactory(taken_by=u, taken_until=taken_until)
res = self.filter_instance.filter_is_taken(self.queryset, "is_taken", False)
eq_(list(res), [q])
def test_it_works_with_users_who_have_gotten_first_contrib_emails(self):
# This flag caused a regression, tracked in bug 1163855.
# The error was that the help text on the field was a str instead of a
# unicode. Yes, really, that matters apparently.
u = UserFactory(profile__first_answer_email_sent=True)
QuestionFactory(creator=u)
url = reverse("question-list")
res = self.client.get(url)
eq_(res.status_code, 200)
| bsd-3-clause | 2,663,527,402,003,844,000 | 34.86514 | 99 | 0.593083 | false |
soumyanishan/azure-linux-extensions | VMAccess/vmaccess.py | 1 | 18922 | #!/usr/bin/env python
#
# VMAccess extension
#
# Copyright 2014 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import platform
import re
import shutil
import sys
import tempfile
import time
import traceback
import Utils.HandlerUtil as Util
from waagentloader import load_waagent
waagent = load_waagent()
# Define global variables
ExtensionShortName = 'VMAccess'
BeginCertificateTag = '-----BEGIN CERTIFICATE-----'
EndCertificateTag = '-----END CERTIFICATE-----'
OutputSplitter = ';'
SshdConfigPath = '/etc/ssh/sshd_config'
def main():
waagent.LoggerInit('/var/log/waagent.log', '/dev/stdout')
waagent.Log("%s started to handle." % (ExtensionShortName))
waagent.MyDistro = waagent.GetMyDistro()
try:
for a in sys.argv[1:]:
if re.match("^([-/]*)(disable)", a):
disable()
elif re.match("^([-/]*)(uninstall)", a):
uninstall()
elif re.match("^([-/]*)(install)", a):
install()
elif re.match("^([-/]*)(enable)", a):
enable()
elif re.match("^([-/]*)(update)", a):
update()
except Exception as e:
err_msg = "Failed with error: {0}, {1}".format(e, traceback.format_exc())
waagent.Error(err_msg)
def install():
hutil = Util.HandlerUtility(waagent.Log, waagent.Error)
hutil.do_parse_context('Uninstall')
hutil.do_exit(0, 'Install', 'success', '0', 'Install Succeeded')
def enable():
hutil = Util.HandlerUtility(waagent.Log, waagent.Error)
hutil.do_parse_context('Enable')
try:
_forcibly_reset_chap(hutil)
reset_ssh = None
remove_user = None
protect_settings = hutil.get_protected_settings()
if protect_settings:
reset_ssh = protect_settings.get('reset_ssh')
remove_user = protect_settings.get('remove_user')
if remove_user and _is_sshd_config_modified(protect_settings):
hutil.error("Cannot reset sshd_config and remove a user in one operation.")
waagent.AddExtensionEvent(name=hutil.get_name(),
op=waagent.WALAEventOperation.Enable,
isSuccess=False,
message="(03002)Argument error, conflicting operations")
hutil.do_exit(1, 'Enable', 'error', '0', 'Enable failed.')
# check port each time the VM boots up
if reset_ssh:
_open_ssh_port()
hutil.log("Succeeded in check and open ssh port.")
hutil.exit_if_enabled()
if _is_sshd_config_modified(protect_settings):
_backup_sshd_config(SshdConfigPath)
if reset_ssh:
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="reset-ssh")
_reset_sshd_config(SshdConfigPath)
hutil.log("Succeeded in reset sshd_config.")
if remove_user:
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="remove-user")
_remove_user_account(remove_user, hutil)
_set_user_account_pub_key(protect_settings, hutil)
if _is_sshd_config_modified(protect_settings):
waagent.MyDistro.restartSshService()
check_and_repair_disk(hutil)
hutil.do_exit(0, 'Enable', 'success', '0', 'Enable succeeded.')
except Exception as e:
hutil.error(("Failed to enable the extension with error: {0}, "
"stack trace: {1}").format(str(e), traceback.format_exc()))
hutil.do_exit(1, 'Enable', 'error', '0', 'Enable failed.')
def _forcibly_reset_chap(hutil):
name = "ChallengeResponseAuthentication"
config = waagent.GetFileContents(SshdConfigPath).split("\n")
for i in range(0, len(config)):
if config[i].startswith(name) and "no" in config[i].lower():
waagent.AddExtensionEvent(name=hutil.get_name(), op="sshd", isSuccess=True, message="ChallengeResponseAuthentication no")
return
waagent.AddExtensionEvent(name=hutil.get_name(), op="sshd", isSuccess=True, message="ChallengeResponseAuthentication yes")
_backup_sshd_config(SshdConfigPath)
_set_sshd_config(config, name, "no")
waagent.ReplaceFileContentsAtomic(SshdConfigPath, "\n".join(config))
waagent.MyDistro.restartSshService()
def _is_sshd_config_modified(protected_settings):
result = protected_settings.get('reset_ssh') or protected_settings.get('password')
return result is not None
def uninstall():
hutil = Util.HandlerUtility(waagent.Log, waagent.Error)
hutil.do_parse_context('Uninstall')
hutil.do_exit(0, 'Uninstall', 'success', '0', 'Uninstall succeeded')
def disable():
hutil = Util.HandlerUtility(waagent.Log, waagent.Error)
hutil.do_parse_context('Disable')
hutil.do_exit(0, 'Disable', 'success', '0', 'Disable Succeeded')
def update():
hutil = Util.HandlerUtility(waagent.Log, waagent.Error)
hutil.do_parse_context('Update')
hutil.do_exit(0, 'Update', 'success', '0', 'Update Succeeded')
def _remove_user_account(user_name, hutil):
hutil.log("Removing user account")
try:
sudoers = _get_other_sudoers(user_name)
waagent.MyDistro.DeleteAccount(user_name)
_save_other_sudoers(sudoers)
except Exception as e:
waagent.AddExtensionEvent(name=hutil.get_name(),
op=waagent.WALAEventOperation.Enable,
isSuccess=False,
message="(02102)Failed to remove user.")
raise Exception("Failed to remove user {0}".format(e))
waagent.AddExtensionEvent(name=hutil.get_name(),
op=waagent.WALAEventOperation.Enable,
isSuccess=True,
message="Successfully removed user")
def _set_user_account_pub_key(protect_settings, hutil):
ovf_xml = waagent.GetFileContents('/var/lib/waagent/ovf-env.xml')
ovf_env = waagent.OvfEnv().Parse(ovf_xml)
# user name must be provided if set ssh key or password
if not protect_settings or not protect_settings.has_key('username'):
return
user_name = protect_settings['username']
user_pass = protect_settings.get('password')
cert_txt = protect_settings.get('ssh_key')
expiration = protect_settings.get('expiration')
no_convert = False
if not user_pass and not cert_txt and not ovf_env.SshPublicKeys:
raise Exception("No password or ssh_key is specified.")
if user_pass is not None and len(user_pass) == 0:
user_pass = None
hutil.log("empty passwords are not allowed, ignoring password reset")
# Reset user account and password, password could be empty
sudoers = _get_other_sudoers(user_name)
error_string = waagent.MyDistro.CreateAccount(
user_name, user_pass, expiration, None)
_save_other_sudoers(sudoers)
if error_string is not None:
err_msg = "Failed to create the account or set the password"
waagent.AddExtensionEvent(name=hutil.get_name(),
op=waagent.WALAEventOperation.Enable,
isSuccess=False,
message="(02101)" + err_msg)
raise Exception(err_msg + " with " + error_string)
hutil.log("Succeeded in create the account or set the password.")
# Allow password authentication if user_pass is provided
if user_pass is not None:
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="create-user-with-password")
_allow_password_auth()
# Reset ssh key with the new public key passed in or reuse old public key.
if cert_txt or len(ovf_env.SshPublicKeys) > 0:
if cert_txt and cert_txt.strip().lower().startswith("ssh-rsa"):
no_convert = True
try:
pub_path = os.path.join('/home/', user_name, '.ssh',
'authorized_keys')
ovf_env.UserName = user_name
if no_convert:
if cert_txt:
pub_path = ovf_env.PrepareDir(pub_path)
final_cert_txt = cert_txt
if(not cert_txt.endswith("\n")):
final_cert_txt = final_cert_txt+"\n"
waagent.AppendFileContents(pub_path, final_cert_txt)
waagent.MyDistro.setSelinuxContext(pub_path,
'unconfined_u:object_r:ssh_home_t:s0')
waagent.ChangeOwner(pub_path, user_name)
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="create-user")
hutil.log("Succeeded in resetting ssh_key.")
else:
err_msg = "Failed to reset ssh key because the cert content is empty."
waagent.AddExtensionEvent(name=hutil.get_name(),
op=waagent.WALAEventOperation.Enable,
isSuccess=False,
message="(02100)"+err_msg)
else:
if cert_txt:
_save_cert_str_as_file(cert_txt, 'temp.crt')
else:
for pkey in ovf_env.SshPublicKeys:
if pkey[1]:
shutil.copy(
os.path.join(waagent.LibDir, pkey[0] + '.crt'),
os.path.join(os.getcwd(), 'temp.crt'))
break
pub_path = ovf_env.PrepareDir(pub_path)
retcode = waagent.Run(waagent.Openssl + " x509 -in temp.crt -noout -pubkey > temp.pub")
if retcode > 0:
raise Exception("Failed to generate public key file.")
waagent.MyDistro.sshDeployPublicKey('temp.pub', pub_path)
waagent.MyDistro.setSelinuxContext(pub_path,
'unconfined_u:object_r:ssh_home_t:s0')
waagent.ChangeOwner(pub_path, user_name)
os.remove('temp.pub')
os.remove('temp.crt')
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="create-user")
hutil.log("Succeeded in resetting ssh_key.")
except Exception as e:
hutil.log(str(e))
waagent.AddExtensionEvent(name=hutil.get_name(),
op=waagent.WALAEventOperation.Enable,
isSuccess=False,
message="(02100)Failed to reset ssh key.")
def _get_other_sudoers(userName):
sudoersFile = '/etc/sudoers.d/waagent'
if not os.path.isfile(sudoersFile):
return None
sudoers = waagent.GetFileContents(sudoersFile).split("\n")
pattern = '^{0}\s'.format(userName)
sudoers = filter(lambda x : re.match(pattern, x) is None, sudoers)
return sudoers
def _save_other_sudoers(sudoers):
sudoersFile = '/etc/sudoers.d/waagent'
if sudoers is None:
return
waagent.AppendFileContents(sudoersFile, "\n".join(sudoers))
os.chmod("/etc/sudoers.d/waagent", 0o440)
def _allow_password_auth():
config = waagent.GetFileContents(SshdConfigPath).split("\n")
_set_sshd_config(config, "PasswordAuthentication", "yes")
waagent.ReplaceFileContentsAtomic(SshdConfigPath, "\n".join(config))
def _set_sshd_config(config, name, val):
notfound = True
for i in range(0, len(config)):
if config[i].startswith(name):
config[i] = "{0} {1}".format(name, val)
notfound = False
elif config[i].startswith("Match"):
# Match block must be put in the end of sshd config
break
if notfound:
config.insert(i, "{0} {1}".format(name, val))
return config
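# Note: _set_sshd_config mutates the passed-in config list in place; the return
# value is the same list, and its callers rely on the in-place mutation.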
def _reset_sshd_config(sshd_file_path):
distro = platform.dist()
distro_name = distro[0]
version = distro[1]
config_file_path = os.path.join(os.getcwd(), 'resources', '%s_%s' % (distro_name, version))
if not(os.path.exists(config_file_path)):
config_file_path = os.path.join(os.getcwd(), 'resources', '%s_%s' % (distro_name, 'default'))
if not(os.path.exists(config_file_path)):
config_file_path = os.path.join(os.getcwd(), 'resources', 'default')
if distro_name == "CoreOS":
# Parse sshd port from config_file_path
sshd_port = 22
regex = re.compile(r"^Port\s+(\d+)", re.VERBOSE)
with open(config_file_path) as f:
for line in f:
match = regex.match(line)
if match:
sshd_port = match.group(1)
break
# Prepare cloud init config for coreos-cloudinit
f = tempfile.NamedTemporaryFile(delete=False)
f.close()
cfg_tempfile = f.name
cfg_content = "#cloud-config\n\n"
# Overwrite /etc/ssh/sshd_config
cfg_content += "write_files:\n"
cfg_content += " - path: {0}\n".format(sshd_file_path)
cfg_content += " permissions: 0600\n"
cfg_content += " owner: root:root\n"
cfg_content += " content: |\n"
for line in waagent.GetFileContents(config_file_path).split('\n'):
cfg_content += " {0}\n".format(line)
# Change the sshd port in /etc/systemd/system/sshd.socket
cfg_content += "\ncoreos:\n"
cfg_content += " units:\n"
cfg_content += " - name: sshd.socket\n"
cfg_content += " command: restart\n"
cfg_content += " content: |\n"
cfg_content += " [Socket]\n"
cfg_content += " ListenStream={0}\n".format(sshd_port)
cfg_content += " Accept=yes\n"
waagent.SetFileContents(cfg_tempfile, cfg_content)
waagent.Run("coreos-cloudinit -from-file " + cfg_tempfile, chk_err=False)
os.remove(cfg_tempfile)
else:
shutil.copyfile(config_file_path, sshd_file_path)
waagent.MyDistro.restartSshService()
def _backup_sshd_config(sshd_file_path):
if os.path.exists(sshd_file_path):
backup_file_name = '%s_%s' % (
sshd_file_path, time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()))
shutil.copyfile(sshd_file_path, backup_file_name)
def _save_cert_str_as_file(cert_txt, file_name):
cert_start = cert_txt.find(BeginCertificateTag)
if cert_start >= 0:
cert_txt = cert_txt[cert_start + len(BeginCertificateTag):]
cert_end = cert_txt.find(EndCertificateTag)
if cert_end >= 0:
cert_txt = cert_txt[:cert_end]
cert_txt = cert_txt.strip()
cert_txt = "{0}\n{1}\n{2}\n".format(BeginCertificateTag, cert_txt, EndCertificateTag)
waagent.SetFileContents(file_name, cert_txt)
def _open_ssh_port():
_del_rule_if_exists('INPUT -p tcp -m tcp --dport 22 -j DROP')
_del_rule_if_exists('INPUT -p tcp -m tcp --dport 22 -j REJECT')
_del_rule_if_exists('INPUT -p -j DROP')
_del_rule_if_exists('INPUT -p -j REJECT')
_insert_rule_if_not_exists('INPUT -p tcp -m tcp --dport 22 -j ACCEPT')
_del_rule_if_exists('OUTPUT -p tcp -m tcp --sport 22 -j DROP')
_del_rule_if_exists('OUTPUT -p tcp -m tcp --sport 22 -j REJECT')
_del_rule_if_exists('OUTPUT -p -j DROP')
_del_rule_if_exists('OUTPUT -p -j REJECT')
_insert_rule_if_not_exists('OUTPUT -p tcp -m tcp --dport 22 -j ACCEPT')
def _del_rule_if_exists(rule_string):
cmd_result = waagent.RunGetOutput("iptables-save")
while cmd_result[0] == 0 and (rule_string in cmd_result[1]):
waagent.Run("iptables -D %s" % rule_string)
cmd_result = waagent.RunGetOutput("iptables-save")
def _insert_rule_if_not_exists(rule_string):
cmd_result = waagent.RunGetOutput("iptables-save")
if cmd_result[0] == 0 and (rule_string not in cmd_result[1]):
waagent.Run("iptables -I %s" % rule_string)
def check_and_repair_disk(hutil):
public_settings = hutil.get_public_settings()
if public_settings:
check_disk = public_settings.get('check_disk')
repair_disk = public_settings.get('repair_disk')
disk_name = public_settings.get('disk_name')
if check_disk and repair_disk:
err_msg = ("check_disk and repair_disk was both specified."
"Only one of them can be specified")
hutil.error(err_msg)
hutil.do_exit(1, 'Enable', 'error', '0', 'Enable failed.')
if check_disk:
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="check_disk")
outretcode = _fsck_check(hutil)
hutil.log("Successfully checked disk")
return outretcode
if repair_disk:
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="repair_disk")
outdata = _fsck_repair(hutil, disk_name)
hutil.log("Repaired and remounted disk")
return outdata
def _fsck_check(hutil):
try:
retcode = waagent.Run("fsck -As -y")
if retcode > 0:
hutil.log(retcode)
raise Exception("Disk check was not successful")
else:
return retcode
except Exception as e:
hutil.error("Failed to run disk check with error: {0}, {1}".format(
str(e), traceback.format_exc()))
hutil.do_exit(1, 'Check', 'error', '0', 'Check failed.')
def _fsck_repair(hutil, disk_name):
    # First forcibly unmount the disk before attempting the repair
try:
cmd_result = waagent.Run("umount -f /%s" % disk_name)
if cmd_result != 0:
            # Log the unmount failure and still attempt the filesystem repair
hutil.log("Failed to unmount disk: %s" % disk_name)
# run repair
retcode = waagent.Run("fsck -AR -y")
hutil.log("Ran fsck with return code: %d" % retcode)
if retcode == 0:
retcode, output = waagent.RunGetOutput("mount")
hutil.log(output)
return output
else:
raise Exception("Failed to mount disks")
except Exception as e:
hutil.error("{0}, {1}".format(str(e), traceback.format_exc()))
hutil.do_exit(1, 'Repair','error','0', 'Repair failed.')
if __name__ == '__main__' :
main()
| apache-2.0 | 2,167,910,506,784,171,300 | 38.919831 | 133 | 0.595973 | false |
firebase/firebase-admin-python | setup.py | 1 | 2584 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup file for distribution artifacts."""
from __future__ import print_function
from os import path
import sys
from setuptools import setup
(major, minor) = (sys.version_info.major, sys.version_info.minor)
if major != 3 or minor < 6:
print('firebase_admin requires python >= 3.6', file=sys.stderr)
sys.exit(1)
# Read in the package metadata per recommendations from:
# https://packaging.python.org/guides/single-sourcing-package-version/
about_path = path.join(path.dirname(path.abspath(__file__)), 'firebase_admin', '__about__.py')
about = {}
with open(about_path) as fp:
exec(fp.read(), about) # pylint: disable=exec-used
long_description = ('The Firebase Admin Python SDK enables server-side (backend) Python developers '
'to integrate Firebase into their services and applications.')
install_requires = [
'cachecontrol>=0.12.6',
'google-api-core[grpc] >= 1.22.1, < 2.0.0dev; platform.python_implementation != "PyPy"',
'google-api-python-client >= 1.7.8',
'google-cloud-firestore>=2.1.0; platform.python_implementation != "PyPy"',
'google-cloud-storage>=1.37.1',
]
setup(
name=about['__title__'],
version=about['__version__'],
description='Firebase Admin Python SDK',
long_description=long_description,
url=about['__url__'],
author=about['__author__'],
license=about['__license__'],
keywords='firebase cloud development',
install_requires=install_requires,
packages=['firebase_admin'],
python_requires='>=3.6',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'License :: OSI Approved :: Apache Software License',
],
)
| apache-2.0 | -2,863,027,261,551,218,000 | 35.914286 | 100 | 0.674923 | false |
hoytak/lazyrunner | lazyrunner/manager.py | 1 | 4915 | """
A class that manages a batch of sessions.
"""
import time, logging, sys
from os import makedirs, remove
from os.path import join, expanduser, exists, split, abspath, normpath
from treedict import TreeDict
from pnstructures import PNodeCommon, PNode
import parameters as parameter_module
import pmodule
import loading
import configuration
################################################################################
def __initLoggingSystem(custom_opttree):
# get one filled in with the defaults
opttree = configuration.setupOptionTree(custom_opttree, None, False)
# Set up the logging stuff
logging.basicConfig(
format = opttree.logging.format,
datefmt = opttree.logging.datefmt,
level = logging.DEBUG if opttree.verbose else logging.INFO
)
if hasattr(logging, "captureWarnings"):
logging.captureWarnings(True)
def clean(custom_opttree = None, **kwargs):
if custom_opttree is None:
custom_opttree = TreeDict()
custom_opttree.update(TreeDict.fromdict(kwargs))
__initLoggingSystem(custom_opttree)
log = logging.getLogger("Configuration")
opttree = configuration.setupOptionTree(custom_opttree, log, False)
loading.cleanAll(opttree)
################################################################################
__manager = None
def initialize(custom_opttree = None, **kwargs):
global __manager
if __manager is not None:
raise RuntimeError("Initialize has already been called! Call reset first to reinitialize.")
# fill in the custom opt_tree here with default options.
if custom_opttree is None:
custom_opttree = TreeDict()
custom_opttree.update(TreeDict.fromdict(kwargs))
__initLoggingSystem(custom_opttree)
# set up the manager
__manager = _RunManager(custom_opttree)
def manager():
global __manager
if __manager is None:
raise RuntimeError("Initialize must be called before manager is available.")
return __manager
def reset():
global __manager
__manager = None
class _RunManager(object):
"""
A class providing an API for interfacing directly with a
lazyrunner project.
"""
def __init__(self, custom_opttree):
"""
Initializes a lazyrunner environment. The environment options
are identical to those on the command line.
project_directory = '.',
debug_mode = False,
verbose = False,
no_cache = False,
force = False,
cache_read_only = False,
cache_directory = None,
no_compile = False,
config_module = 'conf'
"""
self.log = logging.getLogger("Manager")
################################################################################
# Init all the module lookup stuff
opttree = configuration.setupOptionTree(custom_opttree, self.log, False)
loading.resetAndInitModuleLoading(opttree)
opttree = configuration.setupOptionTree(custom_opttree, self.log, True)
self.opttree = opttree
pmodule.resetAndInitialize()
parameter_module.resetAndInitialize()
loading.resetAndInitModules(self.opttree)
parameter_module.finalize()
pmodule.finalize()
########################################################################################
# General Control Functions
def getResults(self, modules = None, presets = [], parameters = None):
common = PNodeCommon(self.opttree)
ptree = parameter_module.getParameterTree(presets, parameters = parameters)
if modules is None:
modules = pmodule.getCurrentRunQueue()
if type(modules) is str:
modules = [modules]
results = common.getResults(ptree, modules)
return dict(zip(modules, results))
def getPresetHelp(self, width = None):
        return '\n'.join(parameter_module.getPresetHelpList(width = width))
def updatePresetCompletionCache(self, preset_name_cache_file):
parameter_module.presets.updatePresetCompletionCache(preset_name_cache_file)
def run(modules, presets = [], project_directory = '.', options = None):
"""
Convenience function for running things directly. `options`, if given,
should be a TreeDict of configuration options.
"""
if options is None:
options = TreeDict()
else:
if type(options) is not TreeDict:
raise TypeError("options parameter needs to be a TreeDict.")
options.project_directory = project_directory
    m = _RunManager(options)
return m.getResults(modules, presets)
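# Example usage (module and preset names here are hypothetical, for illustration):
#   results = run(["MyModule"], presets=["quick"], project_directory=".")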
| bsd-3-clause | 107,189,760,431,933,970 | 28.608434 | 100 | 0.587792 | false |
allanlei/django-backup | example/settings.py | 1 | 5087 | # Django settings for example project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'djbackup', # Or path to database file if using sqlite3.
'USER': 'djbackup', # Not used with sqlite3.
'PASSWORD': 'djbackup', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '_4uay=x9gf78pd9lt^4nz!1*us2xkcma3zu@=*zruh1ti0_u64'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'example.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'backup',
'example',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
DEFAULT_FROM_EMAIL = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 587
EMAIL_SUBJECT_PREFIX = ''
EMAIL_USE_TLS = True
| bsd-3-clause | 4,444,275,903,648,570,400 | 32.032468 | 134 | 0.684097 | false |
iot-factory/synapse | synapse/storage/transactions.py | 1 | 10722 | # -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import SQLBaseStore
from synapse.util.caches.descriptors import cached
from collections import namedtuple
from canonicaljson import encode_canonical_json
import logging
logger = logging.getLogger(__name__)
class TransactionStore(SQLBaseStore):
"""A collection of queries for handling PDUs.
"""
def get_received_txn_response(self, transaction_id, origin):
"""For an incoming transaction from a given origin, check if we have
already responded to it. If so, return the response code and response
body (as a dict).
Args:
transaction_id (str)
origin(str)
Returns:
tuple: None if we have not previously responded to
this transaction or a 2-tuple of (int, dict)
"""
return self.runInteraction(
"get_received_txn_response",
self._get_received_txn_response, transaction_id, origin
)
def _get_received_txn_response(self, txn, transaction_id, origin):
result = self._simple_select_one_txn(
txn,
table=ReceivedTransactionsTable.table_name,
keyvalues={
"transaction_id": transaction_id,
"origin": origin,
},
retcols=ReceivedTransactionsTable.fields,
allow_none=True,
)
if result and result["response_code"]:
return result["response_code"], result["response_json"]
else:
return None
def set_received_txn_response(self, transaction_id, origin, code,
response_dict):
"""Persist the response we returened for an incoming transaction, and
should return for subsequent transactions with the same transaction_id
and origin.
Args:
txn
transaction_id (str)
origin (str)
code (int)
response_json (str)
"""
return self._simple_insert(
table=ReceivedTransactionsTable.table_name,
values={
"transaction_id": transaction_id,
"origin": origin,
"response_code": code,
"response_json": buffer(encode_canonical_json(response_dict)),
},
or_ignore=True,
desc="set_received_txn_response",
)
def prep_send_transaction(self, transaction_id, destination,
origin_server_ts):
"""Persists an outgoing transaction and calculates the values for the
previous transaction id list.
This should be called before sending the transaction so that it has the
correct value for the `prev_ids` key.
Args:
transaction_id (str)
destination (str)
origin_server_ts (int)
Returns:
list: A list of previous transaction ids.
"""
return self.runInteraction(
"prep_send_transaction",
self._prep_send_transaction,
transaction_id, destination, origin_server_ts
)
def _prep_send_transaction(self, txn, transaction_id, destination,
origin_server_ts):
next_id = self._transaction_id_gen.get_next_txn(txn)
# First we find out what the prev_txns should be.
# Since we know that we are only sending one transaction at a time,
# we can simply take the last one.
query = (
"SELECT * FROM sent_transactions"
" WHERE destination = ?"
" ORDER BY id DESC LIMIT 1"
)
txn.execute(query, (destination,))
results = self.cursor_to_dict(txn)
prev_txns = [r["transaction_id"] for r in results]
# Actually add the new transaction to the sent_transactions table.
self._simple_insert_txn(
txn,
table=SentTransactions.table_name,
values={
"id": next_id,
"transaction_id": transaction_id,
"destination": destination,
"ts": origin_server_ts,
"response_code": 0,
"response_json": None,
}
)
# TODO Update the tx id -> pdu id mapping
return prev_txns
def delivered_txn(self, transaction_id, destination, code, response_dict):
"""Persists the response for an outgoing transaction.
Args:
transaction_id (str)
destination (str)
code (int)
response_json (str)
"""
return self.runInteraction(
"delivered_txn",
self._delivered_txn,
transaction_id, destination, code,
buffer(encode_canonical_json(response_dict)),
)
def _delivered_txn(self, txn, transaction_id, destination,
code, response_json):
self._simple_update_one_txn(
txn,
table=SentTransactions.table_name,
keyvalues={
"transaction_id": transaction_id,
"destination": destination,
},
updatevalues={
"response_code": code,
"response_json": None, # For now, don't persist response_json
}
)
def get_transactions_after(self, transaction_id, destination):
"""Get all transactions after a given local transaction_id.
Args:
transaction_id (str)
destination (str)
Returns:
list: A list of dicts
"""
return self.runInteraction(
"get_transactions_after",
self._get_transactions_after, transaction_id, destination
)
def _get_transactions_after(self, txn, transaction_id, destination):
query = (
"SELECT * FROM sent_transactions"
" WHERE destination = ? AND id >"
" ("
" SELECT id FROM sent_transactions"
" WHERE transaction_id = ? AND destination = ?"
" )"
)
txn.execute(query, (destination, transaction_id, destination))
return self.cursor_to_dict(txn)
@cached()
def get_destination_retry_timings(self, destination):
"""Gets the current retry timings (if any) for a given destination.
Args:
destination (str)
Returns:
None if not retrying
Otherwise a dict for the retry scheme
"""
return self.runInteraction(
"get_destination_retry_timings",
self._get_destination_retry_timings, destination)
def _get_destination_retry_timings(self, txn, destination):
result = self._simple_select_one_txn(
txn,
table=DestinationsTable.table_name,
keyvalues={
"destination": destination,
},
retcols=DestinationsTable.fields,
allow_none=True,
)
if result and result["retry_last_ts"] > 0:
return result
else:
return None
def set_destination_retry_timings(self, destination,
retry_last_ts, retry_interval):
"""Sets the current retry timings for a given destination.
        Both timings should be zero if retrying is no longer occurring.
Args:
destination (str)
retry_last_ts (int) - time of last retry attempt in unix epoch ms
retry_interval (int) - how long until next retry in ms
"""
        # XXX: we could choose to not bother persisting this if our cache thinks
# this is a NOOP
return self.runInteraction(
"set_destination_retry_timings",
self._set_destination_retry_timings,
destination,
retry_last_ts,
retry_interval,
)
def _set_destination_retry_timings(self, txn, destination,
retry_last_ts, retry_interval):
txn.call_after(self.get_destination_retry_timings.invalidate, (destination,))
self._simple_upsert_txn(
txn,
"destinations",
keyvalues={
"destination": destination,
},
values={
"retry_last_ts": retry_last_ts,
"retry_interval": retry_interval,
},
insertion_values={
"destination": destination,
"retry_last_ts": retry_last_ts,
"retry_interval": retry_interval,
}
)
def get_destinations_needing_retry(self):
"""Get all destinations which are due a retry for sending a transaction.
Returns:
list: A list of dicts
"""
return self.runInteraction(
"get_destinations_needing_retry",
self._get_destinations_needing_retry
)
def _get_destinations_needing_retry(self, txn):
query = (
"SELECT * FROM destinations"
" WHERE retry_last_ts > 0 and retry_next_ts < ?"
)
txn.execute(query, (self._clock.time_msec(),))
return self.cursor_to_dict(txn)
class ReceivedTransactionsTable(object):
table_name = "received_transactions"
fields = [
"transaction_id",
"origin",
"ts",
"response_code",
"response_json",
"has_been_referenced",
]
class SentTransactions(object):
table_name = "sent_transactions"
fields = [
"id",
"transaction_id",
"destination",
"ts",
"response_code",
"response_json",
]
EntryType = namedtuple("SentTransactionsEntry", fields)
class TransactionsToPduTable(object):
table_name = "transaction_id_to_pdu"
fields = [
"transaction_id",
"destination",
"pdu_id",
"pdu_origin",
]
class DestinationsTable(object):
table_name = "destinations"
fields = [
"destination",
"retry_last_ts",
"retry_interval",
]
| apache-2.0 | -488,149,266,400,128,260 | 29.202817 | 85 | 0.561929 | false |
esa/SpaceAMPL | interplanetary/impulsive/single_phase/include/writeequations.py | 1 | 13720 | import sys;
file = open("equations.inc","w")
file2 = open("writeinitialguess.inc","w")
file3 = open("writesolution.inc","w")
file4 = open("guesstangential.inc","w")
n = int(sys.argv[1])
file.write("#------------------------------------------------------------------------\n")
file.write("#Optimisation Variables\n\n")
file.write("#Impulsive DVs\n")
file.write("var ux{i in 2..n-1};\n")
file.write("var uy{i in 2..n-1};\n")
file.write("var uz{i in 2..n-1};\n")
file.write("var uT{i in 2..n-1} = sqrt(ux[i]**2+uy[i]**2+uz[i]**2);\n\n")
file.write("#Starting VINF\n")
file.write("var VINFx:=0.0001;\n")
file.write("var VINFy:=0.0001;\n")
file.write("var VINFz:=0.0001;\n")
file.write("var VINF = sqrt(VINFx^2+VINFy^2+VINFz^2);\n\n")
file.write("#Ending VINF\n")
file.write("var VINFxf:=0.0001;\n")
file.write("var VINFyf:=0.0001;\n")
file.write("var VINFzf:=0.0001;\n")
file.write("var VINFf = sqrt(VINFxf^2+VINFyf^2+VINFzf^2);\n\n")
file.write("#Eccentric Anomaly Differences between nodes\n")
file.write("var DE{i in J};\n\n")
file.write("#Initial time\n")
file.write("var timod := tI * d2u * f, <= (tI+tbnd)*d2u*f, >= (tI-tbnd)*d2u*f; \n")
file.write("#Time of flight \n")
file.write("var tfmod := tT * d2u * f, <= (tT+tbnd)*d2u*f, >= (tT-tbnd)*d2u*f; \n")
file.write("#--------------------------------------------------------------------------\n\n")
file.write("#--------------------------------------------------------------------------\n")
file.write("#We here introduce some time variables that simplifies the formulas \n")
file.write("var ti = timod /f; #Initial time non dimensional\n")
file.write("var tf = tfmod /f; #Time of flight non dimensional\n")
file.write("var tF = ti/d2u + tf/d2u; #Arrival time (MJD2000)\n")
file.write("var dt = tf/(n-1); #Inter-node temporal separation\n")
file.write("#--------------------------------------------------------------------------\n\n")
file.write("#--------------------------------------------------------------------------\n")
file.write("#Planet ephemerides are set and evaluated in tI, tI+tT\n")
file.write("include include/ephcalc.inc;\n")
file.write("fix timod;\n")
file.write("fix tfmod;\n")
file.write("solve;\n")
file.write("unfix timod;\n")
file.write("unfix tfmod;\n")
file.write("#--------------------------------------------------------------------------\n\n\n\n")
file.write("#--------------------------------------------------------------------------\n")
file.write("# Node 1: Departure Node\n")
file.write("var x1 = x0;\n")
file.write("var y1 = y0;\n")
file.write("var z1 = z0;\n")
file.write("var dx1 = dx0 + VINFx;\n")
file.write("var dy1 = dy0 + VINFy;\n")
file.write("var dz1 = dz0 + VINFz;\n\n")
file.write("#Basic definitions\n")
file.write("var r1 = sqrt(x1^2+y1^2+z1^2);\n")
file.write("var v1 = sqrt(dx1^2+dy1^2+dz1^2);\n")
file.write("var a1 = 1 / (2/r1 - v1^2);\n")
file.write("var sigma1 = x1*dx1+y1*dy1+z1*dz1;\n")
file.write("var meanmotion1 = sqrt(1/a1^3);\n")
file.write("var DM1 = meanmotion1 * dt/2;\n\n")
file.write("#Lagrange Coefficients\n")
file.write("var rvar1 = a1 + (r1-a1)*cos(DE[1]) + sigma1*sqrt(a1)*sin(DE[1]);\n")
file.write("var F1 = 1 - a1/r1 * (1-cos(DE[1]));\n")
file.write("var G1 = a1*sigma1*(1-cos(DE[1])) + r1*sqrt(a1)*sin(DE[1]);\n")
file.write("var Ft1 = -sqrt(a1)/(r1*rvar1) * sin(DE[1]);\n")
file.write("var Gt1 = 1 - a1/rvar1*(1-cos(DE[1]));\n\n")
file.write("subject to KeplerEquations1: \n")
file.write(" DM1 - DE[1] - sigma1/sqrt(a1) * (1 - cos(DE[1])) + (1 - r1/a1)*sin(DE[1]) = 0;\n")
file.write("#--------------------------------------------------------------------------\n\n")
for i in range(2,n-1):
file.write("#--------------------------------------------------------------------------\n")
file.write("# Node " +str(i)+"\n")
file.write("var x" +str(i)+" = F" +str(i-1)+"*x" +str(i-1)+" + G" +str(i-1)+"*dx" +str(i-1)+";\n")
file.write("var y" +str(i)+" = F" +str(i-1)+"*y" +str(i-1)+" + G" +str(i-1)+"*dy" +str(i-1)+";\n")
file.write("var z" +str(i)+" = F" +str(i-1)+"*z" +str(i-1)+" + G" +str(i-1)+"*dz" +str(i-1)+";\n")
file.write("var dx" +str(i)+" = Ft" +str(i-1)+"*x" +str(i-1)+" + Gt" +str(i-1)+"*dx" +str(i-1)+" + ux[" +str(i)+"];\n")
file.write("var dy" +str(i)+" = Ft" +str(i-1)+"*y" +str(i-1)+" + Gt" +str(i-1)+"*dy" +str(i-1)+" + uy[" +str(i)+"];\n")
file.write("var dz" +str(i)+" = Ft" +str(i-1)+"*z" +str(i-1)+" + Gt" +str(i-1)+"*dz" +str(i-1)+" + uz[" +str(i)+"];\n\n")
file.write("#Basic definitions\n")
file.write("var r" +str(i)+" = sqrt(x" +str(i)+"^2+y" +str(i)+"^2+z" +str(i)+"^2);\n")
file.write("var v" +str(i)+" = sqrt(dx" +str(i)+"^2+dy" +str(i)+"^2+dz" +str(i)+"^2);\n")
file.write("var a" +str(i)+" = 1 / (2/r" +str(i)+" - v" +str(i)+"^2);\n")
file.write("var sigma" +str(i)+" = x" +str(i)+"*dx" +str(i)+"+y" +str(i)+"*dy" +str(i)+"+z" +str(i)+"*dz" +str(i)+";\n")
file.write("var meanmotion" +str(i)+" = sqrt(1/a" +str(i)+"^3);\n")
file.write("var DM" +str(i)+" = meanmotion" +str(i)+" * dt;\n\n")
file.write("#Lagrange Coefficients\n")
file.write("var rvar" +str(i)+" = a" +str(i)+" + (r" +str(i)+"-a" +str(i)+")*cos(DE[" +str(i)+"]) + sigma" +str(i)+"*sqrt(a" +str(i)+")*sin(DE[" +str(i)+"]);\n")
file.write("var F" +str(i)+" = 1 - a" +str(i)+"/r" +str(i)+" * (1-cos(DE[" +str(i)+"]));\n")
file.write("var G" +str(i)+" = a" +str(i)+"*sigma" +str(i)+"*(1-cos(DE[" +str(i)+"])) + r" +str(i)+"*sqrt(a" +str(i)+")*sin(DE[" +str(i)+"]);\n")
file.write("var Ft" +str(i)+" = -sqrt(a" +str(i)+")/(r" +str(i)+"*rvar" +str(i)+") * sin(DE[" +str(i)+"]);\n")
file.write("var Gt" +str(i)+" = 1 - a" +str(i)+"/rvar" +str(i)+"*(1-cos(DE[" +str(i)+"]));\n\n")
file.write("subject to KeplerEquations" +str(i)+": \n")
file.write(" DM" +str(i)+" - DE[" +str(i)+"] - sigma" +str(i)+"/sqrt(a" +str(i)+") * (1 - cos(DE[" +str(i)+"])) + (1 - r" +str(i)+"/a" +str(i)+")*sin(DE[" +str(i)+"]) = 0;\n")
file.write("#--------------------------------------------------------------------------\n\n")
i=n-1
file.write("#--------------------------------------------------------------------------\n")
file.write("# Node " +str(i)+"\n")
file.write("var x" +str(i)+" = F" +str(i-1)+"*x" +str(i-1)+" + G" +str(i-1)+"*dx" +str(i-1)+";\n")
file.write("var y" +str(i)+" = F" +str(i-1)+"*y" +str(i-1)+" + G" +str(i-1)+"*dy" +str(i-1)+";\n")
file.write("var z" +str(i)+" = F" +str(i-1)+"*z" +str(i-1)+" + G" +str(i-1)+"*dz" +str(i-1)+";\n")
file.write("var dx" +str(i)+" = Ft" +str(i-1)+"*x" +str(i-1)+" + Gt" +str(i-1)+"*dx" +str(i-1)+" + ux[" +str(i)+"];\n")
file.write("var dy" +str(i)+" = Ft" +str(i-1)+"*y" +str(i-1)+" + Gt" +str(i-1)+"*dy" +str(i-1)+" + uy[" +str(i)+"];\n")
file.write("var dz" +str(i)+" = Ft" +str(i-1)+"*z" +str(i-1)+" + Gt" +str(i-1)+"*dz" +str(i-1)+" + uz[" +str(i)+"];\n\n")
file.write("#Basic definitions\n")
file.write("var r" +str(i)+" = sqrt(x" +str(i)+"^2+y" +str(i)+"^2+z" +str(i)+"^2);\n")
file.write("var v" +str(i)+" = sqrt(dx" +str(i)+"^2+dy" +str(i)+"^2+dz" +str(i)+"^2);\n")
file.write("var a" +str(i)+" = 1 / (2/r" +str(i)+" - v" +str(i)+"^2);\n")
file.write("var sigma" +str(i)+" = x" +str(i)+"*dx" +str(i)+"+y" +str(i)+"*dy" +str(i)+"+z" +str(i)+"*dz" +str(i)+";\n")
file.write("var meanmotion" +str(i)+" = sqrt(1/a" +str(i)+"^3);\n")
file.write("var DM" +str(i)+" = meanmotion" +str(i)+" * dt/2;\n\n")
file.write("#Lagrange Coefficients\n")
file.write("var rvar" +str(i)+" = a" +str(i)+" + (r" +str(i)+"-a" +str(i)+")*cos(DE[" +str(i)+"]) + sigma" +str(i)+"*sqrt(a" +str(i)+")*sin(DE[" +str(i)+"]);\n")
file.write("var F" +str(i)+" = 1 - a" +str(i)+"/r" +str(i)+" * (1-cos(DE[" +str(i)+"]));\n")
file.write("var G" +str(i)+" = a" +str(i)+"*sigma" +str(i)+"*(1-cos(DE[" +str(i)+"])) + r" +str(i)+"*sqrt(a" +str(i)+")*sin(DE[" +str(i)+"]);\n")
file.write("var Ft" +str(i)+" = -sqrt(a" +str(i)+")/(r" +str(i)+"*rvar" +str(i)+") * sin(DE[" +str(i)+"]);\n")
file.write("var Gt" +str(i)+" = 1 - a" +str(i)+"/rvar" +str(i)+"*(1-cos(DE[" +str(i)+"]));\n\n")
file.write("subject to KeplerEquations" +str(i)+": \n")
file.write(" DM" +str(i)+" - DE[" +str(i)+"] - sigma" +str(i)+"/sqrt(a" +str(i)+") * (1 - cos(DE[" +str(i)+"])) + (1 - r" +str(i)+"/a" +str(i)+")*sin(DE[" +str(i)+"]) = 0;\n")
file.write("#--------------------------------------------------------------------------\n\n")
file.write("#--------------------------------------------------------------------------\n")
file.write("# Node n: Arrival node\n")
file.write("var xn = F" +str(n-1)+"*x" +str(n-1)+" + G" +str(n-1)+"*dx" +str(n-1)+";\n")
file.write("var yn = F" +str(n-1)+"*y" +str(n-1)+" + G" +str(n-1)+"*dy" +str(n-1)+";\n")
file.write("var zn = F" +str(n-1)+"*z" +str(n-1)+" + G" +str(n-1)+"*dz" +str(n-1)+";\n")
file.write("var dxn = Ft" +str(n-1)+"*x" +str(n-1)+" + Gt" +str(n-1)+"*dx" +str(n-1)+"+ VINFxf;\n")
file.write("var dyn = Ft" +str(n-1)+"*y" +str(n-1)+" + Gt" +str(n-1)+"*dy" +str(n-1)+"+ VINFyf;\n")
file.write("var dzn = Ft" +str(n-1)+"*z" +str(n-1)+" + Gt" +str(n-1)+"*dz" +str(n-1)+"+ VINFzf;\n\n")
file.write("#Basic definitions\n")
file.write("var rn = sqrt(xn^2+yn^2+zn^2);\n")
file.write("var vn = sqrt(dxn^2+dyn^2+dzn^2);\n")
file.write("var an = 1 / (2/rn - vn^2);\n")
file.write("#--------------------------------------------------------------------------\n\n")
file.write("#--------------------------------------------------------------------------\n")
file.write("#Match Constraint\n")
file.write("subject to \n")
file.write(" FinalPositionx : xn = xf;\n")
file.write(" FinalPositiony : yn = yf;\n")
file.write(" FinalPositionz : zn = zf;\n")
file.write(" FinalVelocityx : dxn = dxf;\n")
file.write(" FinalVelocityy : dyn = dyf;\n")
file.write(" FinalVelocityz : dzn = dzf;\n")
file.write("#--------------------------------------------------------------------------\n")
#file2.write("printf \"%17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e\\n\",x1,y1,z1,dx1,dy1,dz1,1,VINFx,VINFy,VINFz>out/InitialGuess.out;\n")
for i in range(2,n):
file2.write("printf \"%17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e\\n\\n\",x"+str(i)+",y"+str(i)+",z"+str(i)+",dx"+str(i)+",dy"+str(i)+",dz"+str(i)+",m["+str(i)+"],ux["+str(i)+"],uy["+str(i)+"],uz["+str(i)+"]>out/InitialGuess.out;\n")
#file2.write("printf \"%17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e\\n\",xn,yn,zn,dxn,dyn,dzn,m[n-1],VINFxf,VINFyf,VINFzf>out/InitialGuess.out;\n")
file2.write("close out/InitialGuess.out;")
#file3.write("printf \"%17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e\\n\",x1,y1,z1,dx1,dy1,dz1,1,VINFx,VINFy,VINFz>out/solution.out;\n")
for i in range(2,n):
file3.write("printf \"%17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e\\n\\n\",x"+str(i)+",y"+str(i)+",z"+str(i)+",dx"+str(i)+",dy"+str(i)+",dz"+str(i)+",m["+str(i)+"],ux["+str(i)+"],uy["+str(i)+"],uz["+str(i)+"]>out/solution.out;\n")
#file3.write("printf \"%17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e\\n\",xn,yn,zn,dxn,dyn,dzn,m[n-1],VINFxf,VINFyf,VINFzf>out/solution.out;\n")
file3.write("close out/solution.out;")
file4.write("let {i in 2..n-1} ux[i]:=Tmax*0.0000001;\n")
file4.write("let {i in 2..n-1} uy[i]:=Tmax*0.0000001;\n")
file4.write("let {i in 2..n-1} uz[i]:=Tmax*0.0000001;\n\n")
#Tangentialguess
file4.write("#--------------------------------------------------------------------------\n")
file4.write("#Initial Guess for the DE variables\n")
file4.write("let {i in J} DE[i] := DM1;\n")
file4.write("#-----------------------------------------------------------------------\n\n")
for i in range(2,n-1):
file4.write("let ux["+str(i)+"]:=dx"+str(i)+"/v"+str(i)+"*Tmax/2* tf/(n-1);\n")
file4.write("let uy["+str(i)+"]:=dy"+str(i)+"/v"+str(i)+"*Tmax/2* tf/(n-1);\n")
file4.write("let uz["+str(i)+"]:=dz"+str(i)+"/v"+str(i)+"*Tmax/2* tf/(n-1);\n")
file4.write("subject to\n")
file4.write(" thrustON{i in 2..n-1}: uT[i] <= Tmax*tf/(n-1);\n\n")
file4.write("minimize\n")
file4.write(" position: (xf-xn)^2+(yf-yn)^2+(zf-zn)^2+(dxf-dxn)^2+(dyf-dyn)^2+(dzf-dzn)^2;\n\n")
file4.write("drop FinalPositionx;\n")
file4.write("drop FinalPositiony;\n")
file4.write("drop FinalPositionz;\n")
file4.write("drop FinalVelocityx;\n")
file4.write("drop FinalVelocityy;\n")
file4.write("drop FinalVelocityz;\n")
file4.write("#--------------------------------------------------------------------------\n")
file4.write("solve;\n")
file4.write("#-----------------------------------------------------------------------\n")
file4.write("#--------------------------------------------------------------------------\n")
file4.write("#Print The Initial Guess x,y,z,dx,dy,dz,m,ux,uy,uz variables\n\n")
file4.write("param m{i in I} := 1;\n")
file4.write("include include/writeinitialguess.inc;\n")
file4.write("purge m;\n\n")
file4.write("#Print the initial and final times\n")
file4.write("printf \"%17.16e, %17.16e \\n\", ti/d2u , tF-ti/d2u > out/TimesGuess.out;\n")
file4.write("close out/TimesGuess.out;\n")
file4.write("#------------------------------------------------------------------------\n\n")
file4.write("#--------------------------------------------------------------------------\n")
file4.write("#Clean up\n")
file4.write("unfix timod;\n")
file4.write("unfix tfmod;\n")
file4.write("restore FinalPositionx;\n")
file4.write("restore FinalPositiony;\n")
file4.write("restore FinalPositionz;\n")
file4.write("restore FinalVelocityx;\n")
file4.write("restore FinalVelocityy;\n")
file4.write("restore FinalVelocityz;\n")
file4.write("drop thrustON;\n")
file4.write("drop position;\n")
| gpl-2.0 | -1,191,002,370,958,845,400 | 54.772358 | 279 | 0.490233 | false |
KarolBedkowski/webmon | webmon/main.py | 1 | 16243 | #!/usr/bin/python3
"""
Main functions.
Copyright (c) Karol Będkowski, 2016-2018
This file is part of webmon.
Licence: GPLv2+
"""
import argparse
from concurrent import futures
import datetime
import imp
import locale
import logging
import os.path
import pprint
import time
import typing as ty
# import typecheck as tc
from . import (cache, common, comparators, config, filters, inputs,
logging_setup, outputs, metrics)
__author__ = "Karol Będkowski"
__copyright__ = "Copyright (c) Karol Będkowski, 2016-2018"
VERSION = "0.2"
APP_NAME = "webmon"
DEFAULT_DIFF_MODE = "ndiff"
_LOG = logging.getLogger("main")
# @tc.typecheck
def compare_contents(prev_content: str, content: str, ctx: common.Context,
result: common.Result) \
-> ty.Tuple[bool, ty.Optional[str], ty.Optional[dict]]:
""" Compare contents according to configuration. """
# pylint: disable=invalid-sequence-index
opts = ctx.input_conf.get("diff_options")
comparator = comparators.get_comparator(
ctx.input_conf["diff_mode"] or DEFAULT_DIFF_MODE, opts)
update_date = result.meta.get('update_date') or time.time()
compared, diff, new_meta = comparator.compare(
prev_content, str(datetime.datetime.fromtimestamp(update_date)),
content, str(datetime.datetime.now()), ctx, result.meta)
# ctx.log_debug("compare: diff: %s", diff)
return compared, diff, {'comparator_opts': new_meta}
# @tc.typecheck
def compare_content_new(content: str, ctx: common.Context,
result: common.Result) -> ty.Tuple[str, dict]:
# pylint: disable=invalid-sequence-index
opts = ctx.input_conf.get("diff_options")
comparator = comparators.get_comparator(
ctx.input_conf["diff_mode"] or DEFAULT_DIFF_MODE, opts)
diff, new_meta = comparator.new(
content, str(datetime.datetime.now()), ctx, result.meta)
return diff, {'comparator_opts': new_meta}
# @tc.typecheck
def check_last_error_time(ctx: common.Context) -> bool:
"""
    Return True when a load error occurred and the `on_error_wait` interval
    has not yet passed since then.
"""
last_error = ctx.metadata.get('last_error')
on_error_wait = ctx.input_conf.get('on_error_wait')
if last_error and on_error_wait:
on_error_wait = common.parse_interval(on_error_wait)
return time.time() < last_error + on_error_wait
return False
# @tc.typecheck
def load_content(loader, ctx: common.Context) -> common.Result:
""" Load & filter content """
start = time.time()
# load list of parts
result = loader.load()
if ctx.debug:
ctx.log_debug("loaded: %s", result)
result.debug['loaded_duration'] = time.time() - start
fltr_start = time.time()
result.debug['items_loaded'] = len(result.items)
result.debug['filters_status'] = {}
# apply filters
for fltcfg in ctx.input_conf.get('filters') or []:
flt = filters.get_filter(fltcfg, ctx)
if not flt:
ctx.log_error("missing filter: %s", fltcfg)
continue
result = flt.filter(result)
if ctx.debug:
ctx.log_debug("filtered by %s: %s", flt, pprint.saferepr(result))
result.debug['filters_status'][flt.name] = len(result.items)
if ctx.args.debug:
result.meta['filter_duration'] = time.time() - fltr_start
result.debug['items_filterd'] = len(result.items)
result.meta['update_duration'] = time.time() - start
result.meta['update_date'] = time.time()
if not result.title:
result.title = ctx.name
if ctx.debug:
ctx.log_debug("result: %s", result)
return result
# @tc.typecheck
def process_content(ctx: common.Context, result: common.Result) \
-> ty.Tuple[str, str, ty.Optional[dict], str]:
"""Detect content status (changes). Returns content formatted to
write into cache.
Returns (status, diff_result, new metadata, content after processing)
"""
# pylint: disable=invalid-sequence-index
status = result.status
if status == common.STATUS_ERROR:
err = result.meta['error']
return common.STATUS_ERROR, err, None, None
prev_content = ctx.cache.get(ctx.oid)
content = result.format()
if status == common.STATUS_UNCHANGED:
ctx.log_debug("loading - unchanged content")
new_meta = {'comparator_opts': ctx.metadata.get('comparator_opts')}
return (common.STATUS_UNCHANGED, prev_content, new_meta, prev_content)
if prev_content is None:
ctx.log_debug("loading - new content")
content, new_meta = compare_content_new(content, ctx, result)
return common.STATUS_NEW, content, new_meta, content
new_meta = None
if prev_content != content:
ctx.log_debug("loading - changed content, making diff")
diff_result, diff, new_meta = compare_contents(
prev_content, content, ctx, result)
if diff_result:
return common.STATUS_CHANGED, diff, new_meta, content
ctx.log_debug("loading - unchanged content. %r", new_meta)
if new_meta is None:
new_meta = {'comparator_opts': ctx.metadata.get('comparator_opts')}
return (common.STATUS_UNCHANGED, prev_content, new_meta, content)
# @tc.typecheck
def create_error_result(ctx: common.Context, error_msg: str) \
-> common.Result:
result = common.Result(ctx.oid, ctx.input_idx)
result.set_error(error_msg)
return result
# @tc.typecheck
def load(ctx: common.Context) -> bool:
""" Load one input defined & configured by context"""
ctx.log_debug("start loading")
ctx.metadata = ctx.cache.get_meta(ctx.oid) or {}
# find loader
loader = inputs.get_input(ctx)
# check, is update required
if not ctx.args.force and not loader.need_update():
ctx.log_info("no update required")
return False
if not ctx.args.force and check_last_error_time(ctx):
ctx.log_info("waiting after error")
return False
# load
ctx.log_info("loading...")
try:
result = load_content(loader, ctx)
except common.InputError as err:
ctx.log_exception("input error on %s: %r", err.input, err)
ctx.log_debug("input error params: %s", err.input.dump_debug())
result = create_error_result(ctx, str(err))
except common.FilterError as err:
ctx.log_exception("filter error on %s: %r", err.filter, err)
ctx.log_debug("filter error params: %s", err.filter.dump_debug())
result = create_error_result(ctx, str(err))
if ctx.args.debug:
result.debug['items_final'] = len(result.items)
result.debug['last_updated'] = ctx.last_updated
try:
result.status, pres, new_meta, content = process_content(ctx, result)
except Exception as err: # pylint: disable=broad-except
ctx.log_exception("processing error: %r", err)
result = create_error_result(ctx, str(err))
result.status, pres, new_meta, content = process_content(ctx, result)
if new_meta:
result.meta.update(new_meta)
if result.status != common.STATUS_UNCHANGED or \
ctx.input_conf.get("report_unchanged"):
ctx.output.put(result, pres, ctx.input_conf)
if content is not None:
ctx.cache.put(ctx.oid, content)
ctx.cache.put_meta(ctx.oid, result.meta)
metrics.COLLECTOR.put_input(ctx, result)
ctx.log_info("loading done")
del loader
return True
def _parse_options():
parser = argparse.ArgumentParser(description=APP_NAME + " " + VERSION)
parser.add_argument('-i', '--inputs',
help='yaml file containing inputs definition'
' (default inputs.yaml)')
parser.add_argument('-c', '--config',
help='configuration filename (default config.yaml)')
parser.add_argument("-s", "--silent", action="store_true",
help="show only errors and warnings")
parser.add_argument("-v", "--verbose", action="store_true",
help="show additional informations")
parser.add_argument("-d", "--debug", action="store_true",
help="print debug informations")
parser.add_argument('--log',
help='log file name')
parser.add_argument('--cache-dir',
default="~/.cache/" + APP_NAME,
help='path to cache directory')
parser.add_argument("--force", action="store_true",
help="force update all sources")
parser.add_argument("--diff-mode", choices=['ndiff', 'unified', 'context'],
help="default diff mode")
parser.add_argument("--abilities", action="store_true",
help="show available filters/inputs/outputs/"
"comparators")
parser.add_argument("--list-inputs", action="store_true",
help="show configured inputs")
parser.add_argument("--sel", help="select (by idx, separated by comma) "
"inputs to update")
parser.add_argument("--tasks", help="background task to launch",
type=int, default=2)
return parser.parse_args()
def _show_abilities_cls(title, base_cls):
print(title)
for name, cls in common.get_subclasses_with_name(base_cls):
print(" -", name)
if hasattr(cls, "description"):
print(" " + cls.description)
if hasattr(cls, "params") and cls.params:
print(" Parameters:")
for param in cls.params:
print(" - {:<15s}\t{:<20s}\tdef={!r:<10}\treq={!r}".format(
*param))
print()
def show_abilities():
_show_abilities_cls("Inputs:", inputs.AbstractInput)
_show_abilities_cls("Outputs:", outputs.AbstractOutput)
_show_abilities_cls("Filters:", filters.AbstractFilter)
_show_abilities_cls("Comparators:", comparators.AbstractComparator)
def _load_user_classes():
users_scripts_dir = os.path.expanduser("~/.local/share/webmon")
if not os.path.isdir(users_scripts_dir):
return
for fname in os.listdir(users_scripts_dir):
fpath = os.path.join(users_scripts_dir, fname)
if os.path.isfile(fpath) and fname.endswith(".py") \
and not fname.startswith("_"):
_LOG.debug("loading %r", fpath)
try:
imp.load_source(fname[:-3], fpath)
except ImportError as err:
_LOG.error("Importing '%s' error %s", fpath, err)
# @tc.typecheck
def _list_inputs(inps, conf, args):
print("Inputs:")
defaults = _build_defaults(args, conf)
for idx, inp_conf in enumerate(inps, 1):
params = common.apply_defaults(defaults, inp_conf)
name = config.get_input_name(params, idx)
act = "" if params.get("enable", True) else "DISABLE"
print(" {:2d} {:<40s} {}".format(idx, name, act))
# @tc.typecheck
def _list_inputs_dbg(inps, conf, args):
try:
gcache = cache.Cache(os.path.join(
os.path.expanduser(args.cache_dir), "cache"))
except IOError:
_LOG.error("Init cache error")
return
print("Inputs:")
defaults = _build_defaults(args, conf)
for idx, inp_conf in enumerate(inps, 1):
params = common.apply_defaults(defaults, inp_conf)
ctx = common.Context(params, gcache, idx, None, args)
ctx.metadata = ctx.cache.get_meta(ctx.oid) or {}
if ctx.last_updated:
last_update = time.strftime("%x %X",
time.localtime(ctx.last_updated))
else:
last_update = 'never loaded'
loader = inputs.get_input(ctx)
next_update_ts = loader.next_update()
if next_update_ts:
next_update = time.strftime(
"%x %X", time.localtime(next_update_ts))
else:
next_update = 'now'
print(" {:2d} {:<40s} {} last: {} next: {} {} {}".format(
idx,
config.get_input_name(params, idx),
"ENB" if params.get("enable", True) else "DIS",
last_update, next_update,
ctx.metadata.get('status'),
config.gen_input_oid(params)
))
def _build_defaults(args, conf):
defaults = {}
defaults.update(config.DEFAULTS)
defaults.update(conf.get("defaults") or {})
defaults["diff_mode"] = args.diff_mode
return defaults
def load_all(args, inps, conf, selection=None):
""" Load all (or selected) inputs"""
metrics.configure(conf)
start = time.time()
try:
gcache = cache.Cache(os.path.join(
os.path.expanduser(args.cache_dir), "cache"))
except IOError:
_LOG.error("Init cache error")
return
partial_reports_dir = os.path.join(
os.path.expanduser(args.cache_dir), "partials")
try:
output = outputs.OutputManager(conf, partial_reports_dir)
except RuntimeError as err:
_LOG.error("Init parts dir error: %s", err)
return
# defaults for inputs
defaults = _build_defaults(args, conf)
def task(idx, iconf):
params = common.apply_defaults(defaults, iconf)
ctx = common.Context(params, gcache, idx, output, args)
try:
load(ctx)
except Exception as err: # pylint: disable=broad-except
ctx.log_exception("loading error: %s", err)
ctx.output.put_error(ctx, str(err))
del ctx
with futures.ThreadPoolExecutor(max_workers=args.tasks or 2) as ex:
wait_for = [
ex.submit(task, idx, iconf)
for idx, iconf in enumerate(inps, 1)
if not selection or idx in selection
]
futures.wait(wait_for)
_LOG.info("Loading: all done")
metrics.COLLECTOR.put_loading_summary(time.time() - start)
footer = " ".join((APP_NAME, VERSION, time.asctime()))
output.write(footer=footer, debug=args.debug)
# if processing all files - clean unused / old cache files
if not selection:
gcache.clean_cache()
metrics.COLLECTOR.put_total(time.time() - start)
metrics.COLLECTOR.write()
def check_libraries():
try:
from lxml import etree
except ImportError:
_LOG.warn("missing lxml library")
try:
import cssselect
except ImportError:
_LOG.warn("missing cssselect library")
try:
import html2text
except ImportError:
_LOG.warn("missing html2text library")
try:
import docutils.core
except ImportError:
_LOG.warn("missing docutils library")
try:
import yaml
except ImportError:
_LOG.warn("missing yaml library")
try:
import requests
except ImportError:
_LOG.warn("missing requests library")
try:
import feedparser
except ImportError:
_LOG.warn("missing feedparser library")
try:
import github3
except ImportError:
_LOG.warn("missing github3 library")
def main():
"""Main function."""
locale.setlocale(locale.LC_ALL, locale.getdefaultlocale())
args = _parse_options()
logging_setup.setup(args.log, args.debug, args.silent)
check_libraries()
# if not args.debug:
# tc.disable()
_load_user_classes()
if args.abilities:
show_abilities()
return
inps = config.load_inputs(args.inputs)
if not inps:
return
conf = config.load_configuration(args.config)
if not conf:
return
if args.list_inputs:
with config.lock():
if args.verbose:
_list_inputs_dbg(inps, conf, args)
else:
_list_inputs(inps, conf, args)
return
selection = None
if args.sel:
try:
selection = set(int(idx.strip()) for idx in args.sel.split(","))
except ValueError:
_LOG.error("Invalid --sel parameter - expected numbers separated"
"by comma")
return
try:
with config.lock():
load_all(args, inps, conf, selection)
except RuntimeError:
pass
if __name__ == "__main__":
main()
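# Example invocations (illustrative; the yaml file names are assumptions):
#
#   python -m webmon.main --inputs inputs.yaml --config config.yaml --verbose
#   python -m webmon.main --list-inputs
#   python -m webmon.main --sel 1,3 --force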
| gpl-2.0 | -2,854,431,417,029,589,000 | 31.41517 | 79 | 0.604618 | false |
sky15179/Debug | TestTabelController/my-python/relaceGuideImages.py | 1 | 3738 | #!/usr/bin/python
# coding:utf-8
import os
import re
import fnmatch
import zipfile
import shutil
import getpass
import glob
PATH = "/Users/"+ getpass.getuser() + "/Downloads"
ProtectImagePath = "/Users/wzg/Downloads/testimage/help_images"
prefix = "FS_HelpGuide_"
# Get the path of the directory holding the replacement images
def realProtectImagePath():
global ProtectImagePath
if os.path.isdir(ProtectImagePath):
pass
else:
        inputContent = raw_input("Please enter the path of the images to be replaced: ")
        if os.path.isdir(inputContent):
            ProtectImagePath = inputContent
pass
# Delete existing image folders (folder names containing "引导", i.e. "guide")
def deleteExistDirs():
# '''delete files and folders'''
for path,dirs,files in os.walk(PATH):
for secDir in dirs:
if fnmatch.fnmatch(secDir,'*引导*'):
turePath = os.path.join(PATH,secDir)
shutil.rmtree(turePath)
pass
# Unzip operation
def unzip(file_name):
# """unzip zip file"""
zip_file = zipfile.ZipFile(file_name)
zipDirName = file_name.replace('.zip','',1)
if os.path.isdir(zipDirName):
pass
else:
os.mkdir(zipDirName)
for names in zip_file.namelist():
if names.startswith('__MACOSX/'):
continue
zip_file.extract(names,zipDirName)
zip_file.close()
# zip_file.printdir()
pass
# Path of the downloaded archive to unzip
def unzipImages():
for filename in os.listdir(PATH):
if fnmatch.fnmatch(filename,'*引导*'):
return os.path.join(PATH,filename)
pass
# Get the path of the extracted directory
def realPath():
for path,dirs,files in os.walk(PATH):
for secDir in dirs:
if fnmatch.fnmatch(secDir,'*引导*'):
                # print 'archive: ' + secDir
turePath = os.path.join(PATH,secDir)
                # print 'real path: ' + turePath
return turePath
pass
# Rename the files with the guide prefix
def rename_fils(turePath):
for path,secdirs,files in os.walk(turePath):
for subDir in secdirs:
subPath = os.path.join(turePath,subDir)
for subfile in os.listdir(subPath):
                # print 'file: ' + subfile
subfilePath = os.path.join(subPath,subfile)
if os.path.isfile(subfilePath):
if '.DS_Store' not in subfile:
newName = os.path.join(subPath,prefix+subDir+'_'+subfile.replace('0','',1))
os.rename(subfilePath,newName)
pass
# Full paths of the files under the given directory
def fileInDirPath(turePath):
fileList = []
for path,secdirs,files in os.walk(turePath):
for subDir in secdirs:
subPath = os.path.join(turePath,subDir)
for subfile in os.listdir(subPath):
subfilePath = os.path.join(subPath,subfile)
if os.path.isfile(subfilePath):
if '.DS_Store' not in subfile:
newName = os.path.join(subPath,subfile)
fileList.append(newName)
return fileList
pass
# Replace the images
def repalceImages(newImageDirPath,oldImageDirPath):
if (os.path.isdir(newImageDirPath)) and (os.path.isdir(oldImageDirPath)):
for newImageFilePath in fileInDirPath(newImageDirPath):
if os.path.isfile(newImageFilePath):
shutil.copy(newImageFilePath,oldImageDirPath)
print "替换成功" + os.path.basename(newImageFilePath)
pass
if __name__ == '__main__':
deleteExistDirs()
unzipPath = unzipImages()
if os.path.isfile(unzipPath):
unzip(unzipPath)
rename_fils(realPath())
realProtectImagePath()
repalceImages(realPath(),ProtectImagePath)
else:
        print 'Invalid unzip path'
| apache-2.0 | 614,262,965,934,811,400 | 28.38843 | 99 | 0.600675 | false |
mxcube/mxcube | mxcubeqt/utils/widget_utils.py | 1 | 9801 | #
# Project: MXCuBE
# https://github.com/mxcube
#
# This file is part of MXCuBE software.
#
# MXCuBE is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MXCuBE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with MXCuBE. If not, see <http://www.gnu.org/licenses/>.
from mxcubeqt.utils import colors, qt_import
from mxcubecore.dispatcher import dispatcher
from mxcubecore.ConvertUtils import string_types
__credits__ = ["MXCuBE collaboration"]
__license__ = "LGPLv3+"
class DataModelInputBinder(object):
def __init__(self, obj):
object.__init__(self)
self.__model = obj
# Key - field name/attribute name of the persistant object.
# Value - The tuple (widget, validator, type_fn)
self.bindings = {}
dispatcher.connect(self._update_widget, "model_update", dispatcher.Any)
def __checkbox_update_value(self, field_name, new_value):
setattr(self.__model, field_name, new_value)
dispatcher.send("model_update", self.__model, field_name, self)
def __combobox_update_value(self, field_name, new_value):
setattr(self.__model, field_name, new_value)
dispatcher.send("model_update", self.__model, field_name, self)
def __ledit_update_value(self, field_name, widget, new_value, type_fn, validator):
if not self.bindings[field_name][3]:
origin_value = new_value
if type_fn == float and validator:
pattern = "%." + str(validator.decimals()) + "f"
new_value = pattern % float(new_value)
# fix validation if PyQt4 and sipapi 1 is used
if isinstance(new_value, string_types):
if "QString" in globals():
new_value = qt_import.QString(new_value)
self.__validated(
field_name, validator, self.bindings[field_name][0], new_value
)
if isinstance(widget, qt_import.QLineEdit):
if type_fn is float and validator:
widget.setText(
"{:g}".format(
round(float(origin_value), validator.decimals())
)
)
try:
setattr(self.__model, field_name, type_fn(origin_value))
except ValueError:
if origin_value != "":
raise
else:
dispatcher.send("model_update", self.__model, field_name, self)
def __ledit_text_edited(self, field_name, widget, new_value, type_fn, validator):
self.bindings[field_name][3] = True
if self.__validated(
field_name, validator, self.bindings[field_name][0], new_value
):
try:
setattr(self.__model, field_name, type_fn(new_value))
except ValueError:
if new_value != "":
raise
else:
dispatcher.send("model_update", self.__model, field_name, self)
def __validated(self, field_name, validator, widget, new_value):
if validator:
try:
flt_value = float(new_value)
except BaseException:
colors.set_widget_color(
widget, colors.LIGHT_RED, qt_import.QPalette.Base
)
return False
if flt_value >= min(
validator.bottom(), validator.top()
) and flt_value <= max(validator.bottom(), validator.top()):
# if validator.validate(new_value, widget.cursorPosition())[0] \
# == QValidator.Acceptable:
if self.bindings[field_name][3]:
colors.set_widget_color(
widget, colors.LIGHT_YELLOW, qt_import.QPalette.Base
)
else:
colors.set_widget_color(
widget, colors.WHITE, qt_import.QPalette.Base
)
return True
else:
colors.set_widget_color(
widget, colors.LIGHT_RED, qt_import.QPalette.Base
)
return False
else:
if self.bindings[field_name][3]:
colors.set_widget_color(
widget, colors.LIGHT_YELLOW, qt_import.QPalette.Base
)
else:
colors.set_widget_color(widget, colors.WHITE, qt_import.QPalette.Base)
return True
def get_model(self):
return self.__model
def set_model(self, obj):
self.__model = obj
self.init_bindings()
self.clear_edit()
self.validate_all()
def init_bindings(self):
for field_name in self.bindings.keys():
self._update_widget(field_name, None)
def _update_widget(self, field_name, data_binder):
if data_binder == self:
return
try:
widget, validator, type_fn, edited = self.bindings[field_name]
except KeyError:
return
try:
widget.blockSignals(True)
if isinstance(widget, qt_import.QLineEdit):
if type_fn is float and validator:
if getattr(self.__model, field_name):
value = float(getattr(self.__model, field_name))
widget.setText(
"{:g}".format(round(float(value), validator.decimals()))
)
elif type_fn is int and validator:
value = int(getattr(self.__model, field_name))
widget.setText("%d" % value)
else:
widget.setText(str(getattr(self.__model, field_name)))
elif isinstance(widget, qt_import.QLabel):
widget.setText(str(getattr(self.__model, field_name)))
elif isinstance(widget, qt_import.QComboBox):
widget.setCurrentIndex(int(getattr(self.__model, field_name)))
elif isinstance(widget, qt_import.QCheckBox) or isinstance(
widget, qt_import.QRadioButton
):
widget.setChecked(bool(getattr(self.__model, field_name)))
finally:
widget.blockSignals(False)
def bind_value_update(self, field_name, widget, type_fn, validator=None):
self.bindings[field_name] = [widget, validator, type_fn, False]
if isinstance(widget, qt_import.QLineEdit):
widget.textChanged.connect(
lambda new_value: self.__ledit_update_value(
field_name, widget, new_value, type_fn, validator
)
)
widget.textEdited.connect(
lambda new_value: self.__ledit_text_edited(
field_name, widget, new_value, type_fn, validator
)
)
if type_fn is float and validator:
pattern = "%." + str(validator.decimals()) + "f"
if getattr(self.__model, field_name):
widget.setText(pattern % float(getattr(self.__model, field_name)))
else:
widget.setText(str(getattr(self.__model, field_name)))
elif isinstance(widget, qt_import.QLabel):
widget.setText(str(getattr(self.__model, field_name)))
elif isinstance(widget, qt_import.QComboBox):
widget.activated.connect(
lambda new_value: self.__combobox_update_value(field_name, new_value)
)
widget.setCurrentIndex(int(getattr(self.__model, field_name)))
elif isinstance(widget, qt_import.QCheckBox) or isinstance(
widget, qt_import.QRadioButton
):
widget.toggled.connect(
lambda new_value: self.__checkbox_update_value(field_name, new_value)
)
widget.setChecked(bool(getattr(self.__model, field_name)))
if validator and not widget.toolTip():
if isinstance(validator, qt_import.QDoubleValidator):
tooltip = "%s limits %.2f : %.2f" % (
field_name.replace("_", " ").capitalize(),
validator.bottom(),
validator.top(),
)
else:
tooltip = "%s limits %d : %d" % (
field_name.replace("_", " ").capitalize(),
validator.bottom(),
validator.top(),
)
widget.setToolTip(tooltip)
def validate_all(self):
result = []
for item in self.bindings.items():
key = item[0]
widget = item[1][0]
validator = item[1][1]
# if validator:
if isinstance(widget, qt_import.QLineEdit):
if not self.__validated(key, validator, widget, widget.text()):
result.append(key)
elif isinstance(widget, qt_import.QComboBox):
pass
elif isinstance(widget, qt_import.QCheckBox) or isinstance(
widget, qt_import.QRadioButton
):
pass
return result
def clear_edit(self):
for key in self.bindings.keys():
self.bindings[key][3] = False
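# Minimal usage sketch (illustrative only; "acq_model" is an assumed data model object
# with an "exp_time" attribute and "exp_time_ledit" an assumed qt_import.QLineEdit):
#
#   binder = DataModelInputBinder(acq_model)
#   binder.bind_value_update("exp_time", exp_time_ledit, float,
#                            qt_import.QDoubleValidator(0.001, 6000, 4))
#   invalid_fields = binder.validate_all()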
| lgpl-3.0 | -875,978,514,272,446,100 | 37.435294 | 86 | 0.540149 | false |
google/tink | python/tink/jwt/_jwt_key_templates.py | 1 | 4813 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
"""Pre-generated JWT KeyTemplate."""
from tink.proto import jwt_ecdsa_pb2
from tink.proto import jwt_hmac_pb2
from tink.proto import jwt_rsa_ssa_pkcs1_pb2
from tink.proto import jwt_rsa_ssa_pss_pb2
from tink.proto import tink_pb2
_F4 = 65537
# TODO(juerg): Add TINK key templates.
def _create_jwt_hmac_template(algorithm: jwt_hmac_pb2.JwtHmacAlgorithm,
key_size: int) -> tink_pb2.KeyTemplate:
key_format = jwt_hmac_pb2.JwtHmacKeyFormat(
algorithm=algorithm, key_size=key_size)
return tink_pb2.KeyTemplate(
type_url='type.googleapis.com/google.crypto.tink.JwtHmacKey',
value=key_format.SerializeToString(),
output_prefix_type=tink_pb2.RAW)
def _create_jwt_ecdsa_template(
algorithm: jwt_ecdsa_pb2.JwtEcdsaAlgorithm) -> tink_pb2.KeyTemplate:
key_format = jwt_ecdsa_pb2.JwtEcdsaKeyFormat(
algorithm=algorithm)
return tink_pb2.KeyTemplate(
type_url='type.googleapis.com/google.crypto.tink.JwtEcdsaPrivateKey',
value=key_format.SerializeToString(),
output_prefix_type=tink_pb2.RAW)
# TODO(juerg): Move this function into a util lib.
def _num_to_bytes(n: int) -> bytes:
"""Converts a number to bytes."""
if n < 0:
raise OverflowError("number can't be negative")
if n == 0:
return b'\x00'
octets = bytearray()
while n:
octets.append(n % 256)
n //= 256
return bytes(octets[::-1])
def _create_jwt_rsa_ssa_pkcs1_template(
algorithm: jwt_rsa_ssa_pkcs1_pb2.JwtRsaSsaPkcs1Algorithm,
modulus_size: int
) -> tink_pb2.KeyTemplate:
key_format = jwt_rsa_ssa_pkcs1_pb2.JwtRsaSsaPkcs1KeyFormat(
algorithm=algorithm,
modulus_size_in_bits=modulus_size,
public_exponent=_num_to_bytes(_F4))
return tink_pb2.KeyTemplate(
type_url='type.googleapis.com/google.crypto.tink.JwtRsaSsaPkcs1PrivateKey',
value=key_format.SerializeToString(),
output_prefix_type=tink_pb2.RAW)
def _create_jwt_rsa_ssa_pss_template(
algorithm: jwt_rsa_ssa_pss_pb2.JwtRsaSsaPssAlgorithm,
modulus_size: int
) -> tink_pb2.KeyTemplate:
key_format = jwt_rsa_ssa_pss_pb2.JwtRsaSsaPssKeyFormat(
algorithm=algorithm,
modulus_size_in_bits=modulus_size,
public_exponent=_num_to_bytes(_F4))
return tink_pb2.KeyTemplate(
type_url='type.googleapis.com/google.crypto.tink.JwtRsaSsaPssPrivateKey',
value=key_format.SerializeToString(),
output_prefix_type=tink_pb2.RAW)
# Hmac Templates
def jwt_hs256_template() -> tink_pb2.KeyTemplate:
return _create_jwt_hmac_template(jwt_hmac_pb2.HS256, 32)
def jwt_hs384_template() -> tink_pb2.KeyTemplate:
return _create_jwt_hmac_template(jwt_hmac_pb2.HS384, 48)
def jwt_hs512_template() -> tink_pb2.KeyTemplate:
return _create_jwt_hmac_template(jwt_hmac_pb2.HS512, 64)
# ECDSA Templates
def jwt_es256_template() -> tink_pb2.KeyTemplate:
return _create_jwt_ecdsa_template(jwt_ecdsa_pb2.ES256)
def jwt_es384_template() -> tink_pb2.KeyTemplate:
return _create_jwt_ecdsa_template(jwt_ecdsa_pb2.ES384)
def jwt_es512_template() -> tink_pb2.KeyTemplate:
return _create_jwt_ecdsa_template(jwt_ecdsa_pb2.ES512)
# RSA SSA PKCS1 Templates
def jwt_rs256_2048_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pkcs1_template(jwt_rsa_ssa_pkcs1_pb2.RS256, 2048)
def jwt_rs256_3072_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pkcs1_template(jwt_rsa_ssa_pkcs1_pb2.RS256, 3072)
def jwt_rs384_3072_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pkcs1_template(jwt_rsa_ssa_pkcs1_pb2.RS384, 3072)
def jwt_rs512_4096_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pkcs1_template(jwt_rsa_ssa_pkcs1_pb2.RS512, 4096)
# RSA SSA PSS Templates
def jwt_ps256_2048_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pss_template(jwt_rsa_ssa_pss_pb2.PS256, 2048)
def jwt_ps256_3072_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pss_template(jwt_rsa_ssa_pss_pb2.PS256, 3072)
def jwt_ps384_3072_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pss_template(jwt_rsa_ssa_pss_pb2.PS384, 3072)
def jwt_ps512_4096_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pss_template(jwt_rsa_ssa_pss_pb2.PS512, 4096)
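# Usage sketch (added note; assumes tink's public JWT API re-exports these templates
# as tink.jwt.jwt_*_template and that the standard keyset helpers are available --
# the exact calls may differ per tink-py version):
#
#   import tink
#   from tink import jwt
#
#   jwt.register_jwt_signature()
#   handle = tink.new_keyset_handle(jwt.jwt_es256_template())
#   signer = handle.primitive(jwt.JwtPublicKeySign)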
| apache-2.0 | -4,097,483,200,969,170,000 | 31.965753 | 81 | 0.724288 | false |