| repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5-92 | stringlengths 4-232 | stringclasses 19 values | stringlengths 4-7 | stringlengths 721-1.04M | stringclasses 15 values | int64 -9,223,277,421,539,062,000 to 9,223,102,107B | float64 6.51-99.9 | int64 15-997 | float64 0.25-0.97 | bool 1 class |
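Each row of this dump pairs a source file (repo_name, path, content, license) with per-file statistics (mean and max line length, alphabetic fraction, and an autogenerated flag). A minimal sketch of filtering such a dump is shown below; the `load_dataset` call from the HuggingFace `datasets` library and the dataset path are illustrative assumptions, not part of this export.

```python
# Hypothetical sketch: filter rows of a code dump that follows the schema above.
# The dataset path below is a placeholder, and the `datasets` dependency is assumed.
from datasets import load_dataset

rows = load_dataset("example/python-code-dump", split="train")  # placeholder path

def keep(row):
    # Drop autogenerated files, keep permissive licenses and human-scale line lengths.
    return (not row["autogenerated"]
            and row["license"] in {"mit", "bsd-3-clause", "apache-2.0"}
            and row["line_max"] < 1000)

filtered = rows.filter(keep)
print(len(filtered), "rows kept")
```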
ClusterLabs/booth | test/assertions.py | 1 | 2281 | import re
class BoothAssertions:
def configFileMissingMyIP(self, config_file=None, lock_file=None):
(pid, ret, stdout, stderr, runner) = \
self.run_booth(config_file=config_file, lock_file=lock_file,
expected_exitcode=1, expected_daemon=False)
expected_error = "(ERROR|error): Cannot find myself in the configuration"
self.assertRegexpMatches(stderr, expected_error)
def assertLockFileError(self, config_file=None, config_text=None,
lock_file=True, args=()):
(pid, ret, stdout, stderr, runner) = \
self.run_booth(config_text=config_text, config_file=config_file,
lock_file=lock_file, args=args, expected_exitcode=1)
expected_error = 'lockfile open error %s: Permission denied' % runner.lock_file_used()
self.assertRegexpMatches(self.read_log(), expected_error)
######################################################################
# backported from 2.7 just in case we're running on an older Python
def assertRegexpMatches(self, text, expected_regexp, msg=None):
"""Fail the test unless the text matches the regular expression."""
if isinstance(expected_regexp, str):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(text):
msg = msg or "Regexp didn't match"
msg = '%s: %r not found in %r' % (msg, expected_regexp.pattern, text)
raise self.failureException(msg)
def assertNotRegexpMatches(self, text, unexpected_regexp, msg=None):
"""Fail the test if the text matches the regular expression."""
if isinstance(unexpected_regexp, str):
unexpected_regexp = re.compile(unexpected_regexp)
match = unexpected_regexp.search(text)
if match:
msg = msg or "Regexp matched"
msg = '%s: %r matches %r in %r' % (msg,
text[match.start():match.end()],
unexpected_regexp.pattern,
text)
raise self.failureException(msg)
######################################################################
| gpl-2.0 | -1,527,476,993,304,187,000 | 52.046512 | 94 | 0.553705 | false |
ideamonk/Web2Hunter-GAE | web2hunter.py | 1 | 2098 | #!/usr/bin/env python
# Web2Hunter -- Abhishek Mishra <ideamonk at gmail.com>
#
# a web 2.0 name generator extension to domainhunter.py
#
# usage -
# $ python web2hunter.py
import domainhunter as DH
import random
A = ["Anti", "Aero", "Babble", "Buzz", "Blog", "Blue", "Brain", "Bright", "Browse", "Bubble", "Chat", "Chatter", "Dab", "Dazzle", "Dev", "Digi", "Edge", "Feed", "Five", "Flash", "Flip", "Gab", "Giga", "Inno", "Jabber", "Jax", "Jet", "Jump", "Link", "Live", "My", "N", "Photo", "Pod", "Real", "Riff", "Shuffle", "Snap", "Skip", "Tag", "Tek", "Thought", "Top", "Topic", "Twitter", "Word", "You", "Zoom"]
B = ["bean", "beat", "bird", "blab", "box", "bridge", "bug", "buzz", "cast", "cat", "chat", "club", "cube", "dog", "drive", "feed", "fire", "fish", "fly", "ify", "jam", "links", "list", "lounge", "mix", "nation", "opia", "pad", "path", "pedia", "point", "pulse", "set", "space", "span", "share", "shots", "sphere", "spot", "storm", "ster", "tag", "tags", "tube", "tune", "type", "verse", "vine", "ware", "wire", "works", "XS", "Z", "zone", "zoom"]
C = ["Ai", "Aba", "Agi", "Ava", "Awesome", "Cami", "Centi", "Cogi", "Demi", "Diva", "Dyna", "Ea", "Ei", "Fa", "Ge", "Ja", "I", "Ka", "Kay", "Ki", "Kwi", "La", "Lee", "Mee", "Mi", "Mu", "My", "Oo", "O", "Oyo", "Pixo", "Pla", "Qua", "Qui", "Roo", "Rhy", "Ska", "Sky", "Ski", "Ta", "Tri", "Twi", "Tru", "Vi", "Voo", "Wiki", "Ya", "Yaki", "Yo", "Za", "Zoo"]
D = ["ba", "ble", "boo", "box", "cero", "deo", "del", "do", "doo", "gen", "jo", "lane", "lia", "lith", "loo", "lium", "mba", "mbee", "mbo", "mbu", "mia", "mm", "nder", "ndo", "ndu", "noodle", "nix", "nte", "nti", "nu", "nyx", "pe", "re", "ta", "tri", "tz", "va", "vee", "veo", "vu", "xo", "yo", "zz", "zzy", "zio", "zu"]
def genName():
output = ""
random.shuffle(A)
random.shuffle(B)
random.shuffle(C)
random.shuffle(D)
if (random.randint(0,1) == 1):
awesomename = A[0] + B[0]
else:
awesomename = C[0] + D[0]
random.shuffle(DH.tlds)
tld = DH.tlds[0]
if ( DH.domainSearch(awesomename + tld) ):
output = awesomename + tld
return output
| bsd-3-clause | -202,349,895,963,669 | 55.702703 | 448 | 0.515729 | false |
buzzz321/SerMan | server.py | 1 | 4112 | #!/usr/bin/env python3
# found somewhere on the internet and added my own stuff to it.
import logging
import socket
import select
import subprocess
HOSTNAME = 'localhost'
PORT = '4000'
MAXIMUM_QUEUED_CONNECTIONS = 5
RECEIVING_BUFFER_SIZE = 4096
logger = logging.getLogger(__name__)
def start_server(hostname, port):
# Get all possible binding addresses for given hostname and port.
possible_addresses = socket.getaddrinfo(
hostname,
port,
family=socket.AF_UNSPEC,
type=socket.SOCK_STREAM,
flags=socket.AI_PASSIVE
)
server_socket = None
# Look for an address that will actually bind.
for family, socket_type, protocol, name, address in possible_addresses:
try:
# Create socket.
server_socket = socket.socket(family, socket_type, protocol)
# Make socket port reusable.
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# Bind socket to the address.
server_socket.bind(address)
except OSError:
# Try another address.
continue
break
if server_socket is None:
logger.error("No suitable address available.")
return
# Listen for incoming connections.
server_socket.listen(MAXIMUM_QUEUED_CONNECTIONS)
logger.info("Listening on %s port %d." % server_socket.getsockname()[:2])
monitored_sockets = [server_socket]
try:
while True:
# Wait for any of the monitored sockets to become readable.
ready_to_read_sockets = select.select(
monitored_sockets,
tuple(),
tuple()
)[0]
for ready_socket in ready_to_read_sockets:
if ready_socket == server_socket:
# If server socket is readable, accept new client
# connection.
client_socket, client_address = server_socket.accept()
monitored_sockets.append(client_socket)
logger.info("New connection #%d on %s:%d." % (
client_socket.fileno(),
client_address[0],
client_address[1]
))
else:
message = ready_socket.recv(RECEIVING_BUFFER_SIZE)
if message:
# Client send correct message. Echo it.
if b'lista' in message:
print(message)
lista = subprocess.check_output(["ls", "-l"])
print(lista)
ready_socket.sendall(lista)
if b'long' in message:
print(message)
infile = open('serman/sermanwindow.cc')
lista = infile.readlines()
lista = ', '.join([str(x) for x in lista])
print(lista)
ready_socket.sendall(str.encode(lista))
else:
print(message)
ready_socket.sendall(message)
else:
# Client connection is lost. Handle it.
logger.info(
"Lost connection #%d." % ready_socket.fileno()
)
monitored_sockets.remove(ready_socket)
except KeyboardInterrupt:
pass
logger.info("Shutdown initiated.")
# Close client connections.
monitored_sockets.remove(server_socket)
for client_socket in monitored_sockets:
logger.info("Closing connection #%d." % client_socket.fileno())
client_socket.close()
# Close server socket.
logger.info("Shutting server down...")
server_socket.close()
if __name__ == '__main__':
# Configure logging.
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
# Start server.
start_server(HOSTNAME, PORT)
| bsd-2-clause | 5,011,016,042,292,578,000 | 37.074074 | 79 | 0.530399 | false |
zibraproject/zika-pipeline | scripts/extract.py | 1 | 1704 | from poretools.Fast5File import Fast5FileSet
import sys
# extract with constraints:
# -- only one group ever
# -- only one flowcell ID ever
# -- always unique read ID
def run(parser, args):
flowcells = set()
reads = set()
i = 0
basecaller_version = None
for fast5 in Fast5FileSet(args.directory, None, args.basecaller):
# if not basecaller_version:
# basecaller_version = fast5.get_basecaller_version()
# elif fast5.get_basecaller_version() != basecaller_version:
# print >>sys.stderr, "ABORTED: More than one basecaller version found: %s, %s" % (basecaller_version, fast5.get_basecaller_version())
# raise SystemExit
if not fast5.is_open:
print >>sys.stderr, "Skipping read: %s" % (fast5.filename)
continue
read_flowcell_id = fast5.get_flowcell_id()
flowcells.add(read_flowcell_id)
if len(flowcells) != 1:
print >>sys.stderr, "ABORTED: More than one flowcell found in dataset: %s" % (flowcells,)
raise SystemExit
#if flowcell_id != read_flowcell_id:
# print >>sys.stderr, "Skipping read from flowcell: %s" % (read_flowcell_id)
# continue
read_id = fast5.get_read_id()
if read_id in reads:
print >>sys.stderr, "Skipping duplicate read: %s" % (read_id)
continue
reads.add(read_id)
fas = fast5.get_fastas('fwd')
for read in fas:
if read:
print read
fast5.close()
i += 1
if i % 1000 == 0:
print >>sys.stderr, "Extracted %s reads" % (i,)
# zibra.py
# run
# --flowcell
# --type 1d / 2d
# --check-sample-name
# --check-flowcell-name
# --min-support-value
# --min-depth
# --min-log-likelihood
# --normalised-depth
# --use-indels
# --trim-reads
# <scheme> <sample> <directory>
# list-schemes
| mit | -3,093,050,626,163,535,000 | 23.695652 | 136 | 0.658451 | false |
inspirehep/inspire-next | inspirehep/factory.py | 1 | 2892 | # -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014-2017 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""INSPIREHEP app factories."""
from __future__ import absolute_import, division, print_function
import os
import sys
from invenio_base.app import create_app_factory
from invenio_base.wsgi import create_wsgi_factory
from invenio_config import create_config_loader
from . import config
env_prefix = 'APP'
def config_loader(app, **kwargs_config):
invenio_config_loader = create_config_loader(config=config, env_prefix=env_prefix)
result = invenio_config_loader(app, **kwargs_config)
app.url_map.strict_slashes = False
return result
def api_config_loader(app, **kwargs_config):
return config_loader(app, RESTFUL_API=True, **kwargs_config)
instance_path = os.getenv(env_prefix + '_INSTANCE_PATH') or \
os.path.join(sys.prefix, 'var', 'inspirehep-instance')
"""Instance path for Invenio.
Defaults to ``<env_prefix>_INSTANCE_PATH`` or if environment variable is not
set ``<sys.prefix>/var/<app_name>-instance``.
"""
static_folder = os.getenv(env_prefix + '_STATIC_FOLDER') or \
os.path.join(instance_path, 'static')
"""Static folder path.
Defaults to ``<env_prefix>_STATIC_FOLDER`` or if environment variable is not
set ``<sys.prefix>/var/<app_name>-instance/static``.
"""
static_url_path = '/oldui'
create_api = create_app_factory(
'inspirehep',
config_loader=api_config_loader,
blueprint_entry_points=['invenio_base.api_blueprints'],
extension_entry_points=['invenio_base.api_apps'],
converter_entry_points=['invenio_base.api_converters'],
instance_path=instance_path,
static_url_path=static_url_path,
)
create_app = create_app_factory(
'inspirehep',
config_loader=config_loader,
blueprint_entry_points=['invenio_base.blueprints'],
extension_entry_points=['invenio_base.apps'],
converter_entry_points=['invenio_base.converters'],
wsgi_factory=create_wsgi_factory({'/api': create_api}),
instance_path=instance_path,
static_folder=static_folder,
static_url_path=static_url_path,
)
| gpl-3.0 | -4,535,237,258,872,902,000 | 31.494382 | 86 | 0.72787 | false |
GeoMatDigital/django-geomat | geomat/feedback/views.py | 1 | 1251 | from django.shortcuts import render
from rest_framework import generics
from rest_framework.response import Response
from rest_framework import views
from geomat.feedback.serializers import FeedBackSerializer
from django.core.mail import send_mail
from rest_framework import status
from drf_yasg.utils import swagger_auto_schema
class FeedBackView(generics.GenericAPIView):
serializer_class = FeedBackSerializer
permission_classes = ()
@swagger_auto_schema(responses={200:"The Views response is 200 if mail is sent"})
def post(self, request, *args, **kwargs):
serializer = FeedBackSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer = serializer.data
message = send_mail(subject=serializer["emailTitle"],
from_email= "{0} <{1}>".format(serializer["username"],serializer["userEmail"]),
message=serializer["emailContent"],
recipient_list=["[email protected]"],
fail_silently=False)
if not message:
return Response(status=status.HTTP_400_BAD_REQUEST)
return Response(data=serializer)
# Create your views here.
| bsd-3-clause | -621,155,066,860,859,400 | 39.354839 | 107 | 0.681055 | false |
xmendez/wfuzz | tests/server_dir/simple_server.py | 1 | 3589 | # slightly modified from
# https://gist.github.com/trungly/5889154
from http.server import HTTPServer
from http.server import SimpleHTTPRequestHandler
import urllib.parse
class GetHandler(SimpleHTTPRequestHandler):
def do_HEAD(self):
parsed_path = urllib.parse.urlparse(self.path)
if parsed_path.path.startswith("/echo"):
message = "\n".join(
[
"CLIENT VALUES:",
"client_address=%s (%s)"
% (self.client_address, self.address_string()),
"command=%s" % self.command,
"path=%s" % self.path,
"real path=%s" % parsed_path.path,
"query=%s" % parsed_path.query,
"request_version=%s" % self.request_version,
"",
"HEADERS:",
"%s" % self.headers,
]
)
self.send_response(200)
self.end_headers()
self.wfile.write(message.encode("utf-8"))
elif parsed_path.path.startswith("/redirect"):
self.send_response(301)
self.send_header("Location", "/echo")
self.end_headers()
else:
SimpleHTTPRequestHandler.do_HEAD(self)
return
def do_GET(self):
parsed_path = urllib.parse.urlparse(self.path)
if parsed_path.path.startswith("/echo"):
message = "\n".join(
[
"CLIENT VALUES:",
"client_address=%s (%s)"
% (self.client_address, self.address_string()),
"command=%s" % self.command,
"path=%s" % self.path,
"real path=%s" % parsed_path.path,
"query=%s" % parsed_path.query,
"request_version=%s" % self.request_version,
"",
"HEADERS:",
"%s" % self.headers,
]
)
self.send_response(200)
self.end_headers()
self.wfile.write(message.encode("utf-8"))
elif parsed_path.path.startswith("/redirect"):
self.send_response(301)
self.send_header("Location", "/echo")
self.end_headers()
else:
SimpleHTTPRequestHandler.do_GET(self)
return
def do_POST(self):
parsed_path = urllib.parse.urlparse(self.path)
if parsed_path.path.startswith("/echo"):
content_len = int(self.headers.get("content-length"))
post_body = self.rfile.read(content_len).decode("utf-8")
self.send_response(200)
self.end_headers()
message = "\n".join(
[
"CLIENT VALUES:",
"client_address=%s (%s)"
% (self.client_address, self.address_string()),
"command=%s" % self.command,
"path=%s" % self.path,
"real path=%s" % parsed_path.path,
"query=%s" % parsed_path.query,
"request_version=%s" % self.request_version,
"",
"HEADERS:",
"%s" % self.headers,
"POST_DATA=%s" % post_body,
"",
]
)
self.wfile.write(message.encode("utf-8"))
return
if __name__ == "__main__":
server = HTTPServer(("0.0.0.0", 8000), GetHandler)
server.serve_forever()
| gpl-2.0 | 6,534,772,595,874,089,000 | 33.84466 | 68 | 0.462246 | false |
josefmonje/Django_demo | main/migrations/0001_initial.py | 1 | 1096 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-01 10:46
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('modified', models.DateTimeField(auto_now=True)),
('is_verified', models.BooleanField(default=False)),
('name', models.CharField(max_length=50)),
('update', models.TextField()),
('image', models.ImageField(blank=True, null=True, upload_to='')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)),
],
),
]
| mit | 313,435,659,963,360,100 | 34.354839 | 145 | 0.609489 | false |
hsmade/expenses | expenses/tests.py | 1 | 9500 | from django.test import TestCase
from django.utils import timezone
from .models import Month, Withdrawal, Deposit, DepositType, WithdrawalType
from django.core.urlresolvers import reverse
from django.core.exceptions import ObjectDoesNotExist
from . import import_data
from expenses.lib import rabo
from tempfile import NamedTemporaryFile
class ImportTest(TestCase):
def setUp(self):
self.temp_file = NamedTemporaryFile(delete=False)
with self.temp_file as temp_file:
temp_file.write('"NL11RABO0123456789","EUR","20150615","D","100.00","NL22RABO0123456789","My Name","20150615","tb","Description","line","another","line 4","line5","end","","123456789","",""\r\n')
temp_file.write('"NL11RABO0123456789","EUR","20150615","C","100.00","NL22RABO0123456789","My Name","20150615","tb","Description","line","another","line 4","line5","end","","123456789","",""\r\n')
def test_read_csv(self):
data = rabo.CSVExport(self.temp_file.name)
self.assertTrue(data.transactions)
self.assertEqual(data.transactions[0].description, 'Description line another line 4 line5 end')
def test_import(self):
month = Month.objects.create(year=2015, month=1)
import_data.import_rabo(self.temp_file.name, month)
withdrawal = False
try:
withdrawal = Withdrawal.objects.get(month=month, description='Description line another line 4 line5 end')
except Exception as e:
self.assertFalse(True, e)
self.assertTrue(withdrawal)
deposit = False
try:
deposit = Deposit.objects.get(month=month, description='Description line another line 4 line5 end')
except Exception as e:
self.assertFalse(True, e)
self.assertTrue(deposit)
class ExpensesViewTest(TestCase):
def test_index_view(self):
_ = Month.objects.create(year=2015, month=1)
response = self.client.get(reverse('expenses:index'))
self.assertContains(response, '2015/1: 0')
def test_month_detail(self):
month = Month.objects.create(year=2015, month=1)
deposit_type = DepositType(name='deposit type')
deposit_type.save()
withdrawal_type = WithdrawalType(name='withdrawal type')
withdrawal_type.save()
_ = Deposit.objects.create(month=month, amount=20.0, description='test deposit',
date=timezone.now(), deposit_type=deposit_type)
_ = Withdrawal.objects.create(month=month, amount=5.0, description='test withdrawal',
date=timezone.now(), withdrawal_type=withdrawal_type)
response = self.client.get(reverse('expenses:month', args=(month.id,)))
# print response.content
self.assertContains(response, '2015/1')
self.assertContains(response, '15.00')
self.assertContains(response, 'test deposit')
self.assertContains(response, 'test withdrawal')
def test_add_withdrawal(self):
month = Month.objects.create(year=2015, month=1)
month.save()
withdrawal_type = WithdrawalType.objects.create(name='withdrawal type')
withdrawal_type.save()
response = self.client.post(reverse('expenses:withdrawal', args=(month.id, -1)), {
'description': 'description',
'amount': 10,
'date': timezone.now().date(),
'withdrawal_type': withdrawal_type.id,
'account_number': '1234',
})
print response
self.assertEqual(response.status_code, 302)
withdrawal = Withdrawal.objects.get(month=month, withdrawal_type=withdrawal_type)
self.assertTrue(withdrawal)
self.assertEqual(withdrawal.description, 'description')
self.assertEqual(withdrawal.amount, 10.0)
self.assertEqual(withdrawal.date, timezone.now().date())
self.assertEqual(withdrawal.withdrawal_type, withdrawal_type)
self.assertEqual(withdrawal.account_number, '1234')
def test_update_withdrawal(self):
month = Month.objects.create(year=2015, month=1)
month.save()
withdrawal_type = WithdrawalType.objects.create(name='withdrawal type')
withdrawal_type.save()
withdrawal1 = Withdrawal.objects.create(
month=month,
description='description',
amount=10,
date=timezone.now().date(),
withdrawal_type=withdrawal_type,
account_number='1234',
)
withdrawal1.save()
response = self.client.post(reverse('expenses:withdrawal', args=(month.id, withdrawal1.id)), {
'description': 'description2',
'amount': 11,
'date': timezone.now().date(),
'withdrawal_type': withdrawal_type.id,
'account_number': '12345',
})
print response
self.assertEqual(response.status_code, 302)
withdrawal = Withdrawal.objects.get(month=month, withdrawal_type=withdrawal_type)
self.assertTrue(withdrawal)
self.assertEqual(withdrawal.description, 'description2')
self.assertEqual(withdrawal.amount, 11.0)
self.assertEqual(withdrawal.date, timezone.now().date())
self.assertEqual(withdrawal.withdrawal_type, withdrawal_type)
self.assertEqual(withdrawal.account_number, '12345')
def test_delete_withdrawal(self):
month = Month.objects.create(year=2015, month=1)
month.save()
withdrawal_type = WithdrawalType.objects.create(name='withdrawal type')
withdrawal_type.save()
withdrawal = Withdrawal.objects.create(
month=month,
description='description',
amount=10,
date=timezone.now().date(),
withdrawal_type=withdrawal_type,
account_number='1234',
)
withdrawal.save()
withdrawal_id = withdrawal.id
response = self.client.get(reverse('expenses:withdrawal-delete', args=(month.id, withdrawal.id)))
self.assertEqual(response.status_code, 302)
# Withdrawal.objects.get(pk=withdrawal_id)
try:
Withdrawal.objects.get(pk=withdrawal_id)
except ObjectDoesNotExist:
pass
else:
self.assertTrue(False, msg='Withdrawal object not removed')
def test_add_deposit(self):
month = Month.objects.create(year=2015, month=1)
month.save()
deposit_type = DepositType.objects.create(name='deposit type')
deposit_type.save()
response = self.client.post(reverse('expenses:deposit', args=(month.id, -1)), {
'description': 'description',
'amount': 10,
'date': timezone.now().date(),
'deposit_type': deposit_type.id,
'account_number': '1234',
})
print response
self.assertEqual(response.status_code, 302)
deposit = Deposit.objects.get(month=month, deposit_type=deposit_type)
self.assertTrue(deposit)
self.assertEqual(deposit.description, 'description')
self.assertEqual(deposit.amount, 10.0)
self.assertEqual(deposit.date, timezone.now().date())
self.assertEqual(deposit.deposit_type, deposit_type)
self.assertEqual(deposit.account_number, '1234')
def test_update_deposit(self):
month = Month.objects.create(year=2015, month=1)
month.save()
deposit_type = DepositType.objects.create(name='deposit type')
deposit_type.save()
deposit1 = Deposit.objects.create(
month=month,
description='description',
amount=10,
date=timezone.now().date(),
deposit_type=deposit_type,
account_number='1234',
)
deposit1.save()
response = self.client.post(reverse('expenses:deposit', args=(month.id, deposit1.id)), {
'description': 'description2',
'amount': 11,
'date': timezone.now().date(),
'deposit_type': deposit_type.id,
'account_number': '12345',
})
print response
self.assertEqual(response.status_code, 302)
deposit = Deposit.objects.get(month=month, deposit_type=deposit_type)
self.assertTrue(deposit)
self.assertEqual(deposit.description, 'description2')
self.assertEqual(deposit.amount, 11.0)
self.assertEqual(deposit.date, timezone.now().date())
self.assertEqual(deposit.deposit_type, deposit_type)
self.assertEqual(deposit.account_number, '12345')
def test_delete_deposit(self):
month = Month.objects.create(year=2015, month=1)
month.save()
deposit_type = DepositType.objects.create(name='deposit type')
deposit_type.save()
deposit = Deposit.objects.create(
month=month,
description='description',
amount=10,
date=timezone.now().date(),
deposit_type=deposit_type,
account_number='1234',
)
deposit.save()
deposit_id = deposit.id
response = self.client.get(reverse('expenses:deposit-delete', args=(month.id, deposit.id)))
self.assertEqual(response.status_code, 302)
# Withdrawal.objects.get(pk=deposit_id)
try:
Withdrawal.objects.get(pk=deposit_id)
except ObjectDoesNotExist:
pass
else:
self.assertTrue(False, msg='Withdrawal object not removed')
| gpl-2.0 | -9,034,774,651,408,447,000 | 42.577982 | 207 | 0.626316 | false |
yannrouillard/weboob | modules/banqueaccord/pages.py | 1 | 5605 | # -*- coding: utf-8 -*-
# Copyright(C) 2013 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from decimal import Decimal
import re
from weboob.capabilities.bank import Account
from weboob.tools.browser import BasePage, BrokenPageError
from weboob.tools.captcha.virtkeyboard import MappedVirtKeyboard, VirtKeyboardError
from weboob.tools.capabilities.bank.transactions import FrenchTransaction
__all__ = ['LoginPage', 'IndexPage', 'AccountsPage', 'OperationsPage']
class VirtKeyboard(MappedVirtKeyboard):
symbols={'0':('8664b9cdfa66b4c3a1ec99c35a2bf64b','9eb80c6e99410eaac32905b2c77e65e5','37717277dc2471c8a7bf37e2068a8f01'),
'1':('1f36986f9d27dde54ce5b08e8e285476','9d0aa7a0a2bbab4f2c01ef1e820cb3f1'),
'2':('b560b0cce2ca74d3d499d73775152ab7',),
'3':('d16e426e71fc29b1b55d0fbded99a473',),
'4':('19c68066e414e08d17c86fc5c4acc949','c43354a7f7739508f76c538d5b3bce26'),
'5':('4b9abf98e30a1475997ec770cbe5e702','2059b4aa95c7b3156b171255fa10bbdd'),
'6':('804be4171d61f9cc10e9978c43b1d2a0','a41b091d4a11a318406a5a8bd3ed3837'),
'7':('8adf951f4eea5f446f714214e101d555',),
'8':('568135f3844213c30f2c7880be867d3d',),
'9':('a3750995c511ea1492ac244421109e77','eeb3a8ba804f19380dfe94a91a37595b'),
}
color=(0,0,0)
def __init__(self, page):
img = page.document.find("//img[@usemap='#cv']")
img_file = page.browser.openurl(img.attrib['src'])
MappedVirtKeyboard.__init__(self, img_file, page.document, img, self.color, 'href', convert='RGB')
self.check_symbols(self.symbols, page.browser.responses_dirname)
def check_color(self, pixel):
for p in pixel:
if p >= 0xd5:
return False
return True
def get_symbol_coords(self, (x1, y1, x2, y2)):
# strip borders
return MappedVirtKeyboard.get_symbol_coords(self, (x1+10, y1+10, x2-10, y2-10))
def get_symbol_code(self, md5sum_list):
for md5sum in md5sum_list:
try:
code = MappedVirtKeyboard.get_symbol_code(self,md5sum)
except VirtKeyboardError:
continue
else:
return ''.join(re.findall("'(\d+)'", code)[-2:])
raise VirtKeyboardError('Symbol not found')
def get_string_code(self, string):
code = ''
for c in string:
code += self.get_symbol_code(self.symbols[c])
return code
class LoginPage(BasePage):
def login(self, login, password):
vk = VirtKeyboard(self)
form = self.document.xpath('//form[@id="formulaire-login"]')[0]
code = vk.get_string_code(password)
assert len(code)==10, BrokenPageError("Wrong number of character.")
self.browser.location(self.browser.buildurl(form.attrib['action'], identifiant=login, code=code), no_login=True)
class IndexPage(BasePage):
def get_list(self):
for line in self.document.xpath('//li[@id="menu-n2-mesproduits"]//li//a'):
if line.get('onclick') is None:
continue
account = Account()
account.id = line.get('onclick').split("'")[1]
account.label = self.parser.tocleanstring(line)
yield account
def get_card_name(self):
return self.parser.tocleanstring(self.document.xpath('//h1')[0])
class AccountsPage(BasePage):
def get_balance(self):
balance = Decimal('0.0')
for line in self.document.xpath('//div[@class="detail"]/table//tr'):
try:
left = line.xpath('./td[@class="gauche"]')[0]
right = line.xpath('./td[@class="droite"]')[0]
except IndexError:
#useless line
continue
if len(left.xpath('./span[@class="precision"]')) == 0 and (left.text is None or not 'total' in left.text.lower()):
continue
balance -= Decimal(FrenchTransaction.clean_amount(right.text))
return balance
class OperationsPage(BasePage):
def get_history(self):
for tr in self.document.xpath('//div[contains(@class, "mod-listeoperations")]//table/tbody/tr'):
cols = tr.findall('td')
date = self.parser.tocleanstring(cols[0])
raw = self.parser.tocleanstring(cols[1])
label = re.sub(u' - traité le \d+/\d+', '', raw)
debit = self.parser.tocleanstring(cols[3])
if len(debit) > 0:
t = FrenchTransaction(0)
t.parse(date, raw)
t.label = label
t.set_amount(debit)
yield t
amount = self.parser.tocleanstring(cols[2])
if len(amount) > 0:
t = FrenchTransaction(0)
t.parse(date, raw)
t.label = label
t.set_amount(amount)
t.amount = - t.amount
yield t
| agpl-3.0 | 2,130,746,300,878,250,000 | 37.383562 | 126 | 0.616881 | false |
franblas/facialrecoChallenge | itml.py | 1 | 3881 | # -*- coding: utf-8 -*-
"""
Created on Thu Jun 18 19:04:51 2015
@author: Paco
"""
"""
Information Theoretic Metric Learning, Kulis et al., ICML 2007
"""
import numpy as np
from sklearn.metrics import pairwise_distances
from base_metric import BaseMetricLearner
class ITML(BaseMetricLearner):
"""
Information Theoretic Metric Learning (ITML)
"""
def __init__(self, gamma=1., max_iters=1000, convergence_threshold=1e-3):
"""
gamma: value for slack variables
"""
self.gamma = gamma
self.max_iters = max_iters
self.convergence_threshold = convergence_threshold
def _process_inputs(self, X, constraints, bounds, A0):
self.X = X
# check to make sure that no two constrained vectors are identical
a,b,c,d = constraints
ident = _vector_norm(self.X[a] - self.X[b]) > 1e-9
a, b = a[ident], b[ident]
ident = _vector_norm(self.X[c] - self.X[d]) > 1e-9
c, d = c[ident], d[ident]
# init bounds
if bounds is None:
self.bounds = np.percentile(pairwise_distances(X), (5, 95))
else:
assert len(bounds) == 2
self.bounds = bounds
# init metric
if A0 is None:
self.A = np.identity(X.shape[1])
else:
self.A = A0
return a,b,c,d
def fit(self, X, constraints, bounds=None, A0=None, verbose=False):
"""
X: (n x d) data matrix - each row corresponds to a single instance
constraints: tuple of arrays: (a,b,c,d) indices into X, such that:
d(X[a],X[b]) < d(X[c],X[d])
bounds: (pos,neg) pair of bounds on similarity, such that:
d(X[a],X[b]) < pos
d(X[c],X[d]) > neg
A0: [optional] (d x d) initial regularization matrix, defaults to identity
"""
a,b,c,d = self._process_inputs(X, constraints, bounds, A0)
gamma = self.gamma
num_pos = len(a)
num_neg = len(c)
_lambda = np.zeros(num_pos + num_neg)
lambdaold = np.zeros_like(_lambda)
gamma_proj = 1. if gamma is np.inf else gamma/(gamma+1.)
pos_bhat = np.zeros(num_pos) + self.bounds[0]
neg_bhat = np.zeros(num_neg) + self.bounds[1]
A = self.A
for it in xrange(self.max_iters):
# update positives
vv = self.X[a] - self.X[b]
for i,v in enumerate(vv):
wtw = v.dot(A).dot(v) # scalar
alpha = min(_lambda[i], gamma_proj*(1./wtw - 1./pos_bhat[i]))
_lambda[i] -= alpha
beta = alpha/(1 - alpha*wtw)
pos_bhat[i] = 1./((1 / pos_bhat[i]) + (alpha / gamma))
A += beta * A.dot(np.outer(v,v)).dot(A)
# update negatives
vv = self.X[c] - self.X[d]
for i,v in enumerate(vv):
wtw = v.dot(A).dot(v) # scalar
alpha = min(_lambda[i+num_pos],gamma_proj*(1./neg_bhat[i] - 1./wtw))
_lambda[i+num_pos] -= alpha
beta = -alpha/(1 + alpha*wtw)
neg_bhat[i] = 1./((1 / neg_bhat[i]) - (alpha / gamma))
A += beta * A.dot(np.outer(v,v)).dot(A)
normsum = np.linalg.norm(_lambda) + np.linalg.norm(lambdaold)
if normsum == 0:
conv = np.inf
break
conv = np.abs(lambdaold - _lambda).sum() / normsum
if conv < self.convergence_threshold:
break
lambdaold = _lambda.copy()
if verbose:
print 'itml iter: %d, conv = %f' % (it, conv)
if verbose:
print 'itml converged at iter: %d, conv = %f' % (it, conv)
return self
def metric(self):
return self.A
@classmethod
def prepare_constraints(self, labels, num_points, num_constraints):
ac,bd = np.random.randint(num_points, size=(2,num_constraints))
pos = labels[ac] == labels[bd]
a,c = ac[pos], ac[~pos]
b,d = bd[pos], bd[~pos]
return a,b,c,d
# hack around lack of axis kwarg in older numpy versions
try:
np.linalg.norm([[4]], axis=1)
except TypeError:
def _vector_norm(X):
return np.apply_along_axis(np.linalg.norm, 1, X)
else:
def _vector_norm(X):
    return np.linalg.norm(X, axis=1)
| mit | 2,450,078,011,670,945,000 | 30.056 | 78 | 0.592373 | false |
rickysarraf/autoEqualizer | autoEqualizer.py | 1 | 8836 | #!/usr/bin/python
# autoEqualizer - Script to load equalizer presets ondemand based on what genre of track is playing
# Copyright (C) 2007 Ritesh Raj Sarraf <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import threading
import signal
from time import sleep
try:
from qt import *
except ImportError:
sys.stderr.write("Err!!! I need the Python Qt modules. Please install the python-qt3 package.\n")
DEBUG=1
MODE=1
try:
from dcopext import DCOPClient, DCOPApp
except ImportError:
sys.stderr.write("Err!!! I can't find the dcopext module.\n")
os.popen( "kdialog --sorry 'PyKDE3 (KDE3 bindings for Python) is required for this script.'" )
raise
if DEBUG:
if os.path.isfile(__file__+".log") is True:
try:
os.remove(__file__+".log")
except IOError:
            sys.stderr.write("Couldn't remove the file. Do you have ownership?\n")
f = open(__file__+".log", 'a')
#class Notification( QCustomEvent ):
class Notification( QCustomEvent ):
__super_init = QCustomEvent.__init__
def __init__( self, str ):
self.__super_init(QCustomEvent.User + 1)
self.string = str
class autoEqualizer( QApplication):
""" The main application, also sets up the Qt event loop """
def __init__( self, args, mode ):
QApplication.__init__( self, args )
# create a new DCOP-Client
self.client = DCOPClient()
# Select what mode we want to run in
# 1 => shortStatusMessage
# 2 => popupMessage
self.mode = mode
# connect the client to the local DCOP-Server
if self.client.attach() is not True:
os.popen( "kdialog --sorry 'Could not connect to local DCOP server. Something weird happened.'" )
sys.exit(1)
# create a DCOP-Application-Object to talk to Amarok
self.amarok = DCOPApp('amarok', self.client)
debug( "Started.\n" )
# Start separate thread for reading data from stdin
self.stdinReader = threading.Thread( target = self.readStdin )
self.stdinReader.start()
self.readSettings()
def osCommands(self, command):
# Execute the command and return the exit value
# Once the extraction problem is root-caused, we can fix this easily.
os.environ['__autoEqualizer_command'] = command
try:
old_environ = os.environ['LANG']
except KeyError:
old_environ = "C"
os.environ['LANG'] = "C"
#if os.system(command '> __autoEqualizer_output 2>&1') != 0:
# debug("Couldn't execute the command using the dcopy command interface also.")
def saveState(self, sessionmanager):
# script is started by amarok, not by KDE's session manager
debug("We're in saveState. We should be avoiding session starts with this in place.\n")
sessionmanager.setRestartHint(QSessionManager.RestartNever)
def readSettings( self ):
""" Reads settings from configuration file """
try:
foovar = config.get( "General", "foo" )
except:
debug( "No config file found, using defaults.\n" )
############################################################################
# Stdin-Reader Thread
############################################################################
def readStdin( self ):
""" Reads incoming notifications from stdin """
while True:
# Read data from stdin. Will block until data arrives.
line = sys.stdin.readline()
debug ("Line is %s.\n" % (line) )
if line:
qApp.postEvent( self, Notification(line) )
else:
break
############################################################################
# Notification Handling
############################################################################
def customEvent( self, notification ):
""" Handles notifications """
string = QString(notification.string)
debug( "Received notification: " + str( string ) + "\n" )
if string.contains( "configure" ):
self.configure()
if string.contains( "engineStateChange: play" ):
debug("Play event triggered.\n")
self.engineStatePlay()
if string.contains( "engineStateChange: idle" ):
self.engineStateIdle()
if string.contains( "engineStateChange: pause" ):
self.engineStatePause()
if string.contains( "engineStateChange: empty" ):
self.engineStatePause()
if string.contains( "trackChange" ):
debug("Track change event occured.\n")
self.trackChange()
# Notification callbacks. Implement these functions to react to specific notification
# events from Amarok:
def configure( self ):
debug( "configuration" )
self.dia = ConfigDialog()
self.dia.show()
self.connect( self.dia, SIGNAL( "destroyed()" ), self.readSettings )
def engineStatePlay( self ):
""" Called when Engine state changes to Play """
debug ("Enable the equalizer to On")
self.equalizerState()
def engineStateIdle( self ):
""" Called when Engine state changes to Idle """
pass
def engineStatePause( self ):
""" Called when Engine state changes to Pause """
pass
def engineStateEmpty( self ):
""" Called when Engine state changes to Empty """
pass
def trackChange( self ):
""" Called when a new track starts """
debug ("Track Change event called.\n")
self.setEqualizer()
def getGenre(self):
# get the Genre from the current song.
retval, genre = self.amarok.player.genre()
genre = genre.__str__()
if retval is not True:
debug("I couldn't get the genre using the library. Is Amarok running?")
#TODO: debug("Will try using the dcop commandline interface")
else:
return genre
def setEqualizer(self):
# set the equalizer accordingly
# TODO: It would be good to have a list of preset equalizers
# and match them
self.genre = self.getGenre()
retval, success = self.amarok.player.setEqualizerPreset(self.genre)
if retval is not True:
debug("I couldn't get the equalizer preset. Is Amarok running?")
else:
if self.mode == 1:
self.amarok.playlist.shortStatusMessage("Activated equalizer preset -> %s" % (self.genre) )
debug ("Activated equalizer preset -> %s\n" % (self.genre) )
elif self.mode == 2:
self.amarok.playlist.popupMessage("Activated equalizer preset -> %s" % (self.genre) )
debug ("Activated equalizer preset -> %s\n" % (self.genre) )
else:
# Default Mode
self.amarok.playlist.popupMessage("Activated equalizer preset -> %s" % (self.genre) )
debug ("Activated equalizer preset -> %s\n" % (self.genre) )
def equalizerState(self):
# check if the equalizer is on or not
# FIXME: Currently, it looks like dcopext has a bug
# even though I try to set the equalizer to on, it doesn't
# so for now we will check if the equalizer is on or not and
# enable it using the dcop command
retval, equalizerState = self.amarok.player.equalizerEnabled()
if not equalizerState:
os.system( "dcop amarok player setEqualizerEnabled True" )
debug("Enable the Equalizer.")
############################################################################
def debug( message ):
""" Prints debug message to stdout """
f.writelines(message)
f.flush()
#print debug_prefix + " " + message
def onStop(signum, stackframe):
""" Called when script is stopped by user """
debug("I'm in onStop.\n")
debug("We need to kill the process, otherwise it strays around even if amarok exits.\n")
os.kill(os.getpid(), 9)
def main( ):
app = autoEqualizer ( sys.argv, MODE )
app.exec_loop()
if __name__ == "__main__":
mainapp = threading.Thread(target=main)
mainapp.start()
signal.signal(signal.SIGTERM, onStop)
# necessary for signal catching
while 1: sleep(120)
| gpl-3.0 | -8,767,119,271,233,244,000 | 32.596958 | 109 | 0.603327 | false |
fffy2366/image-processing | api.py | 1 | 10056 | #!bin/evn python
# -*-coding:utf8-*-
import base64
import sys
import os
import logging
import datetime
import re
import multiprocessing
# from pylab import *
from PIL import Image
import cv2
from bceocrapi import BceOCRAPI
from bin.python.models.images import Images
from nude import Nude
import imagehash
from bin.python.utils import logger
from bin.python.models.redis_results import RedisResults
Image.LOAD_TRUNCATED_IMAGES = True
reload(sys)
sys.setdefaultencoding('utf-8')
# Logging
# Default log directory; must be set at the program entry point to take effect; optional
logger.log_dir = "./logs"
# Log filename prefix; must be set at the program entry point to take effect; optional
logger.log_name = "api"
conf = logger.Logger()
# conf.debug('debug')
# conf.warn('tr-warn')
# conf.info('ds-info')
# conf.error('ss-error')
# IMAGE_DIR = "/Users/fengxuting/Downloads/testphoto/"
IMAGE_DIR = "public/uploads/api/"
class Api:
def __init__(self):
self.IMAGE_HASH = ""
    # Get the hash of an image
def get_image_hash(self,file):
img = Image.open(file)
h = str(imagehash.dhash(img))
return h
    # Face detection
def face(self,file):
# Get user supplied values
oriImg = IMAGE_DIR + file
        # Image compression preprocessing
# disImg = IMAGE_DIR +"ocrdis"+file
# newImg = resizeImg(ori_img=oriImg,dst_img=disImg,dst_w=2048,dst_h=2048,save_q=100)
# cascPath = "./data/haarcascades/haarcascade_frontalface_alt.xml"
cascPath = "./data/lbpcascades/lbpcascade_frontalface.xml"
        # Create the haar cascade
facecascade = cv2.CascadeClassifier(cascPath)
# Read the image
image = cv2.imread(oriImg)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        gray = cv2.equalizeHist(gray, gray)  # Histogram equalization: enhances image contrast by stretching the pixel intensity distribution
        gray = cv2.medianBlur(gray, 3)  # Noise reduction?
(height, width, a) = image.shape
# Detect faces in the image
faces = facecascade.detectMultiScale(
gray,
scaleFactor=1.1,
minNeighbors=2,
minSize=(30, 30),
flags=cv2.cv.CV_HAAR_SCALE_IMAGE
)
        # 1. Faces smaller than 0.5% of the image are not treated as faces. 2. With multiple faces, compare each to the largest; if the ratio is below 50%, it is not treated as a face.
faces_area = []
face_count = 0
for (x, y, w, h) in faces:
face_area = w * h
            # Ratio of the face area to the whole image
face_scale = (face_area) / float(height * width) * 100
# print("name %s,scale %s,x %s,y %s,w %s,h %s,area %s" % (file,face_scale,x,y,w,h,face_area))
# if face_scale<0.5:
# continue
faces_area.append(face_area)
faces_new = []
if(len(faces_area)>1):
face_max = max(faces_area)
for index,face in enumerate(faces) :
(x, y, w, h) = face
                # Ratio of this face to the largest face
scale = (w*h)/float(face_max) * 100
# print("scale %s" % (scale))
if(scale<50):
# delete(faces,index,axis=0)
pass
else:
faces_new.append(face)
else:
faces_new = faces
return faces_new
    # Black-and-white processing
def blackWhite(self,filename):
image_file = Image.open(IMAGE_DIR+filename) # open colour image
#exception : Premature end of JPEG file . IOError: image file is truncated (1 bytes not processed)
try:
image_file = image_file.convert('L') # convert image to black and white
except Exception as e:
raise
return IMAGE_DIR+filename
dst_path = IMAGE_DIR+"wb"+filename
image_file.save(dst_path)
return dst_path
    # Digit recognition (OCR)
def ocr(self,file):
ocr = BceOCRAPI("02fbe03acf3042a1b40e067bba1971f7", "bb1d4aafe7924fc0829fc33fa26b3347");
        # Black-and-white preprocessing
# newImg = IMAGE_DIR +file
newImg = self.blackWhite(file)
        # Image compression preprocessing
disImg = IMAGE_DIR +"ocrdis"+file
newImg = self.resizeImg(ori_img=newImg,dst_img=disImg,dst_w=1600,dst_h=1600,save_q=100)
with open(newImg, 'rb') as f:
content = f.read()
content = base64.b64encode(content)
try:
# ocr
# result = ocr.get_ocr_text(content, language='CHN_ENG')
result = ocr.get_ocr_text(content, language='ENG')
# print("file:"+file+"----------result:"+result)
# conf.info("file:"+file+"----------result:"+result)
return result
except Exception as e:
raise
    # If the image width or height is greater than 300, scale it down proportionally
def resizeImg(self,**args):
args_key = {'ori_img': '', 'dst_img': '', 'dst_w': '', 'dst_h': '', 'save_q': 75}
arg = {}
for key in args_key:
if key in args:
arg[key] = args[key]
im = Image.open(arg['ori_img'])
ori_w, ori_h = im.size
widthRatio = heightRatio = None
ratio = 1
if (ori_w and ori_w > arg['dst_w']) or (ori_h and ori_h > arg['dst_h']):
if arg['dst_w'] and ori_w > arg['dst_w']:
                widthRatio = float(arg['dst_w']) / ori_w  # the correct way to get a fractional ratio
if arg['dst_h'] and ori_h > arg['dst_h']:
heightRatio = float(arg['dst_h']) / ori_h
if widthRatio and heightRatio:
if widthRatio < heightRatio:
ratio = widthRatio
else:
ratio = heightRatio
if widthRatio and not heightRatio:
ratio = widthRatio
if heightRatio and not widthRatio:
ratio = heightRatio
newWidth = int(ori_w * ratio)
newHeight = int(ori_h * ratio)
else:
newWidth = ori_w
newHeight = ori_h
im.resize((newWidth, newHeight), Image.ANTIALIAS).save(arg['dst_img'], quality=arg['save_q'])
return arg['dst_img']
    # Crop the image below the face
def cropImg(self, file, faces):
oriImg = IMAGE_DIR + file
        # Crop the region below the face, at most five times the face height
# ipl_image = cv.LoadImage(oriImg)
ipl_image = Image.open(oriImg)
# print(ipl_image.height)
if (len(faces) < 1):
# print("no face")
return faces
(x, y, w, h) = faces[0]
yy = int(y + 1.5*h)
hh = h * 6
(width, height) = ipl_image.size
if (hh > height - y):
hh = height - y
if(yy>=height):
return False
dst = ipl_image.crop((x, yy, x + w, y + hh))
dst.save(IMAGE_DIR + file)
    # Detect explicit (nude) images
def isnude(self,file):
        # Image compression preprocessing
imagePath = IMAGE_DIR + file
nudeImg = IMAGE_DIR +"nude_"+file
self.resizeImg(ori_img=imagePath,dst_img=nudeImg,dst_w=300,dst_h=300,save_q=100)
faces = self.face("nude_"+file)
self.cropImg("nude_"+file,faces)
n = Nude(nudeImg)
# n.setFaces(faces)
# n.resize(1000,1000)
n.parse()
# print n.result
return 1 if n.result else 0
    # Count the number of digits
def countdigits(self,s):
digitpatt = re.compile('\d')
return len(digitpatt.findall(s))
    # Delete images
def delImg(self,file):
        # The black-and-white version
wbImg = IMAGE_DIR+"wb"+file
ocrImg300 = IMAGE_DIR +"dis"+file
        # The version larger than 1600
ocrImg1600 = IMAGE_DIR +"ocrdis"+file
nudeImg = IMAGE_DIR +"nude_"+file
if os.path.isfile(wbImg):
os.remove(wbImg)
if os.path.isfile(ocrImg300):
os.remove(ocrImg300)
if os.path.isfile(ocrImg1600):
os.remove(ocrImg1600)
        # Cropped image used for nudity detection
if os.path.isfile(nudeImg):
os.remove(nudeImg)
        # Delete the original file
# os.remove(IMAGE_DIR+file)
def one(self,file):
filepath = IMAGE_DIR+file
if(os.path.isfile(filepath)):
self.IMAGE_HASH = self.get_image_hash(filepath)
redis_result = self.get_result_from_redis(self.IMAGE_HASH)
if(redis_result):
                # Delete the images
self.delImg(file)
print redis_result
sys.exit(0)
is_pass = 1
            # Face detection
fc = self.face(file)
            # If there is not exactly one face, skip OCR and nudity detection
if(len(fc)!=1):
l = -1
is_nude = -1
is_pass = 0
else:
#ocr
text = ""
text = self.ocr(file)
text = text.encode("utf-8")
l = self.countdigits(text)
if (l > 6):
is_nude = -1
is_pass = 0
else:
                    # Nudity detection
is_nude = self.isnude(file)
if(is_nude==1):
is_pass = 0
            # Delete the images
self.delImg(file)
# print {"face_count":len(fc),"digital_count":l,"is_nude":is_nude,"pass":is_pass}
result = str(len(fc))+","+str(l)+","+str(is_nude)+","+str(is_pass)
            # Save the result to the Redis database
self.save_redis(self.IMAGE_HASH,result)
print result
else:
print("error:",file, "is not a img file")
    # Save to Redis
def save_redis(self,hash,result):
rr = RedisResults()
rr.save(hash,result)
    # Check whether the data exists in Redis and return the detection result
def get_result_from_redis(self,hash):
rr = RedisResults()
return rr.get(hash)
if __name__ == '__main__':
api = Api()
api.one(sys.argv[1])
# api.one("9d27d550-4beb-11e6-aefd-4f827560e966.png")
# api.one("91787150-4bf1-11e6-aefd-4f827560e966.png")
pass
| mit | -6,490,777,745,784,625,000 | 28.816456 | 106 | 0.525472 | false |
Huyuwei/tvm | tests/python/contrib/test_cublas.py | 1 | 2856 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
import numpy as np
from tvm.contrib import cublas
def test_matmul_add():
n = 1024
l = 128
m = 235
A = tvm.placeholder((n, l), name='A')
B = tvm.placeholder((l, m), name='B')
C = cublas.matmul(A, B)
s = tvm.create_schedule(C.op)
def verify(target="cuda"):
if not tvm.module.enabled(target):
print("skip because %s is not enabled..." % target)
return
if not tvm.get_global_func("tvm.contrib.cublas.matmul", True):
print("skip because extern function is not available")
return
ctx = tvm.gpu(0)
f = tvm.build(s, [A, B, C], target)
a = tvm.nd.array(np.random.uniform(size=(n, l)).astype(A.dtype), ctx)
b = tvm.nd.array(np.random.uniform(size=(l, m)).astype(B.dtype), ctx)
c = tvm.nd.array(np.zeros((n, m), dtype=C.dtype), ctx)
f(a, b, c)
tvm.testing.assert_allclose(
c.asnumpy(), np.dot(a.asnumpy(), b.asnumpy()), rtol=1e-5)
verify()
def test_batch_matmul():
j = 16
n = 1024
l = 128
m = 235
A = tvm.placeholder((j, n, l), name='A')
B = tvm.placeholder((j, l, m), name='B')
C = cublas.batch_matmul(A, B)
s = tvm.create_schedule(C.op)
def verify(target="cuda"):
if not tvm.module.enabled(target):
print("skip because %s is not enabled..." % target)
return
if not tvm.get_global_func("tvm.contrib.cublas.matmul", True):
print("skip because extern function is not available")
return
ctx = tvm.gpu(0)
f = tvm.build(s, [A, B, C], target)
a = tvm.nd.array(np.random.uniform(size=(j, n, l)).astype(A.dtype), ctx)
b = tvm.nd.array(np.random.uniform(size=(j, l, m)).astype(B.dtype), ctx)
c = tvm.nd.array(np.zeros((j, n, m), dtype=C.dtype), ctx)
f(a, b, c)
tvm.testing.assert_allclose(
c.asnumpy(), np.matmul(a.asnumpy(), b.asnumpy()), rtol=1e-5)
verify()
if __name__ == "__main__":
test_matmul_add()
test_batch_matmul()
| apache-2.0 | 2,408,037,564,744,352,000 | 36.090909 | 80 | 0.613796 | false |
prataprc/eazytext | eazytext/extension/.Attic/box.py | 1 | 4735 | # This file is subject to the terms and conditions defined in
# file 'LICENSE', which is part of this source code package.
# Copyright (c) 2010 SKR Farms (P) LTD.
# -*- coding: utf-8 -*-
# Gotcha : none
# Notes : none
# Todo : none
# 1. Unit test case for this extension.
from zope.component import getGlobalSiteManager
from eazytext.extension import Extension
from eazytext.interfaces import IEazyTextExtensionFactory
from eazytext.lib import split_style
gsm = getGlobalSiteManager()
doc = """
h3. Box
: Description ::
Generate a box with title and content. Text within the curly braces
will be interpreted as the content and can contain EazyText text as well.
If title text is provided, then the extension can take parameter
''hide'' which can be used to shrink/expand box content.
Property key-value pairs accepts CSS styling attributes and other special
attributes like,
|= title | optional, title string
|= titlestyle | optional, title style string in CSS style format
|= contentstyle | optional, content style string in CSS style format
''Example''
> [<PRE{{{ Box hide
#{
# 'title' : 'Building Mnesia Database',
# 'titlestyle' : 'color: brown;',
# 'contentstyle' : 'color: gray;',
# 'border' : '1px solid gray',
# 'style' : { 'margin' : '10px', 'padding' : '10px' },
#}
This chapter details the basic steps involved when designing a Mnesia
database and the programming constructs which make different solutions
available to the programmer. The chapter includes the following sections,
* defining a schema
* the datamodel
* starting Mnesia
* creating new tables.
}}} >]
{{{ Box hide
#{
# 'title' : 'Building Mnesia Database',
# 'titlestyle' : 'color: brown;',
# 'contentstyle' : 'color: gray;',
# 'border' : '1px solid gray',
# 'style' : { 'margin' : '10px', 'padding' : '10px' },
#}
This chapter details the basic steps involved when designing a Mnesia database
and the programming constructs which make different solutions available to the
programmer. The chapter includes the following sections:
* defining a schema
* the datamodel
* starting Mnesia
* creating new tables.
}}}
"""
tmpl = """
<div class="etext-box" style="%s">
<div class="boxtitle" style="%s">
%s %s
</div>
<div class="boxcont" style="%s">%s</div>
</div>
"""
spantmpl = """
<span class="boxhide"> hide</span>
<span class="boxshow"> show</span>
"""
class Box( Extension ) :
_doc = doc
def __init__( self, props, nowiki, *args ) :
self.nowiki = nowiki
self.title = props.pop( 'title', '' )
boxstyle = props.pop( 'style', {} )
titlestyle = props.pop( 'titlestyle', {} )
contentstyle = props.pop( 'contentstyle', '' )
d_style, s_style = split_style( boxstyle )
self.style = s_style
self.css = {}
self.css.update( props )
self.css.update( d_style )
d_style, s_style = split_style( titlestyle )
self.titlestyle = s_style
self.title_css = {}
self.title_css.update( d_style )
d_style, s_style = split_style( contentstyle )
self.contentstyle = s_style
self.cont_css = {}
self.cont_css.update( d_style )
self.hide = 'hide' in args
def __call__( self, argtext ):
return eval( 'Box( %s )' % argtext )
def html( self, node, igen, *args, **kwargs ) :
from eazytext.parser import ETParser
fn = lambda (k, v) : '%s : %s' % (k,v)
boxstyle = '; '.join(map( fn, self.css.items() ))
if self.style :
boxstyle += '; %s ;' % self.style
titlestyle = '; '.join(map( fn, self.title_css.items() ))
if self.titlestyle :
titlestyle += '; %s ;' % self.titlestyle
contstyle = '; '.join(map( fn, self.cont_css.items() ))
if self.contentstyle :
contstyle += '; %s ;' % self.contentstyle
self.nowiki_h = ''
if self.nowiki :
etparser = ETParser(
skin=None,
nested=True,
lex_optimize=False,
yacc_optimize=False,
)
tu = etparser.parse( self.nowiki, debuglevel=0 )
self.nowiki_h = tu.tohtml()
if self.title :
html = tmpl % ( boxstyle, titlestyle, self.title, spantmpl,
contstyle, self.nowiki_h )
else :
html = tmpl % ( boxstyle, titlestyle, self.title, '',
contstyle, self.nowiki_h )
return html
# Register this plugin
gsm.registerUtility( Box(), IEazyTextExtensionFactory, 'Box' )
Box._doc = doc
| gpl-3.0 | 856,791,562,702,116,600 | 28.409938 | 78 | 0.594931 | false |
Maskawanian/ude-components | Components/Client/__init__.py | 1 | 1318 | # See LICENCE for the source code licence.
# (c) 2010 Dan Saul
from Base import Base
SAVE_STATUS_SAVED = 0 # We can close this without losing data.
SAVE_STATUS_NOT_SAVED = 1 # If we close this we will lose data, but we can save.
SAVE_STATUS_NOT_SAVED_NEED_PATH = 2 # If we close this we will lose data, but we can save, however need the save path.
SAVE_STATUS_SAVING = 3 # In the progress of saving.
SAVE_STATUS_UNSAVABLE = 4 # If we close this we will lose data, we are not able to save however.
SAVE_STATUS_RANGE = range(SAVE_STATUS_SAVED,SAVE_STATUS_UNSAVABLE+1)
BUS_INTERFACE_NAME = "org.ude.components.client"
BUS_INTERFACE_NAME_PID_FORMAT = BUS_INTERFACE_NAME+"_{0}"
BUS_OBJECT_PATH = "/org/ude/components/client"
import logging,os
logger = logging.getLogger('Client')
logger.setLevel(logging.DEBUG)
__fh = logging.FileHandler('runtime.log')
__fh.setLevel(logging.DEBUG)
__ch = logging.StreamHandler()
__ch.setLevel(logging.ERROR)
__formatter = logging.Formatter('%(asctime)s %(name)s[%(process)d:%(levelname)s] %(filename)s::%(funcName)s() - %(message)s')
__fh.setFormatter(__formatter)
__ch.setFormatter(__formatter)
logger.addHandler(__fh)
logger.addHandler(__ch)
#http://docs.python.org/library/logging.html#http-handler eventually...
| lgpl-3.0 | 7,542,065,898,066,238,000 | 34.621622 | 125 | 0.698786 | false |
zncb/wtrace | setup.py | 1 | 4273 | #!/usr/bin/env python
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
long_description = f.read()
# Get the version
with open('wtrace/_version.py') as f:
exec(f.read()) # will register __version__
setup(
name='wtrace',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version=__version__,
description='Trace http transactions and asset dependencies involved in loading a web page.',
long_description=long_description,
# The project's main homepage.
url='https://github.com/zncb/wtrace',
# Author details
author='Etienne Noreau-Hebert',
author_email='[email protected]',
# Choose your license
license='AGPLv3+',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 2 - Pre-Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Topic :: Security',
'Topic :: Software Development :: Testing',
'Topic :: Utilities',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
#'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
#'Programming Language :: Python :: 3',
#'Programming Language :: Python :: 3.2',
#'Programming Language :: Python :: 3.3',
#'Programming Language :: Python :: 3.4',
],
# What does your project relate to?
keywords='web www tracer http dependency',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=['mitmproxy','selenium'],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
#'dev': ['check-manifest'],
#'test': ['coverage'],
},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={
#'sample': ['package_data.dat'],
},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
#data_files=[('my_data', ['data/data_file'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'console_scripts': [
'wtrace=wtrace:main',
],
},
)
| agpl-3.0 | 2,862,599,234,718,813,700 | 35.521368 | 97 | 0.659256 | false |
mardiros/apium | apium/task/__init__.py | 1 | 8390 | import sys
import types
import logging
import asyncio
import inspect
import traceback
import importlib
from collections import defaultdict
from uuid import uuid4
import venusian
from .. import registry
log = logging.getLogger(__name__)
class TaskRegistry(object):
""" Default implementation of the task registry """
def __init__(self):
self._registry = {}
self.default_queue = '#master'
self.queues = defaultdict(list)
def register(self, task):
""" Register a task """
if task.name in self._registry:
raise RuntimeError('Task {} is already registered'
''.format(task.name))
if task.queue:
self.queues[task.queue].append(task.name)
else:
if self.get_queue(task.name) not in self.queues:
self.queues[self.default_queue].append(task.name)
self._registry[task.name] = task
def get(self, task_name):
"""
        Get the task from its name.
        The task must have been registered previously.
"""
try:
return self._registry[task_name]
except KeyError:
raise RuntimeError('Task {} is not registered'.format(task_name))
def configure_queues(self, default_queue='#master', queues=None):
self.default_queue = default_queue
if queues:
for queue, tasks in queues.items():
self.queues[queue].extend(tasks)
def get_queue(self, task_name):
"""
Get the queue for the given task name
"""
for name, queue in self.queues.items():
if task_name in queue:
return name
return self.default_queue
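# Illustrative sketch (not part of the original module): how a driver might
# wire up the registry. ``some_task`` is a hypothetical object exposing the
# ``name`` and ``queue`` attributes expected by register().
def _example_registry_usage(some_task):
    registry = TaskRegistry()
    registry.configure_queues(default_queue='#master',
                              queues={'#mailer': [some_task.name]})
    registry.register(some_task)
    task = registry.get(some_task.name)         # -> some_task
    queue = registry.get_queue(some_task.name)  # -> '#mailer'
    return task, queue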
class TaskRequest:
""" Represent a task instance to run """
def __init__(self, driver, task_name, task_args, task_kwargs,
uuid=None, ignore_result=None,
result_queue=None):
self._driver = driver
self.uuid = uuid or str(uuid4())
self.task_name = task_name
self.task_args = task_args
self.task_kwargs = task_kwargs
self.result_queue = result_queue or self._driver.get_result_queue()
self.ignore_result = ignore_result
@asyncio.coroutine
def get(self, timeout=None):
"""
Return the result of the task or the result of the chained tasks in
        case callbacks have been attached.
:param timeout: timeout for the tasks. if None, the default timeout
of the TaskRequest will be used. The default timeout is the
timeout attribute of the tasks
:type timeout: float
:return: the result of the task
"""
result = yield from self._driver.pop_result(self, timeout)
return result
def to_dict(self):
return {'uuid': self.uuid,
'ignore_result': self.ignore_result,
'result_queue': self.result_queue,
'task_name': self.task_name,
'task_args': self.task_args,
'task_kwargs': self.task_kwargs,
}
def __str__(self):
return '<TaskRequest {}>'.format(self.uuid)
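# Illustrative sketch (not part of the original module): submitting a request
# through a driver and waiting for its result. ``driver`` is assumed to
# implement push_task/pop_result/get_result_queue as used above, and the
# 'tasks.add' task name is purely hypothetical.
@asyncio.coroutine
def _example_submit_and_wait(driver):
    request = TaskRequest(driver, 'tasks.add', (1, 2), {})
    yield from driver.push_task(request)
    result = yield from request.get(timeout=5)
    return result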
class TaskResponse:
def __init__(self, uuid, status, result=None,
                 exception=None, traceback=None):
self.uuid = uuid
self.status = status
self.result = result
self.exception = exception
self.traceback = traceback
def to_dict(self):
ret = {'uuid': self.uuid,
'status': self.status,
}
if self.status == 'DONE':
ret['result'] = self.result
elif self.status == 'ERROR':
ret['exception'] = {'module': getattr(self.exception, '__module__',
'__builtin__'),
                                'class': self.exception.__class__.__name__,
                                'args': self.exception.args,
}
ret['traceback'] = traceback.format_exc().strip()
return ret
class Task:
ignore_result = False
queue = None
timeout = None
def __init__(self, driver, method, **kwargs):
self._driver = driver
if 'name' in kwargs:
self.name = kwargs['name']
else:
self.name = '{}.{}'.format(method.__module__,
method.__name__)
self._origin = method
if inspect.isclass(method):
method = method()
if (not asyncio.iscoroutinefunction(method) and
(isinstance(method, asyncio.Future) or
inspect.isgenerator(method)
)):
method = asyncio.coroutine(method)
self.method = method
if 'ignore_result' in kwargs:
self.ignore_result = kwargs['ignore_result']
if 'timeout' in kwargs:
self.timeout = kwargs['timeout']
self._name = kwargs.get('name', None)
@asyncio.coroutine
def __call__(self, *args, **kwargs):
ignore_result = self.ignore_result
timeout = self.timeout
if 'task_options' in kwargs:
task_options = kwargs.pop('task_options')
ignore_result = task_options.get('ignore_result', ignore_result)
timeout = task_options.get('timeout', timeout)
request = TaskRequest(self._driver, self.name, args, kwargs,
ignore_result=ignore_result)
yield from self._driver.push_task(request)
if ignore_result:
return
result = yield from request.get(timeout)
return result
def execute(self, *args, **kwargs):
""" Execute the wrapped method.
This call must run in a process of the apium worker.
If the wrapped method is a coroutine, it will spawn a new
        event loop in the process executor to wait until the coroutine
is done.
"""
ret = self.method(*args, **kwargs)
if isinstance(ret, asyncio.Future) or inspect.isgenerator(ret):
# In that case,
# run the asyncio coroutine in a dedicated event loop
            # of the process pool executor
@asyncio.coroutine
def routine(method, future):
ret = yield from method
future.set_result(ret)
future = asyncio.Future()
old_loop = asyncio.get_event_loop()
try:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop.run_until_complete(asyncio.Task(routine(ret, future)))
ret = future.result()
finally:
asyncio.set_event_loop(old_loop)
loop.close()
return ret
def __str__(self):
return '<task {}>'.format(self.name)
def execute_task(task_name, uuid, args, kwargs):
""" Glue function that can be pickle.
Python cannot easily pickle class method, that why the ITaskRegistry cannot
be used directly.
"""
driver = registry.get_driver() # Here is the main reason we have a singleton
task_to_run = driver.get_task(task_name)
log.info('Executing task {}'.format(task_name))
log.debug('with param {}, {}'.format(args, kwargs))
try:
ret = TaskResponse(uuid, 'DONE',
task_to_run.execute(*args, **kwargs))
except Exception as exc:
log.error('Error {} while running task {} with param {}, {}'
''.format(exc, task_name, args, kwargs))
ret = TaskResponse(uuid, 'ERROR',
exception=exc,
                           traceback=sys.exc_info()[2])
ret = ret.to_dict()
log.info('task {} executed'.format(task_name))
log.debug('task returns {}'.format(ret))
return ret
class task:
"""
Transform a class or a function to a coroutine, attach it to
be used via the apium application.
"""
def __init__(self, **task_options):
self.task_options = task_options
def __call__(self, wrapped):
def callback(scanner, name, ob):
task_ = Task(scanner.driver, wrapped, **self.task_options)
log.info('Register task {}'.format(task_.name))
scanner.driver.register_task(task_)
venusian.attach(wrapped, callback, category='apium')
return wrapped
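# Illustrative usage sketch (not part of the original module): a task is
# declared by decorating a callable; the venusian scan performed by the
# apium application later registers it with the driver, e.g.
#
#     @task(ignore_result=False, timeout=30)
#     def add(a, b):
#         return a + b
#
#     result = yield from add(1, 2)   # awaited from within a coroutine
#
# The ``add`` task and the option values above are hypothetical examples.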
| bsd-3-clause | -306,071,942,864,579,460 | 31.269231 | 81 | 0.556853 | false |
tuzhaopeng/NMT-Coverage | build/lib/groundhog/trainer/SGD_adadelta.py | 1 | 8042 | """
Stochastic Gradient Descent.
TODO: write more documentation
"""
__docformat__ = 'restructuredtext en'
__authors__ = ("Razvan Pascanu "
"KyungHyun Cho "
"Caglar Gulcehre ")
__contact__ = "Razvan Pascanu <r.pascanu@gmail>"
import numpy
import time
import logging
import theano
import theano.tensor as TT
from theano.sandbox.scan import scan
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from groundhog.utils import print_time, print_mem, const
logger = logging.getLogger(__name__)
class SGD(object):
def __init__(self,
model,
state,
data):
"""
Parameters:
:param model:
Class describing the model used. It should provide the
computational graph to evaluate the model, and have a
similar structure to classes on the models folder
:param state:
Dictionary containing the current state of your job. This
includes configuration of the job, specifically the seed,
            the starting damping factor, batch size, etc. See main.py
for details
:param data:
Class describing the dataset used by the model
"""
if 'adarho' not in state:
state['adarho'] = 0.96
if 'adaeps' not in state:
state['adaeps'] = 1e-6
#####################################
# Step 0. Constructs shared variables
#####################################
bs = state['bs']
self.model = model
self.rng = numpy.random.RandomState(state['seed'])
srng = RandomStreams(self.rng.randint(213))
self.gs = [theano.shared(numpy.zeros(p.get_value(borrow=True).shape,
dtype=theano.config.floatX),
name=p.name)
for p in model.params]
self.gnorm2 = [theano.shared(numpy.zeros(p.get_value(borrow=True).shape,
dtype=theano.config.floatX),
name=p.name+'_g2')
for p in model.params]
self.dnorm2 = [theano.shared(numpy.zeros(p.get_value(borrow=True).shape,
dtype=theano.config.floatX),
name=p.name+'_d2')
for p in model.params]
self.step = 0
self.bs = bs
self.state = state
self.data = data
self.step_timer = time.time()
self.gdata = [theano.shared(numpy.zeros( (2,)*x.ndim,
dtype=x.dtype),
name=x.name) for x in model.inputs]
if 'profile' not in self.state:
self.state['profile'] = 0
###################################
# Step 1. Compile training function
###################################
logger.debug('Constructing grad function')
loc_data = self.gdata
self.prop_exprs = [x[1] for x in model.properties]
self.prop_names = [x[0] for x in model.properties]
self.update_rules = [x[1] for x in model.updates]
rval = theano.clone(model.param_grads + self.update_rules + \
self.prop_exprs + [model.train_cost],
replace=zip(model.inputs, loc_data))
nparams = len(model.params)
nouts = len(self.prop_exprs)
nrules = len(self.update_rules)
gs = rval[:nparams]
rules = rval[nparams:nparams + nrules]
outs = rval[nparams + nrules:]
norm_gs = TT.sqrt(sum(TT.sum(x**2)
for x,p in zip(gs, self.model.params) if p not in self.model.exclude_params_for_norm))
if 'cutoff' in state and state['cutoff'] > 0:
c = numpy.float32(state['cutoff'])
if state['cutoff_rescale_length']:
c = c * TT.cast(loc_data[0].shape[0], 'float32')
notfinite = TT.or_(TT.isnan(norm_gs), TT.isinf(norm_gs))
_gs = []
for g,p in zip(gs,self.model.params):
if p not in self.model.exclude_params_for_norm:
tmpg = TT.switch(TT.ge(norm_gs, c), g*c/norm_gs, g)
_gs.append(
TT.switch(notfinite, numpy.float32(.1)*p, tmpg))
else:
_gs.append(g)
gs = _gs
store_gs = [(s,g) for s,g in zip(self.gs, gs)]
updates = store_gs + [(s[0], r) for s,r in zip(model.updates, rules)]
rho = self.state['adarho']
eps = self.state['adaeps']
# grad2
gnorm2_up = [rho * gn2 + (1. - rho) * (g ** 2.) for gn2,g in zip(self.gnorm2, gs)]
updates = updates + zip(self.gnorm2, gnorm2_up)
logger.debug('Compiling grad function')
st = time.time()
self.train_fn = theano.function(
[], outs, name='train_function',
updates = updates,
givens = zip(model.inputs, loc_data))
logger.debug('took {}'.format(time.time() - st))
self.lr = numpy.float32(1.)
new_params = [p - (TT.sqrt(dn2 + eps) / TT.sqrt(gn2 + eps)) * g
for p, g, gn2, dn2 in
zip(model.params, self.gs, self.gnorm2, self.dnorm2)]
updates = zip(model.params, new_params)
# d2
d2_up = [(dn2, rho * dn2 + (1. - rho) *
(((TT.sqrt(dn2 + eps) / TT.sqrt(gn2 + eps)) * g) ** 2.))
for dn2, gn2, g in zip(self.dnorm2, self.gnorm2, self.gs)]
updates = updates + d2_up
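        # Reference note (added): the recurrences built in this constructor
        # are the ADADELTA updates (Zeiler, 2012), with gnorm2 holding
        # E[g^2] and dnorm2 holding E[dx^2]:
        #   E[g^2]_t  = rho * E[g^2]_{t-1}  + (1 - rho) * g_t^2
        #   dx_t      = -(sqrt(E[dx^2]_{t-1} + eps) / sqrt(E[g^2]_t + eps)) * g_t
        #   E[dx^2]_t = rho * E[dx^2]_{t-1} + (1 - rho) * dx_t^2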
self.update_fn = theano.function(
[], [], name='update_function',
allow_input_downcast=True,
updates = updates)
self.old_cost = 1e20
self.schedules = model.get_schedules()
self.return_names = self.prop_names + \
['cost',
'error',
'time_step',
'whole_time', 'lr']
self.prev_batch = None
def __call__(self):
batch = self.data.next()
assert batch
# Perturb the data (! and the model)
if isinstance(batch, dict):
batch = self.model.perturb(**batch)
else:
batch = self.model.perturb(*batch)
# Load the dataset into GPU
        # Note: not the most efficient approach in general, as each
        # batch is copied to the GPU individually
if isinstance(batch, dict):
for gdata in self.gdata:
gdata.set_value(batch[gdata.name], borrow=True)
else:
for gdata, data in zip(self.gdata, batch):
gdata.set_value(data, borrow=True)
        # Run the training function
g_st = time.time()
rvals = self.train_fn()
for schedule in self.schedules:
schedule(self, rvals[-1])
self.update_fn()
g_ed = time.time()
self.state['lr'] = float(self.lr)
cost = rvals[-1]
self.old_cost = cost
whole_time = time.time() - self.step_timer
if self.step % self.state['trainFreq'] == 0:
msg = '.. iter %4d cost %.3f'
vals = [self.step, cost]
for dx, prop in enumerate(self.prop_names):
msg += ' '+prop+' %.2e'
vals += [float(numpy.array(rvals[dx]))]
msg += ' step time %s whole time %s lr %.2e'
vals += [print_time(g_ed - g_st),
print_time(time.time() - self.step_timer),
float(self.lr)]
print msg % tuple(vals)
self.step += 1
ret = dict([('cost', float(cost)),
('error', float(cost)),
('lr', float(self.lr)),
('time_step', float(g_ed - g_st)),
('whole_time', float(whole_time))]+zip(self.prop_names, rvals))
return ret
| bsd-3-clause | 3,818,986,608,536,046,000 | 37.663462 | 98 | 0.497762 | false |
googleapis/googleapis-gen | google/ads/googleads/v6/googleads-py/google/ads/googleads/v6/services/services/billing_setup_service/transports/grpc.py | 1 | 11783 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.ads.googleads.v6.resources.types import billing_setup
from google.ads.googleads.v6.services.types import billing_setup_service
from .base import BillingSetupServiceTransport, DEFAULT_CLIENT_INFO
class BillingSetupServiceGrpcTransport(BillingSetupServiceTransport):
"""gRPC backend transport for BillingSetupService.
A service for designating the business entity responsible for
accrued costs.
A billing setup is associated with a payments account. Billing-
related activity for all billing setups associated with a
particular payments account will appear on a single invoice
generated monthly.
Mutates:
The REMOVE operation cancels a pending billing setup. The CREATE
operation creates a new billing setup.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
def __init__(self, *,
host: str = 'googleads.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._ssl_channel_credentials = ssl_channel_credentials
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning)
host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443"
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id)
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
ssl_credentials = SslCredentials().ssl_credentials
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
ssl_credentials=ssl_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._ssl_channel_credentials = ssl_credentials
else:
host = host if ":" in host else host + ":443"
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
ssl_credentials=ssl_channel_credentials,
scopes=self.AUTH_SCOPES,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._stubs = {} # type: Dict[str, Callable]
# Run the base constructor.
super().__init__(
host=host,
credentials=credentials,
client_info=client_info,
)
@classmethod
def create_channel(cls,
host: str = 'googleads.googleapis.com',
credentials: ga_credentials.Credentials = None,
scopes: Optional[Sequence[str]] = None,
**kwargs) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
            host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
scopes=scopes or cls.AUTH_SCOPES,
**kwargs
)
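    # Illustrative sketch (added note, not from the generated source): the
    # transport is normally built by the service client, but it can be
    # constructed directly, e.g.
    #
    #     transport = BillingSetupServiceGrpcTransport(
    #         host='googleads.googleapis.com',
    #         credentials=my_credentials,  # assumed google.auth credentials
    #     )
    #
    # or a raw channel can be obtained with
    # create_channel(credentials=my_credentials).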
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def get_billing_setup(self) -> Callable[
[billing_setup_service.GetBillingSetupRequest],
billing_setup.BillingSetup]:
r"""Return a callable for the get billing setup method over gRPC.
Returns a billing setup.
Returns:
Callable[[~.GetBillingSetupRequest],
~.BillingSetup]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_billing_setup' not in self._stubs:
self._stubs['get_billing_setup'] = self.grpc_channel.unary_unary(
'/google.ads.googleads.v6.services.BillingSetupService/GetBillingSetup',
request_serializer=billing_setup_service.GetBillingSetupRequest.serialize,
response_deserializer=billing_setup.BillingSetup.deserialize,
)
return self._stubs['get_billing_setup']
@property
def mutate_billing_setup(self) -> Callable[
[billing_setup_service.MutateBillingSetupRequest],
billing_setup_service.MutateBillingSetupResponse]:
r"""Return a callable for the mutate billing setup method over gRPC.
Creates a billing setup, or cancels an existing
billing setup.
Returns:
Callable[[~.MutateBillingSetupRequest],
~.MutateBillingSetupResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'mutate_billing_setup' not in self._stubs:
self._stubs['mutate_billing_setup'] = self.grpc_channel.unary_unary(
'/google.ads.googleads.v6.services.BillingSetupService/MutateBillingSetup',
request_serializer=billing_setup_service.MutateBillingSetupRequest.serialize,
response_deserializer=billing_setup_service.MutateBillingSetupResponse.deserialize,
)
return self._stubs['mutate_billing_setup']
__all__ = (
'BillingSetupServiceGrpcTransport',
)
| apache-2.0 | 1,060,008,593,816,807,600 | 43.464151 | 112 | 0.616821 | false |
jsubpy/jsub | jsub/loader.py | 1 | 1148 | import os
import sys
import inspect
from jsub.error import JsubError
class LoadError(JsubError):
pass
class ModuleNotFoundError(LoadError):
pass
class ClassNotFoundError(LoadError):
pass
class NotAClassError(LoadError):
pass
class PackageDirectoryNotFoundError(LoadError):
pass
def load_module(module_name):
try:
__import__(module_name)
except ImportError as e:
raise ModuleNotFoundError('Module "%s" not found' % module_name)
return sys.modules[module_name]
def load_class(module_name, class_name):
m = load_module(module_name)
try:
c = getattr(m, class_name)
except AttributeError as e:
raise ClassNotFoundError('Class "%s" not found in module "%s"' % (class_name, module_name))
if not inspect.isclass(c):
raise NotAClassError('"%s" in module "%s" is not a class' % (class_name, module_name))
return c
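# Illustrative usage (added, not part of the original module): resolving a
# class from dotted names; the stdlib names below are only examples.
def _example_load():
    ordered_dict_cls = load_class('collections', 'OrderedDict')
    return ordered_dict_cls()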
def package_dir(module_name):
m = load_module(module_name)
module_path = m.__file__
if not os.path.basename(module_path).startswith('__init__.'):
raise PackageDirectoryNotFoundError('Package directory is not found for module "%s"' % module_name)
return os.path.dirname(os.path.realpath(module_path))
| mit | 2,569,055,906,323,149,000 | 21.96 | 101 | 0.736934 | false |
okfse/froide | froide/foirequest/tasks.py | 1 | 3122 | import os
from django.conf import settings
from django.utils import translation
from django.db import transaction
from django.core.files import File
from froide.celery import app as celery_app
from .models import FoiRequest, FoiAttachment
from .foi_mail import _process_mail, _fetch_mail
from .file_utils import convert_to_pdf
@celery_app.task(acks_late=True, time_limit=60)
def process_mail(*args, **kwargs):
translation.activate(settings.LANGUAGE_CODE)
def run(*args, **kwargs):
try:
_process_mail(*args, **kwargs)
except Exception:
transaction.rollback()
raise
else:
transaction.commit()
return None
run = transaction.commit_manually(run)
run(*args, **kwargs)
@celery_app.task(expires=60)
def fetch_mail():
for rfc_data in _fetch_mail():
process_mail.delay(rfc_data)
@celery_app.task
def detect_overdue():
translation.activate(settings.LANGUAGE_CODE)
for foirequest in FoiRequest.objects.get_to_be_overdue():
foirequest.set_overdue()
@celery_app.task
def detect_asleep():
translation.activate(settings.LANGUAGE_CODE)
for foirequest in FoiRequest.objects.get_to_be_asleep():
foirequest.set_asleep()
@celery_app.task
def classification_reminder():
translation.activate(settings.LANGUAGE_CODE)
for foirequest in FoiRequest.objects.get_unclassified():
foirequest.send_classification_reminder()
@celery_app.task
def count_same_foirequests(instance_id):
translation.activate(settings.LANGUAGE_CODE)
try:
count = FoiRequest.objects.filter(same_as_id=instance_id).count()
FoiRequest.objects.filter(id=instance_id).update(same_as_count=count)
except FoiRequest.DoesNotExist:
pass
@celery_app.task(time_limit=60)
def convert_attachment_task(instance_id):
try:
att = FoiAttachment.objects.get(pk=instance_id)
except FoiAttachment.DoesNotExist:
return
return convert_attachment(att)
def convert_attachment(att):
result_file = convert_to_pdf(
att.file.path,
binary_name=settings.FROIDE_CONFIG.get(
'doc_conversion_binary'
),
construct_call=settings.FROIDE_CONFIG.get(
'doc_conversion_call_func'
)
)
if result_file is None:
return
path, filename = os.path.split(result_file)
new_file = File(open(result_file, 'rb'))
if att.converted:
new_att = att.converted
else:
if FoiAttachment.objects.filter(
belongs_to=att.belongs_to,
name=filename).exists():
name, extension = filename.rsplit('.', 1)
filename = '%s_converted.%s' % (name, extension)
new_att = FoiAttachment(
belongs_to=att.belongs_to,
approved=False,
filetype='application/pdf',
is_converted=True
)
new_att.name = filename
new_att.file = new_file
new_att.size = new_file.size
new_att.file.save(filename, new_file)
new_att.save()
att.converted = new_att
att.save()
| mit | 8,346,048,382,823,139,000 | 25.913793 | 77 | 0.652466 | false |
rtbortolin/QandA | QandA/QandA/urls.py | 1 | 1936 | """
Definition of urls for QandA.
"""
from datetime import datetime
from django.conf.urls import patterns, url
from app.forms import BootstrapAuthenticationForm
import app
import app.views
import django.contrib.auth.views
# Uncomment the next lines to enable the admin:
# from django.conf.urls import include
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = [
# Examples:
url(r'^$', app.views.home, name='home'),
url(r'^contact$', app.views.contact, name='contact'),
url(r'^about', app.views.about, name='about'),
url(r'^login/$',
django.contrib.auth.views.login,
{
'template_name': 'app/login.html',
'authentication_form': BootstrapAuthenticationForm,
'extra_context':
{
'title':'Log in',
'year':datetime.now().year,
}
},
name='login'),
url(r'^logout$',
django.contrib.auth.views.logout,
{
'next_page': '/',
},
name='logout'),
url(r'^question/create$', app.views.create_question, name = "create_question"),
url(r'^question/(?P<pk>\d+)$', app.views.QuestionDetailView.as_view(template_name='app/questions/details.html'), name = "Question Detail"),
url(r'^question/comment$', app.views.make_question_comment, name = "make_question_comment"),
url(r'^question/answer$', app.views.answer_a_question, name = "answer_a_question"),
url(r'^question/answer/comment$', app.views.make_answer_comment, name = "make_answer_comment"),
url(r'^question/vote$', app.views.make_a_vote, name = "make_a_vote"),
#url(r'^$', HomePageView.as_view(), name='home'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
]
| mit | -6,395,860,316,709,606,000 | 35.528302 | 143 | 0.628099 | false |
bSr43/udis86 | scripts/ud_opcode.py | 1 | 26794 | # udis86 - scripts/ud_opcode.py
#
# Copyright (c) 2009, 2013 Vivek Thampi
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import copy
class UdInsnDef:
"""An x86 instruction definition
"""
def __init__(self, **insnDef):
self.mnemonic = insnDef['mnemonic']
self.eflags = insnDef['eflags']
self.firstOpAccess = insnDef['firstOpAccess']
self.secondOpAccess = insnDef['secondOpAccess']
self.implicitRegUse = insnDef['implicitRegUse']
self.implicitRegDef = insnDef['implicitRegDef']
self.prefixes = insnDef['prefixes']
self.opcodes = insnDef['opcodes']
self.operands = insnDef['operands']
self._cpuid = insnDef['cpuid']
self._opcexts = {}
for opc in self.opcodes:
if opc.startswith('/'):
e, v = opc.split('=')
self._opcexts[e] = v
def lookupPrefix(self, pfx):
"""Lookup prefix (if any, None otherwise), by name"""
return True if pfx in self.prefixes else None
@property
def vendor(self):
return self._opcexts.get('/vendor', None)
@property
def mode(self):
return self._opcexts.get('/m', None)
@property
def osize(self):
return self._opcexts.get('/o', None)
def isDef64(self):
return 'def64' in self.prefixes
def __str__(self):
return self.mnemonic + " " + ', '.join(self.operands) + \
" " + ' '.join(self.opcodes)
class UdOpcodeTable:
"""A single table of instruction definitions, indexed by
a decode field.
"""
class CollisionError(Exception):
pass
class IndexError(Exception):
"""Invalid Index Error"""
pass
@classmethod
def vendor2idx(cls, v):
return (0 if v == 'amd'
else (1 if v == 'intel'
else 2))
@classmethod
def vex2idx(cls, v):
if v.startswith("none_"):
v = v[5:]
vexOpcExtMap = {
'none' : 0x0,
'0f' : 0x1,
'0f38' : 0x2,
'0f3a' : 0x3,
'66' : 0x4,
'66_0f' : 0x5,
'66_0f38' : 0x6,
'66_0f3a' : 0x7,
'f3' : 0x8,
'f3_0f' : 0x9,
'f3_0f38' : 0xa,
'f3_0f3a' : 0xb,
'f2' : 0xc,
'f2_0f' : 0xd,
'f2_0f38' : 0xe,
'f2_0f3a' : 0xf,
}
return vexOpcExtMap[v]
# A mapping of opcode extensions to their representational
# values used in the opcode map.
OpcExtMap = {
'/rm' : lambda v: int(v, 16),
'/x87' : lambda v: int(v, 16),
'/3dnow' : lambda v: int(v, 16),
'/reg' : lambda v: int(v, 16),
# modrm.mod
# (!11, 11) => (00b, 01b)
'/mod' : lambda v: 0 if v == '!11' else 1,
# Mode extensions:
# (16, 32, 64) => (00, 01, 02)
'/o' : lambda v: (int(v) / 32),
'/a' : lambda v: (int(v) / 32),
# Disassembly mode
# (!64, 64) => (00b, 01b)
'/m' : lambda v: 1 if v == '64' else 0,
# SSE
# none => 0
# f2 => 1
# f3 => 2
# 66 => 3
'/sse' : lambda v: (0 if v == 'none'
else (((int(v, 16) & 0xf) + 1) / 2)),
# AVX
'/vex' : lambda v: UdOpcodeTable.vex2idx(v),
'/vexw' : lambda v: 0 if v == '0' else 1,
'/vexl' : lambda v: 0 if v == '0' else 1,
# Vendor
'/vendor': lambda v: UdOpcodeTable.vendor2idx(v)
}
_TableInfo = {
'opctbl' : { 'label' : 'UD_TAB__OPC_TABLE', 'size' : 256 },
'/sse' : { 'label' : 'UD_TAB__OPC_SSE', 'size' : 4 },
'/reg' : { 'label' : 'UD_TAB__OPC_REG', 'size' : 8 },
'/rm' : { 'label' : 'UD_TAB__OPC_RM', 'size' : 8 },
'/mod' : { 'label' : 'UD_TAB__OPC_MOD', 'size' : 2 },
'/m' : { 'label' : 'UD_TAB__OPC_MODE', 'size' : 2 },
'/x87' : { 'label' : 'UD_TAB__OPC_X87', 'size' : 64 },
'/a' : { 'label' : 'UD_TAB__OPC_ASIZE', 'size' : 3 },
'/o' : { 'label' : 'UD_TAB__OPC_OSIZE', 'size' : 3 },
'/3dnow' : { 'label' : 'UD_TAB__OPC_3DNOW', 'size' : 256 },
'/vendor' : { 'label' : 'UD_TAB__OPC_VENDOR', 'size' : 3 },
'/vex' : { 'label' : 'UD_TAB__OPC_VEX', 'size' : 16 },
'/vexw' : { 'label' : 'UD_TAB__OPC_VEX_W', 'size' : 2 },
'/vexl' : { 'label' : 'UD_TAB__OPC_VEX_L', 'size' : 2 },
}
def __init__(self, typ):
assert typ in self._TableInfo
self._typ = typ
self._entries = {}
def size(self):
return self._TableInfo[self._typ]['size']
def entries(self):
return self._entries.iteritems()
def numEntries(self):
return len(self._entries.keys())
def label(self):
return self._TableInfo[self._typ]['label']
def typ(self):
return self._typ
def meta(self):
return self._typ
def __str__(self):
return "table-%s" % self._typ
def add(self, opc, obj):
typ = UdOpcodeTable.getOpcodeTyp(opc)
idx = UdOpcodeTable.getOpcodeIdx(opc)
if self._typ != typ or idx in self._entries:
            raise self.CollisionError()
self._entries[idx] = obj
def lookup(self, opc):
typ = UdOpcodeTable.getOpcodeTyp(opc)
idx = UdOpcodeTable.getOpcodeIdx(opc)
if self._typ != typ:
raise UdOpcodeTable.CollisionError("%s <-> %s" % (self._typ, typ))
return self._entries.get(idx, None)
def entryAt(self, index):
"""Returns the entry at a given index of the table,
None if there is none. Raises an exception if the
index is out of bounds.
"""
if index < self.size():
return self._entries.get(index, None)
raise self.IndexError("index out of bounds: %s" % index)
def setEntryAt(self, index, obj):
if index < self.size():
self._entries[index] = obj
else:
raise self.IndexError("index out of bounds: %s" % index)
@classmethod
def getOpcodeTyp(cls, opc):
if opc.startswith('/'):
return opc.split('=')[0]
else:
return 'opctbl'
@classmethod
def getOpcodeIdx(cls, opc):
if opc.startswith('/'):
typ, v = opc.split('=')
return cls.OpcExtMap[typ](v)
else:
# plain opctbl opcode
return int(opc, 16)
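    # For illustration (added note): plain opcode bytes map to their integer
    # value, while extension opcodes are decoded through OpcExtMap, e.g.
    #   getOpcodeTyp('0f')       -> 'opctbl'   getOpcodeIdx('0f')       -> 0x0f
    #   getOpcodeTyp('/mod=!11') -> '/mod'     getOpcodeIdx('/mod=!11') -> 0
    #   getOpcodeTyp('/sse=66')  -> '/sse'     getOpcodeIdx('/sse=66')  -> 3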
@classmethod
def getLabels(cls):
"""Returns a list of all labels"""
return [cls._TableInfo[k]['label'] for k in cls._TableInfo.keys()]
class UdOpcodeTables(object):
"""Collection of opcode tables
"""
class CollisionError(Exception):
def __init__(self, obj1, obj2):
self.obj1, self.obj2 = obj1, obj2
def newTable(self, typ):
"""Create a new opcode table of a give type `typ`. """
tbl = UdOpcodeTable(typ)
self._tables.append(tbl)
return tbl
def mkTrie(self, opcodes, obj):
"""Recursively contruct a trie entry mapping a string of
opcodes to an object.
"""
if len(opcodes) == 0:
return obj
opc = opcodes[0]
tbl = self.newTable(UdOpcodeTable.getOpcodeTyp(opc))
tbl.add(opc, self.mkTrie(opcodes[1:], obj))
return tbl
def walk(self, tbl, opcodes):
"""Walk down the opcode trie, starting at a given opcode
table, given a string of opcodes. Return None if unable
to walk, the object at the leaf otherwise.
"""
opc = opcodes[0]
e = tbl.lookup(opc)
if e is None:
return None
elif isinstance(e, UdOpcodeTable) and len(opcodes[1:]):
return self.walk(e, opcodes[1:])
return e
def map(self, tbl, opcodes, obj):
"""Create a mapping from a given string of opcodes to an
object in the opcode trie. Constructs trie branches as
needed.
"""
opc = opcodes[0]
e = tbl.lookup(opc)
if e is None:
tbl.add(opc, self.mkTrie(opcodes[1:], obj))
else:
if len(opcodes[1:]) == 0:
raise self.CollisionError(e, obj)
self.map(e, opcodes[1:], obj)
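    # For illustration (added note): mapping a hypothetical opcode string
    # such as ['0f', '58', '/sse=none'] creates an 'opctbl' entry for 0x0f
    # in the root table, a nested 'opctbl' entry for 0x58, and an '/sse'
    # table whose slot 0 holds the object; a subsequent
    # walk(self.root, ['0f', '58', '/sse=none']) returns that object.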
def __init__(self, xml):
self._tables = []
self._insns = []
self._mnemonics = {}
# The root table is always a 256 entry opctbl, indexed
# by a plain opcode byte
self.root = self.newTable('opctbl')
if os.getenv("UD_OPCODE_DEBUG"):
self._logFh = open("opcodeTables.log", "w")
# add an invalid instruction entry without any mapping
# in the opcode tables.
self.invalidInsn = UdInsnDef(mnemonic="invalid",
eflags="___________",
firstOpAccess="",
secondOpAccess="",
implicitRegUse=[],
implicitRegDef=[],
opcodes=[],
cpuid=[],
operands=[],
prefixes=[])
self._insns.append(self.invalidInsn)
# Construct UdOpcodeTables object from the given
# udis86 optable.xml
for insn in self.__class__.parseOptableXML(xml):
self.addInsnDef(insn)
self.patchAvx2byte()
self.mergeSSENONE()
self.printStats()
def log(self, s):
if os.getenv("UD_OPCODE_DEBUG"):
self._logFh.write(s + "\n")
def mergeSSENONE(self):
"""Merge sse tables with only one entry for /sse=none
"""
for table in self._tables:
for k, e in table.entries():
if isinstance(e, UdOpcodeTable) and e.typ() == '/sse':
if e.numEntries() == 1:
sse = e.lookup("/sse=none")
if sse:
table.setEntryAt(k, sse)
uniqTables = {}
def genTableList(tbl):
if tbl not in uniqTables:
self._tables.append(tbl)
uniqTables[tbl] = 1
for k, e in tbl.entries():
if isinstance(e, UdOpcodeTable):
genTableList(e)
self._tables = []
genTableList(self.root)
def patchAvx2byte(self):
# create avx tables
for pp in (None, 'f2', 'f3', '66'):
for m in (None, '0f', '0f38', '0f3a'):
if pp is None and m is None:
continue
if pp is None:
vex = m
elif m is None:
vex = pp
else:
vex = pp + '_' + m
table = self.walk(self.root, ('c4', '/vex=' + vex))
self.map(self.root, ('c5', '/vex=' + vex), table)
def addInsn(self, **insnDef):
# Canonicalize opcode list
opcexts = insnDef['opcexts']
opcodes = list(insnDef['opcodes'])
eflags = insnDef['eflags']
firstOpAccess = insnDef['firstOpAccess']
secondOpAccess = insnDef['secondOpAccess']
implicitRegUse = insnDef['implicitRegUse']
implicitRegDef = insnDef['implicitRegDef']
# TODO: REMOVE!
# print opcodes, eflags, insnDef['mnemonic'], firstOpAccess, secondOpAccess, implicitRegUse, implicitRegDef
# Re-order vex
if '/vex' in opcexts:
assert opcodes[0] == 'c4' or opcodes[0] == 'c5'
opcodes.insert(1, '/vex=' + opcexts['/vex'])
# Add extensions. The order is important, and determines how
# well the opcode table is packed. Also note, /sse must be
        # before /o, because /sse may consume the operand size prefix
        # and affect the outcome of /o.
for ext in ('/mod', '/x87', '/reg', '/rm', '/sse', '/o', '/a', '/m',
'/vexw', '/vexl', '/3dnow', '/vendor'):
if ext in opcexts:
opcodes.append(ext + '=' + opcexts[ext])
insn = UdInsnDef(mnemonic = insnDef['mnemonic'],
eflags = insnDef['eflags'],
firstOpAccess = insnDef['firstOpAccess'],
secondOpAccess = insnDef['secondOpAccess'],
implicitRegUse = insnDef['implicitRegUse'],
implicitRegDef = insnDef['implicitRegDef'],
prefixes = insnDef['prefixes'],
operands = insnDef['operands'],
opcodes = opcodes,
cpuid = insnDef['cpuid'])
try:
self.map(self.root, opcodes, insn)
except self.CollisionError as e:
self.pprint()
print(opcodes, insn, str(e.obj1), str(e.obj2))
raise
except Exception as e:
self.pprint()
raise
self._insns.append(insn)
# add to lookup by mnemonic structure
if insn.mnemonic not in self._mnemonics:
self._mnemonics[insn.mnemonic] = [ insn ]
else:
self._mnemonics[insn.mnemonic].append(insn)
def addInsnDef(self, insnDef):
opcodes = []
opcexts = {}
# pack plain opcodes first, and collect opcode
# extensions
for opc in insnDef['opcodes']:
if not opc.startswith('/'):
opcodes.append(opc)
else:
e, v = opc.split('=')
opcexts[e] = v
# treat vendor as an opcode extension
if len(insnDef['vendor']):
opcexts['/vendor'] = insnDef['vendor'][0]
if insnDef['mnemonic'] in ('lds', 'les'):
#
# Massage lds and les, which share the same prefix as AVX
# instructions, to work well with the opcode tree.
#
opcexts['/vex'] = 'none'
elif '/vex' in opcexts:
# A proper avx instruction definition; make sure there are
# no legacy opcode extensions
assert '/sse' not in opcodes
# make sure the opcode definitions don't already include
# the avx prefixes.
assert opcodes[0] not in ('c4', 'c5')
# An avx only instruction is defined by the /vex= opcode
# extension. They do not include the c4 (long form) or
            # c5 (short form) prefix. As part of opcode table generation,
# here we create the long form definition, and then patch
# the table for c5 in a later stage.
# Construct a long-form definition of the avx instruction
opcodes.insert(0, 'c4')
elif (opcodes[0] == '0f' and opcodes[1] != '0f' and
'/sse' not in opcexts):
            # Make all 2-byte opcode form instructions play nice with sse
# opcode maps.
opcexts['/sse'] = 'none'
# legacy sse defs that get promoted to avx
fn = self.addInsn
if 'avx' in insnDef['cpuid'] and '/sse' in opcexts:
fn = self.addSSE2AVXInsn
fn(mnemonic = insnDef['mnemonic'],
eflags = insnDef['eflags'],
firstOpAccess = insnDef['firstOpAccess'],
secondOpAccess = insnDef['secondOpAccess'],
implicitRegUse = insnDef['implicitRegUse'],
implicitRegDef = insnDef['implicitRegDef'],
prefixes = insnDef['prefixes'],
opcodes = opcodes,
opcexts = opcexts,
operands = insnDef['operands'],
cpuid = insnDef['cpuid'])
def addSSE2AVXInsn(self, **insnDef):
"""Add an instruction definition containing an avx cpuid bit, but
declared in its legacy SSE form. The function splits the
definition to create two new definitions, one for SSE and one
promoted to an AVX form.
"""
# SSE
ssemnemonic = insnDef['mnemonic']
sseeflags = insnDef['eflags']
ssefirstOpAccess = insnDef['firstOpAccess']
ssesecondOpAccess = insnDef['secondOpAccess']
sseimplicitRegUse = insnDef['implicitRegUse']
sseimplicitRegDef = insnDef['implicitRegDef']
sseopcodes = insnDef['opcodes']
# remove vex opcode extensions
sseopcexts = dict([(e, v) for e, v in insnDef['opcexts'].iteritems()
if not e.startswith('/vex')])
# strip out avx operands, preserving relative ordering
# of remaining operands
sseoperands = [opr for opr in insnDef['operands']
if opr not in ('H', 'L')]
# strip out avx prefixes
sseprefixes = [pfx for pfx in insnDef['prefixes']
if not pfx.startswith('vex')]
# strip out avx bits from cpuid
ssecpuid = [flag for flag in insnDef['cpuid']
if not flag.startswith('avx')]
self.addInsn(mnemonic = ssemnemonic,
eflags = sseeflags,
firstOpAccess = ssefirstOpAccess,
secondOpAccess = ssesecondOpAccess,
implicitRegUse = sseimplicitRegUse,
implicitRegDef = sseimplicitRegDef,
prefixes = sseprefixes,
opcodes = sseopcodes,
opcexts = sseopcexts,
operands = sseoperands,
cpuid = ssecpuid)
# AVX
vexmnemonic = 'v' + insnDef['mnemonic']
vexeflags = insnDef['eflags']
vexfirstOpAccess = insnDef['firstOpAccess']
vexsecondOpAccess = insnDef['secondOpAccess']
veximplicitRegUse = insnDef['implicitRegUse']
veximplicitRegDef = insnDef['implicitRegDef']
vexprefixes = insnDef['prefixes']
vexopcodes = ['c4']
vexopcexts = dict([(e, insnDef['opcexts'][e])
for e in insnDef['opcexts'] if e != '/sse'])
vexopcexts['/vex'] = insnDef['opcexts']['/sse'] + '_' + '0f'
if insnDef['opcodes'][1] == '38' or insnDef['opcodes'][1] == '3a':
vexopcexts['/vex'] += insnDef['opcodes'][1]
vexopcodes.extend(insnDef['opcodes'][2:])
else:
vexopcodes.extend(insnDef['opcodes'][1:])
vexoperands = []
for o in insnDef['operands']:
# make the operand size explicit: x
if o in ('V', 'W', 'H', 'U'):
o = o + 'x'
vexoperands.append(o)
vexcpuid = [flag for flag in insnDef['cpuid']
if not flag.startswith('sse')]
self.addInsn(mnemonic = vexmnemonic,
eflags = vexeflags,
firstOpAccess = vexfirstOpAccess,
secondOpAccess = vexsecondOpAccess,
implicitRegUse = veximplicitRegUse,
implicitRegDef = veximplicitRegDef,
prefixes = vexprefixes,
opcodes = vexopcodes,
opcexts = vexopcexts,
operands = vexoperands,
cpuid = vexcpuid)
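    # For illustration (added note): a legacy definition with, say, opcodes
    # ['0f', '58'] and opcexts {'/sse': '66'} (a hypothetical 'addpd'-style
    # entry) is emitted twice: once unchanged as the SSE form, and once as a
    # promoted AVX form whose mnemonic gains a 'v' prefix, whose opcodes
    # become ['c4', '58'] and whose '/vex' extension becomes '66_0f'.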
def getInsnList(self):
"""Returns a list of all instructions in the collection"""
return self._insns
def getTableList(self):
"""Returns a list of all tables in the collection"""
return self._tables
def getMnemonicsList(self):
"""Returns a sorted list of mnemonics"""
return sorted(self._mnemonics.keys())
def pprint(self):
def printWalk(tbl, indent=""):
entries = tbl.entries()
for k, e in entries:
if isinstance(e, UdOpcodeTable):
self.log("%s |-<%02x> %s" % (indent, k, e))
printWalk(e, indent + " |")
elif isinstance(e, UdInsnDef):
self.log("%s |-<%02x> %s" % (indent, k, e))
printWalk(self.root)
def printStats(self):
tables = self.getTableList()
self.log("stats: ")
self.log(" Num tables = %d" % len(tables))
self.log(" Num insnDefs = %d" % len(self.getInsnList()))
self.log(" Num insns = %d" % len(self.getMnemonicsList()))
totalSize = 0
totalEntries = 0
for table in tables:
totalSize += table.size()
totalEntries += table.numEntries()
self.log(" Packing Ratio = %d%%" % ((totalEntries * 100) / totalSize))
self.log("--------------------")
self.pprint()
@staticmethod
def parseOptableXML(xml):
"""Parse udis86 optable.xml file and return list of
instruction definitions.
"""
from xml.dom import minidom
xmlDoc = minidom.parse(xml)
tlNode = xmlDoc.firstChild
insns = []
while tlNode and tlNode.localName != "x86optable":
tlNode = tlNode.nextSibling
for insnNode in tlNode.childNodes:
if not insnNode.localName:
continue
if insnNode.localName != "instruction":
raise Exception("warning: invalid insn node - %s" % insnNode.localName)
mnemonic = insnNode.getElementsByTagName('mnemonic')[0].firstChild.data
vendor, cpuid = '', []
global_eflags = "___________"
global_firstOpAccess = "R"
global_secondOpAccess = "R"
global_implicitRegUse = []
global_implicitRegDef = []
for node in insnNode.childNodes:
if node.localName == 'vendor':
vendor = node.firstChild.data.split()
elif node.localName == 'cpuid':
cpuid = node.firstChild.data.split()
elif node.localName == 'eflags':
global_eflags = node.firstChild.data
elif node.localName == 'first_operand_access':
global_firstOpAccess = node.firstChild.data
elif node.localName == 'second_operand_access':
global_secondOpAccess = node.firstChild.data
elif node.localName == 'implicit_register_use':
global_implicitRegUse.append(node.firstChild.data)
elif node.localName == 'implicit_register_def':
global_implicitRegDef.append(node.firstChild.data)
for node in insnNode.childNodes:
if node.localName == 'def':
eflags = copy.deepcopy(global_eflags)
firstOpAccess = copy.deepcopy(global_firstOpAccess)
secondOpAccess = copy.deepcopy(global_secondOpAccess)
implicitRegUse = copy.deepcopy(global_implicitRegUse)
implicitRegDef = copy.deepcopy(global_implicitRegDef)
insnDef = { 'pfx' : [] }
for node in node.childNodes:
if not node.localName:
continue
if node.localName in ('pfx', 'opc', 'opr', 'vendor', 'cpuid'):
insnDef[node.localName] = node.firstChild.data.split()
elif node.localName == 'eflags':
eflags = node.firstChild.data
elif node.localName == 'first_operand_access':
firstOpAccess = node.firstChild.data
elif node.localName == 'second_operand_access':
secondOpAccess = node.firstChild.data
elif node.localName == 'implicit_register_use':
implicitRegUse.append(node.firstChild.data)
elif node.localName == 'implicit_register_def':
implicitRegDef.append(node.firstChild.data)
elif node.localName == 'mode':
insnDef['pfx'].extend(node.firstChild.data.split())
insns.append({'prefixes' : insnDef.get('pfx', []),
'mnemonic' : mnemonic,
'eflags' : eflags,
'firstOpAccess' : firstOpAccess,
'secondOpAccess' : secondOpAccess,
'implicitRegUse' : implicitRegUse,
'implicitRegDef' : implicitRegDef,
'opcodes' : insnDef.get('opc', []),
'operands' : insnDef.get('opr', []),
'vendor' : insnDef.get('vendor', vendor),
'cpuid' : insnDef.get('cpuid', cpuid)})
return insns
| bsd-2-clause | -4,087,577,388,770,248,000 | 37.168091 | 115 | 0.506382 | false |
JioCloud/oslo-incubator | tests/unit/test_authutils.py | 1 | 1104 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.common import authutils
from openstack.common import test
class AuthUtilsTest(test.BaseTestCase):
def test_auth_str_equal(self):
self.assertTrue(authutils.auth_str_equal('abc123', 'abc123'))
self.assertFalse(authutils.auth_str_equal('a', 'aaaaa'))
self.assertFalse(authutils.auth_str_equal('aaaaa', 'a'))
self.assertFalse(authutils.auth_str_equal('ABC123', 'abc123'))
| apache-2.0 | 7,319,244,362,919,340,000 | 38.428571 | 78 | 0.722826 | false |
levythu/ThuCloudDisk | ThuCloudDisk/ThuCloudDisk/settings.py | 1 | 6136 | # Django settings for ThuCloudDisk project.
import os.path
dirname = os.path.dirname(__file__).replace("\\", "/")
ROOT_PATH = os.path.dirname(dirname)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('thuclouddisk', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'ThuCloudDisk', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': 'root',
'PASSWORD': 'Zstbj2013',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Asia/Shanghai'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = os.path.join(ROOT_PATH, 'media').replace('\\','/')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(ROOT_PATH, 'static').replace('\\','/'),
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'mg1=t7!v5f7*#6p2b+#+o_fiqja0w4w#15m604oo+=(w1glvdk'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'ThuCloudDisk.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'ThuCloudDisk.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(ROOT_PATH, 'templates').replace('\\','/'),
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'web',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
USE_SWIFT = True
WEB_RSYNC = False
AUTH_USER_MODEL = 'web.MyUser'
EMAIL_HOST='smtp.163.com'
EMAIL_HOST_USER='[email protected]'
EMAIL_HOST_PASSWORD='jzfiiczzaziqhymt'
EMAIL_USE_TLS = True
SITE_URL='thucloud.com'
SITE_NAME='thuclouddisk'
SWIFT_HOST='192.168.56.101:5000'
SWIFT_URL='192.168.56.101'
SWIFT_TENANT='demo'
SWIFT_USER = 'demo'
SWIFT_SECRET = 'DEMO_PASS'
LOCAL_BUFFER_PATH = os.path.join(ROOT_PATH, 'media/buffer').replace('\\','/') | apache-2.0 | -3,915,417,579,704,217,000 | 32.71978 | 127 | 0.688233 | false |
romanz/electrum | plugins/digitalbitbox/qt.py | 1 | 1522 | from ..hw_wallet.qt import QtHandlerBase, QtPluginBase
from .digitalbitbox import DigitalBitboxPlugin
from electrum.i18n import _
from electrum.plugins import hook
from electrum.wallet import Standard_Wallet
class Plugin(DigitalBitboxPlugin, QtPluginBase):
icon_unpaired = ":icons/digitalbitbox_unpaired.png"
icon_paired = ":icons/digitalbitbox.png"
def create_handler(self, window):
return DigitalBitbox_Handler(window)
@hook
def receive_menu(self, menu, addrs, wallet):
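# The guards below only add the extra receive-menu entry for a standard wallet backed by
# this plugin's Digital Bitbox keystore, when the mobile app is paired and p2pkh is in use.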
if type(wallet) is not Standard_Wallet:
return
keystore = wallet.get_keystore()
if type(keystore) is not self.keystore_class:
return
if not self.is_mobile_paired():
return
if not keystore.is_p2pkh():
return
if len(addrs) == 1:
def show_address():
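# Derive the keypath for the selected address, fetch the matching xpub/echo from the
# device client, and post a verification request so the paired mobile app can display
# the address for checking.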
change, index = wallet.get_address_index(addrs[0])
keypath = '%s/%d/%d' % (keystore.derivation, change, index)
xpub = self.get_client(keystore)._get_xpub(keypath)
verify_request_payload = {
"type": 'p2pkh',
"echo": xpub['echo'],
}
self.comserver_post_notification(verify_request_payload)
menu.addAction(_("Show on {}").format(self.device), show_address)
class DigitalBitbox_Handler(QtHandlerBase):
def __init__(self, win):
super(DigitalBitbox_Handler, self).__init__(win, 'Digital Bitbox')
| mit | -7,596,609,756,621,793,000 | 30.708333 | 77 | 0.605782 | false |
ExCiteS/geokey-dataimports | geokey_dataimports/tests/test_views.py | 1 | 108533 | """All tests for views."""
import os
import json
from django.core.files import File
from django.core.urlresolvers import reverse
from django.http import HttpRequest
from django.template.loader import render_to_string
from django.test import TestCase, RequestFactory
from django.contrib.messages import get_messages
from django.contrib.messages.storage.fallback import FallbackStorage
from django.contrib.auth.models import AnonymousUser
from django.contrib.sites.shortcuts import get_current_site
from geokey import version
from geokey.core.tests.helpers import render_helpers
from geokey.users.tests.model_factories import UserFactory
from geokey.projects.tests.model_factories import ProjectFactory
from geokey.categories.tests.model_factories import (
CategoryFactory,
TextFieldFactory
)
from geokey.contributions.models import Observation
from .helpers import file_helpers
from .model_factories import DataImportFactory
from ..helpers.context_helpers import does_not_exist_msg
from ..models import DataImport, DataField, DataFeature
from ..forms import CategoryForm, DataImportForm
from ..views import (
IndexPage,
AllDataImportsPage,
AddDataImportPage,
SingleDataImportPage,
DataImportCreateCategoryPage,
DataImportAssignFieldsPage,
DataImportAllDataFeaturesPage,
RemoveDataImportPage
)
no_rights_to_access_msg = 'You are not member of the administrators group ' \
'of this project and therefore not allowed to ' \
'alter the settings of the project'
# ###########################
# TESTS FOR ADMIN PAGES
# ###########################
class IndexPageTest(TestCase):
"""Test index page."""
def setUp(self):
"""Set up test."""
self.request = HttpRequest()
self.request.method = 'GET'
self.view = IndexPage.as_view()
self.filters = {
'without-data-imports-only': 'Without data imports',
'with-data-imports-only': 'With data imports'
}
self.user = UserFactory.create()
self.project_1 = ProjectFactory.create(add_admins=[self.user])
self.project_2 = ProjectFactory.create(add_admins=[self.user])
self.project_3 = ProjectFactory.create(add_admins=[self.user])
self.project_4 = ProjectFactory.create(add_contributors=[self.user])
self.project_5 = ProjectFactory.create()
DataImportFactory.create(project=self.project_2)
DataImportFactory.create(project=self.project_4)
di_to_delete = DataImportFactory.create(project=self.project_3)
if os.path.isfile(di_to_delete.file.path):
os.remove(di_to_delete.file.path)
di_to_delete.delete()
self.project_1.dataimports_count = 0 # none added
self.project_2.dataimports_count = 1 # added
self.project_3.dataimports_count = 0 # added but deleted
self.project_4.dataimports_count = 1 # added
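# Attach a session and a messages storage to the bare HttpRequest so views that use
# the Django messages framework can run without the full middleware stack.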
setattr(self.request, 'session', 'session')
messages = FallbackStorage(self.request)
setattr(self.request, '_messages', messages)
def tearDown(self):
"""Tear down test."""
for dataimport in DataImport.objects.all():
if dataimport.file:
dataimport.file.delete()
def test_get_with_anonymous(self):
"""
Test GET with anonymous.
It should redirect to login page.
"""
self.request.user = AnonymousUser()
response = self.view(self.request)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
def test_get_with_user(self):
"""
Test GET with user.
It should render the page with all projects, where user is an
administrator.
"""
projects = [self.project_1, self.project_2, self.project_3]
self.request.user = self.user
response = self.view(self.request).render()
rendered = render_to_string(
'di_index.html',
{
'PLATFORM_NAME': get_current_site(self.request).name,
'GEOKEY_VERSION': version.get_version(),
'user': self.request.user,
'messages': get_messages(self.request),
'filters': self.filters,
'projects': projects
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_user_only_without_dataimports(self):
"""
Test GET with user, but only projects without data imports.
It should render the page with all projects, where user is an
administrator. Those projects must also not have data imports.
"""
projects = [self.project_1, self.project_3]
self.request.user = self.user
self.request.GET['filter'] = 'without-data-imports-only'
response = self.view(self.request).render()
rendered = render_to_string(
'di_index.html',
{
'PLATFORM_NAME': get_current_site(self.request).name,
'GEOKEY_VERSION': version.get_version(),
'user': self.request.user,
'messages': get_messages(self.request),
'filters': self.filters,
'projects': projects,
'request': {
'GET': {
'filter': self.request.GET.get('filter')
}
}
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_user_only_with_dataimports(self):
"""
Test GET with user, but only projects with data imports.
It should render the page with all projects, where user is an
administrator. Those projects must also have data imports.
"""
projects = [self.project_2]
self.request.user = self.user
self.request.GET['filter'] = 'with-data-imports-only'
response = self.view(self.request).render()
rendered = render_to_string(
'di_index.html',
{
'PLATFORM_NAME': get_current_site(self.request).name,
'GEOKEY_VERSION': version.get_version(),
'user': self.request.user,
'messages': get_messages(self.request),
'filters': self.filters,
'projects': projects,
'request': {
'GET': {
'filter': self.request.GET.get('filter')
}
}
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
class AllDataImportsPageTest(TestCase):
"""Test all data imports page."""
def setUp(self):
"""Set up test."""
self.request = HttpRequest()
self.request.method = 'GET'
self.view = AllDataImportsPage.as_view()
self.user = UserFactory.create()
self.contributor = UserFactory.create()
self.admin = UserFactory.create()
self.project = ProjectFactory.create(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
setattr(self.request, 'session', 'session')
messages = FallbackStorage(self.request)
setattr(self.request, '_messages', messages)
def test_get_with_anonymous(self):
"""
Test GET with anonymous.
It should redirect to login page.
"""
self.request.user = AnonymousUser()
response = self.view(self.request, project_id=self.project.id)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
def test_get_with_user(self):
"""
Test GET with user.
It should not allow access to the page, when user is not an
administrator.
"""
self.request.user = self.user
response = self.view(self.request, project_id=self.project.id).render()
rendered = render_to_string(
'di_all_dataimports.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Project')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_contributor(self):
"""
Test GET with contributor.
It should not allow access to the page, when user is not an
administrator.
"""
self.request.user = self.contributor
response = self.view(self.request, project_id=self.project.id).render()
rendered = render_to_string(
'di_all_dataimports.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Permission denied.',
'error_description': no_rights_to_access_msg
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_admin(self):
"""
Test GET with admin.
It should render the page with a project.
"""
self.request.user = self.admin
response = self.view(self.request, project_id=self.project.id).render()
rendered = render_to_string(
'di_all_dataimports.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'project': self.project
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_when_no_project(self):
"""
Test GET with admin, when project does not exist.
It should inform user that project does not exist.
"""
self.request.user = self.admin
response = self.view(
self.request,
project_id=self.project.id + 123
).render()
rendered = render_to_string(
'di_all_dataimports.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Project')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
class AddDataImportPageTest(TestCase):
"""Test add data import page."""
def setUp(self):
"""Set up test."""
self.factory = RequestFactory()
self.request = HttpRequest()
self.view = AddDataImportPage.as_view()
self.user = UserFactory.create()
self.contributor = UserFactory.create()
self.admin = UserFactory.create()
self.project = ProjectFactory.create(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
self.category = CategoryFactory.create(
project=self.project
)
self.data = {
'name': 'Test Import',
'description': '',
'file': File(open(file_helpers.get_csv_file().name)),
'category_create': 'true'
}
self.url = reverse('geokey_dataimports:dataimport_add', kwargs={
'project_id': self.project.id
})
setattr(self.request, 'session', 'session')
messages = FallbackStorage(self.request)
setattr(self.request, '_messages', messages)
def tearDown(self):
"""Tear down test."""
for dataimport in DataImport.objects.all():
if dataimport.file:
dataimport.file.delete()
def test_get_with_anonymous(self):
"""
Test GET with anonymous.
It should redirect to login page.
"""
self.request.user = AnonymousUser()
self.request.method = 'GET'
response = self.view(self.request, project_id=self.project.id)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
def test_get_with_user(self):
"""
Test GET with user.
It should not allow access to the page, when user is not an
administrator.
"""
self.request.user = self.user
self.request.method = 'GET'
response = self.view(self.request, project_id=self.project.id).render()
form = DataImportForm()
rendered = render_to_string(
'di_add_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Project')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_contributor(self):
"""
Test GET with contributor.
It should not allow access to the page, when user is not an
administrator.
"""
self.request.user = self.contributor
self.request.method = 'GET'
response = self.view(self.request, project_id=self.project.id).render()
form = DataImportForm()
rendered = render_to_string(
'di_add_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'error': 'Permission denied.',
'error_description': no_rights_to_access_msg
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_admin(self):
"""
Test GET with admin.
It should render the page with a project.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(self.request, project_id=self.project.id).render()
form = DataImportForm()
rendered = render_to_string(
'di_add_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'project': self.project
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_when_no_project(self):
"""
Test GET with admin, when project does not exist.
It should inform user that project does not exist.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id + 123
).render()
form = DataImportForm()
rendered = render_to_string(
'di_add_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Project'),
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_post_with_anonymous(self):
"""
Test POST with anonymous.
It should redirect to login page.
"""
request = self.factory.post(self.url, self.data)
request.user = AnonymousUser()
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(request, project_id=self.project.id)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
def test_post_with_user(self):
"""
Test POST with user.
It should not allow adding new data imports, when user is not an
administrator.
"""
request = self.factory.post(self.url, self.data)
request.user = self.user
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(request, project_id=self.project.id).render()
form = DataImportForm(data=self.data)
rendered = render_to_string(
'di_add_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Project')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataImport.objects.count(), 0)
self.assertEqual(DataField.objects.count(), 0)
self.assertEqual(DataFeature.objects.count(), 0)
def test_post_with_contributor(self):
"""
Test POST with contributor.
It should not allow adding new data imports, when user is not an
administrator.
"""
request = self.factory.post(self.url, self.data)
request.user = self.contributor
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(request, project_id=self.project.id).render()
form = DataImportForm(data=self.data)
rendered = render_to_string(
'di_add_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'error': 'Permission denied.',
'error_description': no_rights_to_access_msg
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataImport.objects.count(), 0)
self.assertEqual(DataField.objects.count(), 0)
self.assertEqual(DataFeature.objects.count(), 0)
def test_post_with_admin_when_creating_new_category(self):
"""
Test POST with admin, when creating a new category.
It should add new data import, when user is an administrator. Also, it
should redirect to a page to add a new category.
"""
self.data['category_create'] = 'true'
self.data['category'] = None
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(request, project_id=self.project.id)
self.assertEqual(response.status_code, 302)
self.assertIn(
reverse(
'geokey_dataimports:dataimport_create_category',
kwargs={
'project_id': self.project.id,
'dataimport_id': DataImport.objects.first().id
}
),
response['location']
)
self.assertEqual(DataImport.objects.count(), 1)
self.assertEqual(DataField.objects.count(), 3)
self.assertEqual(DataFeature.objects.count(), 3)
def test_post_with_admin_when_attaching_existing_category(self):
"""
Test POST with admin, when selecting category.
It should add new data import, when user is an administrator. Also, it
should redirect to a page to assign fields.
"""
self.data['category_create'] = 'false'
self.data['category'] = self.category.id
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(request, project_id=self.project.id)
self.assertEqual(response.status_code, 302)
self.assertIn(
reverse(
'geokey_dataimports:dataimport_assign_fields',
kwargs={
'project_id': self.project.id,
'dataimport_id': DataImport.objects.first().id
}
),
response['location']
)
self.assertEqual(DataImport.objects.count(), 1)
self.assertEqual(DataField.objects.count(), 3)
self.assertEqual(DataFeature.objects.count(), 3)
def test_post_when_wrong_data(self):
"""
Test POST with admin, when data is wrong.
It should inform user that data is wrong.
"""
self.data['name'] = ''
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(request, project_id=self.project.id).render()
self.assertEqual(response.status_code, 200)
self.assertEqual(DataImport.objects.count(), 0)
self.assertEqual(DataField.objects.count(), 0)
self.assertEqual(DataFeature.objects.count(), 0)
def test_post_when_no_project(self):
"""
Test POST with admin, when project does not exist.
It should inform user that project does not exist.
"""
self.data['category_create'] = 'true'
self.data['category'] = None
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id + 123
).render()
form = DataImportForm(data=self.data)
rendered = render_to_string(
'di_add_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Project')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataImport.objects.count(), 0)
self.assertEqual(DataField.objects.count(), 0)
self.assertEqual(DataFeature.objects.count(), 0)
def test_post_when_no_category(self):
"""
Test POST with admin, when category does not exist.
It should add new data import, when user is an administrator. Also, it
should redirect to a page to create a new category and inform user that
category was not found.
"""
self.data['category_create'] = 'false'
self.data['category'] = self.category.id + 123
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(request, project_id=self.project.id)
self.assertEqual(response.status_code, 302)
self.assertIn(
reverse(
'geokey_dataimports:dataimport_create_category',
kwargs={
'project_id': self.project.id,
'dataimport_id': DataImport.objects.first().id
}
),
response['location']
)
self.assertEqual(DataImport.objects.count(), 1)
self.assertEqual(DataField.objects.count(), 3)
self.assertEqual(DataFeature.objects.count(), 3)
def test_post_when_project_is_locked(self):
"""
Test POST with admin, when project is locked.
It should not add new data import, when project is locked.
"""
self.project.islocked = True
self.project.save()
self.data['category_create'] = 'true'
self.data['category'] = None
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(request, project_id=self.project.id).render()
self.assertEqual(response.status_code, 200)
self.assertEqual(DataImport.objects.count(), 0)
self.assertEqual(DataField.objects.count(), 0)
self.assertEqual(DataFeature.objects.count(), 0)
class SingleDataImportPageTest(TestCase):
"""Test single data import page."""
def setUp(self):
"""Set up test."""
self.factory = RequestFactory()
self.request = HttpRequest()
self.view = SingleDataImportPage.as_view()
self.user = UserFactory.create()
self.contributor = UserFactory.create()
self.admin = UserFactory.create()
self.project = ProjectFactory.create(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
self.category = CategoryFactory.create(
project=self.project
)
self.dataimport = DataImportFactory.create(
project=self.project,
category=None
)
self.data = {
'name': 'Test Import',
'description': '',
'category': self.category.id
}
self.url = reverse('geokey_dataimports:single_dataimport', kwargs={
'project_id': self.project.id,
'dataimport_id': self.dataimport.id
})
setattr(self.request, 'session', 'session')
messages = FallbackStorage(self.request)
setattr(self.request, '_messages', messages)
def tearDown(self):
"""Tear down test."""
for dataimport in DataImport.objects.all():
if dataimport.file:
dataimport.file.delete()
def test_get_with_anonymous(self):
"""
Test GET with anonymous.
It should redirect to login page.
"""
self.request.user = AnonymousUser()
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
def test_get_with_user(self):
"""
Test GET with user.
It should not allow access to the page, when user is not an
administrator.
"""
self.request.user = self.user
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = DataImportForm()
rendered = render_to_string(
'di_single_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_contributor(self):
"""
Test GET with contributor.
It should not allow access to the page, when user is not an
administrator.
"""
self.request.user = self.contributor
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = DataImportForm()
rendered = render_to_string(
'di_single_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_admin(self):
"""
Test GET with admin.
It should render the page with a project and data import.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = DataImportForm()
rendered = render_to_string(
'di_single_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'project': self.project,
'dataimport': self.dataimport
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_when_no_project(self):
"""
Test GET with admin, when project does not exist.
It should inform user that data import does not exist.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id + 123,
dataimport_id=self.dataimport.id
).render()
form = DataImportForm()
rendered = render_to_string(
'di_single_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import'),
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_when_no_dataimport(self):
"""
Test GET with admin, when data import does not exist.
It should inform user that data import does not exist.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id + 123
).render()
form = DataImportForm()
rendered = render_to_string(
'di_single_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import'),
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_post_with_anonymous(self):
"""
Test POST with anonymous.
It should redirect to login page.
"""
request = self.factory.post(self.url, self.data)
request.user = AnonymousUser()
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertEqual(reference.name, self.dataimport.name)
self.assertEqual(reference.description, self.dataimport.description)
def test_post_with_user(self):
"""
Test POST with user.
It should not allow editing a data import, when user is not an
administrator.
"""
request = self.factory.post(self.url, self.data)
request.user = self.user
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = DataImportForm(data=self.data)
rendered = render_to_string(
'di_single_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertEqual(reference.name, self.dataimport.name)
self.assertEqual(reference.description, self.dataimport.description)
def test_post_with_contributor(self):
"""
Test POST with contributor.
It should not allow editing a data import, when user is not an
administrator.
"""
request = self.factory.post(self.url, self.data)
request.user = self.contributor
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = DataImportForm(data=self.data)
rendered = render_to_string(
'di_single_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertEqual(reference.name, self.dataimport.name)
self.assertEqual(reference.description, self.dataimport.description)
def test_post_with_admin(self):
"""
Test POST with admin.
It should edit a data import, when user is an administrator.
"""
self.dataimport.category = self.category
self.dataimport.save()
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = DataImportForm(data=self.data)
rendered = render_to_string(
'di_single_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'project': self.project,
'dataimport': self.dataimport
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertEqual(reference.name, self.data.get('name'))
self.assertEqual(reference.description, self.data.get('description'))
def test_post_with_admin_when_selecting_category(self):
"""
Test POST with admin, when selecting category.
It should edit a data import and select a category, when user is an
administrator. Also, it should redirect to a page to assign fields.
"""
self.data['category'] = self.category.id
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn(
reverse(
'geokey_dataimports:dataimport_assign_fields',
kwargs={
'project_id': self.project.id,
'dataimport_id': self.dataimport.id
}
),
response['location']
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertEqual(reference.name, self.data.get('name'))
self.assertEqual(reference.description, self.data.get('description'))
def test_post_when_wrong_data(self):
"""
Test POST with admin, when data is wrong.
It should inform user that data is wrong.
"""
self.data['name'] = ''
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = DataImportForm(data=self.data)
rendered = render_to_string(
'di_single_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'project': self.project,
'dataimport': self.dataimport
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertEqual(reference.name, self.dataimport.name)
self.assertEqual(reference.description, self.dataimport.description)
def test_post_when_no_project(self):
"""
Test POST with admin, when project does not exist.
It should inform user that data import does not exist.
"""
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id + 123,
dataimport_id=self.dataimport.id
).render()
form = DataImportForm(data=self.data)
rendered = render_to_string(
'di_single_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertEqual(reference.name, self.dataimport.name)
self.assertEqual(reference.description, self.dataimport.description)
def test_post_when_no_dataimport(self):
"""
Test POST with admin, when data import does not exist.
It should inform user that data import does not exist.
"""
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id + 123
).render()
form = DataImportForm(data=self.data)
rendered = render_to_string(
'di_single_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertEqual(reference.name, self.dataimport.name)
self.assertEqual(reference.description, self.dataimport.description)
def test_post_when_no_category(self):
"""
Test POST with admin, when category does not exist.
It should inform user that category does not exist. Also, it should
redirect to a page to create a new category.
"""
self.data['category'] = self.category.id + 123
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn(
reverse(
'geokey_dataimports:dataimport_create_category',
kwargs={
'project_id': self.project.id,
'dataimport_id': self.dataimport.id
}
),
response['location']
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertEqual(reference.name, self.data.get('name'))
self.assertEqual(reference.description, self.data.get('description'))
def test_post_when_project_is_locked(self):
"""
Test POST with admin, when project is locked.
It should not edit a data import, when project is locked.
"""
self.project.islocked = True
self.project.save()
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = DataImportForm(data=self.data)
rendered = render_to_string(
'di_single_dataimport.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'project': self.project,
'dataimport': self.dataimport
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertEqual(reference.name, self.dataimport.name)
self.assertEqual(reference.description, self.dataimport.description)
class DataImportCreateCategoryPageTest(TestCase):
"""Test data import create category page."""
def setUp(self):
"""Set up test."""
self.factory = RequestFactory()
self.request = HttpRequest()
self.view = DataImportCreateCategoryPage.as_view()
self.user = UserFactory.create()
self.contributor = UserFactory.create()
self.admin = UserFactory.create()
self.project = ProjectFactory.create(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
self.category = CategoryFactory.create(
project=self.project
)
self.dataimport = DataImportFactory.create(
project=self.project,
category=None
)
self.data = {}
self.url = reverse(
'geokey_dataimports:dataimport_create_category',
kwargs={
'project_id': self.project.id,
'dataimport_id': self.dataimport.id
}
)
setattr(self.request, 'session', 'session')
messages = FallbackStorage(self.request)
setattr(self.request, '_messages', messages)
def tearDown(self):
"""Tear down test."""
for dataimport in DataImport.objects.all():
if dataimport.file:
dataimport.file.delete()
def test_get_with_anonymous(self):
"""
Test GET with anonymous.
It should redirect to login page.
"""
self.request.user = AnonymousUser()
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
def test_get_with_user(self):
"""
Test GET with user.
It should not allow access to the page, when user is not an
administrator.
"""
self.request.user = self.user
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = CategoryForm()
rendered = render_to_string(
'di_create_category.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_contributor(self):
"""
Test GET with contributor.
It should not allow access to the page, when user is not an
administrator.
"""
self.request.user = self.contributor
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = CategoryForm()
rendered = render_to_string(
'di_create_category.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_admin(self):
"""
Test GET with admin.
It should render the page with a project and data import.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = CategoryForm()
rendered = render_to_string(
'di_create_category.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'project': self.project,
'dataimport': self.dataimport
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_when_no_project(self):
"""
Test GET with admin, when project does not exist.
It should inform user that data import does not exist.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id + 123,
dataimport_id=self.dataimport.id
).render()
form = CategoryForm()
rendered = render_to_string(
'di_create_category.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import'),
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_when_no_dataimport(self):
"""
Test GET with admin, when data import does not exist.
It should inform user that data import does not exist.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id + 123
).render()
form = CategoryForm()
rendered = render_to_string(
'di_create_category.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import'),
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_post_with_anonymous(self):
"""
Test POST with anonymous.
It should redirect to login page.
"""
request = self.factory.post(self.url, self.data)
request.user = AnonymousUser()
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.category)
self.assertIsNone(reference.keys)
def test_post_with_user(self):
"""
Test POST with user.
It should not allow creating a category, when user is not an
administrator.
"""
request = self.factory.post(self.url, self.data)
request.user = self.user
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = CategoryForm(data=self.data)
rendered = render_to_string(
'di_create_category.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.category)
self.assertIsNone(reference.keys)
def test_post_with_contributor(self):
"""
Test POST with contributor.
It should not allow creating a category, when user is not an
administrator.
"""
request = self.factory.post(self.url, self.data)
request.user = self.contributor
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = CategoryForm(data=self.data)
rendered = render_to_string(
'di_create_category.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.category)
self.assertIsNone(reference.keys)
def test_post_when_no_project(self):
"""
Test POST with admin, when project does not exist.
It should inform user that data import does not exist.
"""
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id + 123,
dataimport_id=self.dataimport.id
).render()
form = CategoryForm(data=self.data)
rendered = render_to_string(
'di_create_category.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.category)
self.assertIsNone(reference.keys)
def test_post_when_no_dataimport(self):
"""
Test POST with admin, when data import does not exist.
It should inform user that data import does not exist.
"""
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id + 123
).render()
form = CategoryForm(data=self.data)
rendered = render_to_string(
'di_create_category.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.category)
self.assertIsNone(reference.keys)
def test_post_when_project_is_locked(self):
"""
Test POST with admin, when project is locked.
It should not create a category, when project is locked.
"""
self.project.islocked = True
self.project.save()
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
form = CategoryForm(data=self.data)
rendered = render_to_string(
'di_create_category.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': request.user,
'messages': get_messages(request),
'form': form,
'project': self.project,
'dataimport': self.dataimport
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.category)
self.assertIsNone(reference.keys)
class DataImportAssignFieldsPageTest(TestCase):
"""Test data import assign fields page."""
def setUp(self):
"""Set up test."""
self.factory = RequestFactory()
self.request = HttpRequest()
self.view = DataImportAssignFieldsPage.as_view()
self.user = UserFactory.create()
self.contributor = UserFactory.create()
self.admin = UserFactory.create()
self.project = ProjectFactory.create(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
self.category = CategoryFactory.create(
project=self.project
)
self.dataimport = DataImportFactory.create(
project=self.project
)
self.data = {}
self.url = reverse(
'geokey_dataimports:dataimport_assign_fields',
kwargs={
'project_id': self.project.id,
'dataimport_id': self.dataimport.id
}
)
setattr(self.request, 'session', 'session')
messages = FallbackStorage(self.request)
setattr(self.request, '_messages', messages)
def tearDown(self):
"""Tear down test."""
for dataimport in DataImport.objects.all():
if dataimport.file:
dataimport.file.delete()
def test_get_with_anonymous(self):
"""
Test GET with anonymous.
It should redirect to login page.
"""
self.request.user = AnonymousUser()
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
def test_get_with_user(self):
"""
Test GET with user.
It should not allow access to the page, when user is not an
administrator.
"""
self.request.user = self.user
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_assign_fields.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_contributor(self):
"""
Test GET with contributor.
It should not allow access to the page, when user is not an
administrator.
"""
self.request.user = self.contributor
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_assign_fields.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_admin(self):
"""
Test GET with admin.
It should render the page with a project and data import.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_assign_fields.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'project': self.project,
'dataimport': self.dataimport
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_when_no_project(self):
"""
Test GET with admin, when project does not exist.
It should inform user that data import does not exist.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id + 123,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_assign_fields.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import'),
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_when_no_dataimport(self):
"""
Test GET with admin, when data import does not exist.
It should inform user that data import does not exist.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id + 123
).render()
rendered = render_to_string(
'di_assign_fields.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import'),
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_post_with_anonymous(self):
"""
Test POST with anonymous.
It should redirect to login page.
"""
request = self.factory.post(self.url, self.data)
request.user = AnonymousUser()
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.keys)
def test_post_with_user(self):
"""
Test POST with user.
It should not allow assigning fields, when user is not an
administrator.
"""
request = self.factory.post(self.url, self.data)
request.user = self.user
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_assign_fields.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.keys)
def test_post_with_contributor(self):
"""
Test POST with contributor.
It should not allow assigning fields, when user is not an
administrator.
"""
request = self.factory.post(self.url, self.data)
request.user = self.contributor
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_assign_fields.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.keys)
def test_post_when_no_project(self):
"""
        Test POST with admin, when project does not exist.
It should inform user that data import does not exist.
"""
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id + 123,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_assign_fields.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.keys)
def test_post_when_no_dataimport(self):
"""
        Test POST with admin, when data import does not exist.
It should inform user that data import does not exist.
"""
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id + 123
).render()
rendered = render_to_string(
'di_assign_fields.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.keys)
def test_post_when_no_category(self):
"""
        Test POST with admin, when category is not associated.
It should not assign fields, when category is not associated.
"""
self.dataimport.category = None
self.dataimport.save()
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_assign_fields.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': request.user,
'messages': get_messages(request),
'project': self.project,
'dataimport': self.dataimport
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.keys)
def test_post_when_project_is_locked(self):
"""
        Test POST with admin, when project is locked.
It should not assign fields, when project is locked.
"""
self.project.islocked = True
self.project.save()
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_assign_fields.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': request.user,
'messages': get_messages(request),
'project': self.project,
'dataimport': self.dataimport
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
reference = DataImport.objects.get(pk=self.dataimport.id)
self.assertIsNone(reference.keys)
class DataImportAllDataFeaturesPageTest(TestCase):
"""Test data import all data features page."""
def setUp(self):
"""Set up test."""
self.factory = RequestFactory()
self.request = HttpRequest()
self.view = DataImportAllDataFeaturesPage.as_view()
self.user = UserFactory.create()
self.contributor = UserFactory.create()
self.admin = UserFactory.create()
self.project = ProjectFactory.create(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
self.category = CategoryFactory.create(project=self.project)
self.dataimport = DataImportFactory.create(
keys=['Name'],
project=self.project,
category=self.category
)
TextFieldFactory.create(
key='Name',
category=self.category
)
ids = []
self.datafeatures = {
'type': 'FeatureCollection',
'features': []
}
for datafeature in self.dataimport.datafeatures.all():
self.datafeatures['features'].append({
'type': 'Feature',
'id': datafeature.id,
'geometry': json.loads(datafeature.geometry.json)
})
ids.append(datafeature.id)
self.data = {
'ids': json.dumps(ids)
}
self.url = reverse(
'geokey_dataimports:dataimport_all_datafeatures',
kwargs={
'project_id': self.project.id,
'dataimport_id': self.dataimport.id
}
)
setattr(self.request, 'session', 'session')
messages = FallbackStorage(self.request)
setattr(self.request, '_messages', messages)
def tearDown(self):
"""Tear down test."""
for dataimport in DataImport.objects.all():
if dataimport.file:
dataimport.file.delete()
def test_get_with_anonymous(self):
"""
        Test GET with anonymous.
It should redirect to login page.
"""
self.request.user = AnonymousUser()
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
def test_get_with_user(self):
"""
        Test GET with user.
It should not allow to access the page, when user is not an
administrator.
"""
self.request.user = self.user
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_all_datafeatures.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_contributor(self):
"""
        Test GET with contributor.
It should not allow to access the page, when user is not an
administrator.
"""
self.request.user = self.contributor
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_all_datafeatures.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_with_admin(self):
"""
        Test GET with admin.
It should render the page with a project, data import and all data
features.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_all_datafeatures.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'project': self.project,
'dataimport': self.dataimport,
'datafeatures': self.datafeatures
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_when_no_project(self):
"""
        Test GET with admin, when project does not exist.
It should inform user that data import does not exist.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id + 123,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_all_datafeatures.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import'),
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_get_when_no_dataimport(self):
"""
        Test GET with admin, when data import does not exist.
It should inform user that data import does not exist.
"""
self.request.user = self.admin
self.request.method = 'GET'
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id + 123
).render()
rendered = render_to_string(
'di_all_datafeatures.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import'),
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
def test_post_with_anonymous(self):
"""
        Test POST with anonymous.
It should redirect to login page.
"""
request = self.factory.post(self.url, self.data)
request.user = AnonymousUser()
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
self.assertEqual(DataFeature.objects.filter(imported=True).count(), 0)
self.assertEqual(Observation.objects.count(), 0)
def test_post_with_user(self):
"""
        Test POST with user.
It should not allow to convert data features to contributions, when
user is not an administrator.
"""
request = self.factory.post(self.url, self.data)
request.user = self.user
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_all_datafeatures.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataFeature.objects.filter(imported=True).count(), 0)
self.assertEqual(Observation.objects.count(), 0)
def test_post_with_contributor(self):
"""
        Test POST with contributor.
It should not allow to convert data features to contributions, when
user is not an administrator.
"""
request = self.factory.post(self.url, self.data)
request.user = self.contributor
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_all_datafeatures.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataFeature.objects.filter(imported=True).count(), 0)
self.assertEqual(Observation.objects.count(), 0)
def test_post_with_admin(self):
"""
        Test POST with admin.
It should convert data features to contributions, when user is an
administrator.
"""
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn(
reverse(
'geokey_dataimports:single_dataimport',
kwargs={
'project_id': self.project.id,
'dataimport_id': self.dataimport.id
}
),
response['location']
)
self.assertEqual(DataFeature.objects.filter(imported=True).count(), 3)
self.assertEqual(Observation.objects.count(), 3)
def test_post_when_no_ids(self):
"""
        Test POST with admin, when no IDs are provided.
It should not allow to convert data features to contributions, when
no IDs are provided in the request.
"""
self.data = {}
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn(
reverse(
'geokey_dataimports:single_dataimport',
kwargs={
'project_id': self.project.id,
'dataimport_id': self.dataimport.id
}
),
response['location']
)
self.assertEqual(DataFeature.objects.filter(imported=True).count(), 0)
self.assertEqual(Observation.objects.count(), 0)
def test_post_when_no_project(self):
"""
        Test POST with admin, when project does not exist.
It should inform user that data import does not exist.
"""
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id + 123,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_all_datafeatures.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataFeature.objects.filter(imported=True).count(), 0)
self.assertEqual(Observation.objects.count(), 0)
def test_post_when_no_dataimport(self):
"""
        Test POST with admin, when data import does not exist.
It should inform user that data import does not exist.
"""
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id + 123
).render()
rendered = render_to_string(
'di_all_datafeatures.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(request).name,
'user': request.user,
'messages': get_messages(request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataFeature.objects.filter(imported=True).count(), 0)
self.assertEqual(Observation.objects.count(), 0)
def test_post_when_no_category(self):
"""
        Test POST with admin, when category is not associated.
It should not allow to convert data features to contributions, when
category is not associated.
"""
self.dataimport.category = None
self.dataimport.save()
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_all_datafeatures.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': request.user,
'messages': get_messages(request),
'project': self.project,
'dataimport': self.dataimport,
'datafeatures': self.datafeatures
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataFeature.objects.filter(imported=True).count(), 0)
self.assertEqual(Observation.objects.count(), 0)
def test_post_when_no_fields(self):
"""
        Test POST with admin, when fields are not assigned.
It should not allow to convert data features to contributions, when
fields are not assigned.
"""
self.dataimport.keys = None
self.dataimport.save()
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_all_datafeatures.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': request.user,
'messages': get_messages(request),
'project': self.project,
'dataimport': self.dataimport,
'datafeatures': self.datafeatures
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataFeature.objects.filter(imported=True).count(), 0)
self.assertEqual(Observation.objects.count(), 0)
def test_post_when_project_is_locked(self):
"""
        Test POST with admin, when project is locked.
It should not assign fields, when project is locked.
"""
self.project.islocked = True
self.project.save()
request = self.factory.post(self.url, self.data)
request.user = self.admin
setattr(request, 'session', 'session')
messages = FallbackStorage(request)
setattr(request, '_messages', messages)
response = self.view(
request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'di_all_datafeatures.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': request.user,
'messages': get_messages(request),
'project': self.project,
'dataimport': self.dataimport,
'datafeatures': self.datafeatures
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataFeature.objects.filter(imported=True).count(), 0)
self.assertEqual(Observation.objects.count(), 0)
class RemoveDataImportPageTest(TestCase):
"""Test remove data import page."""
def setUp(self):
"""Set up test."""
self.request = HttpRequest()
self.request.method = 'GET'
self.view = RemoveDataImportPage.as_view()
self.user = UserFactory.create()
self.contributor = UserFactory.create()
self.admin = UserFactory.create()
self.project = ProjectFactory.create(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
self.dataimport = DataImportFactory.create(project=self.project)
self.file = self.dataimport.file.path
setattr(self.request, 'session', 'session')
messages = FallbackStorage(self.request)
setattr(self.request, '_messages', messages)
def tearDown(self):
"""Tear down test."""
for dataimport in DataImport.objects.all():
if dataimport.file:
dataimport.file.delete()
if os.path.isfile(self.file):
os.remove(self.file)
def test_get_with_anonymous(self):
"""
        Test GET with anonymous.
It should redirect to login page.
"""
self.request.user = AnonymousUser()
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn('/admin/account/login/', response['location'])
def test_get_with_user(self):
"""
        Test GET with user.
It should not allow to access the page, when user is not an
administrator.
"""
self.request.user = self.user
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'base.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataImport.objects.count(), 1)
def test_get_with_contributor(self):
"""
        Test GET with contributor.
It should not allow to access the page, when user is not an
administrator.
"""
self.request.user = self.contributor
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'base.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataImport.objects.count(), 1)
def test_get_with_admin(self):
"""
        Test GET with admin.
It should remove import and redirect to all imports of a project.
"""
self.request.user = self.admin
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn(
reverse(
'geokey_dataimports:all_dataimports',
kwargs={'project_id': self.project.id}
),
response['location']
)
self.assertEqual(DataImport.objects.count(), 0)
def test_get_when_no_project(self):
"""
        Test GET with admin, when project does not exist.
It should inform user that data import does not exist.
"""
self.request.user = self.admin
response = self.view(
self.request,
project_id=self.project.id + 123,
dataimport_id=self.dataimport.id
).render()
rendered = render_to_string(
'base.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataImport.objects.count(), 1)
def test_get_when_no_import(self):
"""
        Test GET with admin, when import does not exist.
It should inform user that data import does not exist.
"""
self.request.user = self.admin
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id + 123
).render()
rendered = render_to_string(
'base.html',
{
'GEOKEY_VERSION': version.get_version(),
'PLATFORM_NAME': get_current_site(self.request).name,
'user': self.request.user,
'messages': get_messages(self.request),
'error': 'Not found.',
'error_description': does_not_exist_msg('Data import')
}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
render_helpers.remove_csrf(response.content.decode('utf-8')),
rendered
)
self.assertEqual(DataImport.objects.count(), 1)
def test_get_when_project_is_locked(self):
"""
        Test GET with admin, when project is locked.
It should inform user that the project is locked and redirect to the
same data import.
"""
self.project.islocked = True
self.project.save()
self.request.user = self.admin
response = self.view(
self.request,
project_id=self.project.id,
dataimport_id=self.dataimport.id
)
self.assertEqual(response.status_code, 302)
self.assertIn(
reverse(
'geokey_dataimports:single_dataimport',
kwargs={
'project_id': self.project.id,
'dataimport_id': self.dataimport.id
}
),
response['location']
)
self.assertEqual(DataImport.objects.count(), 1)
| mit | -7,359,887,042,110,271,000 | 31.908733 | 79 | 0.557987 | false |
chadmv/cmt | scripts/cmt/rig/swingtwist.py | 1 | 14549 | """Creates a node network to extract swing/twist rotation of a transform to drive
another transform's offsetParentMatrix.
The network calculates the local rotation swing and twist offset around the specified
twist axis relative to the local rest orientation. This allows users to specify how
much swing and twist they want to propagate to another transform. Uses include driving
an upper arm twist joint from the shoulder and driving forearm twist joints from the
wrist.
.. raw:: html
<div style="position: relative; padding-bottom: 56.25%; height: 0; overflow: hidden;">
<iframe src="https://www.youtube.com/embed/12tyQc93Y7A" style="position: absolute; top: 0; left: 0; width: 100%; height: 100%; border:0;" allowfullscreen title="YouTube Video"></iframe>
</div>
Since the network uses quaternions, partial swing and twist values between 0.0 and 1.0
will see a flip when the driver transform rotates past 180 degrees.
The setup can either be made with several standard Maya nodes, or the compiled plug-in
can be used to create a single node. Setting cmt.settings.ENABLE_PLUGINS to False will
use vanilla Maya nodes. Otherwise, the compiled plug-in will be used.
Example Usage
=============
The twist decomposition network can be accessed in the cmt menu::
CMT > Rigging > Connect Twist Joint
Twist child of shoulder::
shoulder
|- twist_joint1
|- twist_joint2
|- elbow
create_swing_twist(shoulder, twist_joint1, twist_weight=-1.0, swing_weight=0.0)
create_swing_twist(shoulder, twist_joint2, twist_weight=-0.5, swing_weight=0.0)
Twist forearm from wrist::
elbow
|- twist_joint1
|- twist_joint2
|- wrist
create_swing_twist(wrist, twist_joint1, twist_weight=0.5, swing_weight=0.0)
create_swing_twist(wrist, twist_joint2, twist_weight=1.0, swing_weight=0.0)
Use no plugins::
import cmt.settings as settings
settings.ENABLE_PLUGINS = False
create_swing_twist(wrist, twist_joint1, twist_weight=0.5, swing_weight=0.0)
create_swing_twist(wrist, twist_joint2, twist_weight=1.0, swing_weight=0.0)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import maya.cmds as cmds
import maya.mel as mel
import maya.api.OpenMaya as OpenMaya
from cmt.ui.optionbox import OptionBox
from cmt.settings import DOCUMENTATION_ROOT
import cmt.settings as settings
from cmt.dge import dge
import cmt.shortcuts as shortcuts
import math
logger = logging.getLogger(__name__)
# User defined attribute names used in the network
TWIST_WEIGHT = "twist"
SWING_WEIGHT = "swing"
TWIST_OUTPUT = "twistOutput"
INV_TWIST_OUTPUT = "invertedTwistOutput"
SWING_OUTPUT = "swingOutput"
INV_SWING_OUTPUT = "invertedSwingOutput"
HELP_URL = "{}/rig/swingtwist.html".format(DOCUMENTATION_ROOT)
def create_swing_twist(
driver, driven, twist_weight=1.0, swing_weight=1.0, twist_axis=0
):
"""Create a node network to drive a transforms offsetParentMatrix from the
decomposed swing/twist of another transform.
Setting cmt.settings.ENABLE_PLUGINS to False will use vanilla Maya nodes. Otherwise,
the compiled plug-in will be used.
:param driver: Driver transform
:param driven: Driven transform
:param twist_weight: -1 to 1 twist scalar
:param swing_weight: -1 to 1 swing scalar
:param twist_axis: Local twist axis on driver (0: X, 1: Y, 2: Z)
"""
if settings.ENABLE_PLUGINS:
cmds.loadPlugin("cmt", qt=True)
cmds.swingTwist(
driver, driven, twist=twist_weight, swing=swing_weight, twistAxis=twist_axis
)
return
for attr in [TWIST_OUTPUT, INV_TWIST_OUTPUT, SWING_OUTPUT, INV_SWING_OUTPUT]:
if not cmds.objExists("{}.{}".format(driver, attr)):
cmds.addAttr(driver, ln=attr, at="message")
if not _twist_network_exists(driver):
_create_twist_decomposition_network(driver, twist_axis)
for attr in [TWIST_WEIGHT, SWING_WEIGHT]:
if not cmds.objExists("{}.{}".format(driven, attr)):
cmds.addAttr(
driven,
ln=attr,
keyable=True,
minValue=0,
maxValue=1,
defaultValue=math.fabs(twist_weight),
)
    twist, inv_twist, swing, inv_swing = _get_swing_twist_attributes(driver, twist_axis)
twist_slerp = _create_slerp(driven, twist_weight, twist, inv_twist, TWIST_WEIGHT)
swing_slerp = _create_slerp(driven, swing_weight, swing, inv_swing, SWING_WEIGHT)
rotation = cmds.createNode("quatProd", name="{}_rotation".format(driver))
cmds.connectAttr(
"{}.outputQuat".format(twist_slerp), "{}.input1Quat".format(rotation)
)
cmds.connectAttr(
"{}.outputQuat".format(swing_slerp), "{}.input2Quat".format(rotation)
)
rotation_matrix = cmds.createNode(
"composeMatrix", name="{}_rotation_matrix".format(driver)
)
cmds.setAttr("{}.useEulerRotation".format(rotation_matrix), 0)
cmds.connectAttr(
"{}.outputQuat".format(rotation), "{}.inputQuat".format(rotation_matrix)
)
mult = cmds.createNode("multMatrix", name="{}_offset_parent_matrix".format(driven))
cmds.connectAttr(
"{}.outputMatrix".format(rotation_matrix), "{}.matrixIn[0]".format(mult)
)
pinv = OpenMaya.MMatrix(cmds.getAttr("{}.parentInverseMatrix[0]".format(driven)))
m = OpenMaya.MMatrix(cmds.getAttr("{}.worldMatrix[0]".format(driven)))
local_rest_matrix = m * pinv
cmds.setAttr("{}.matrixIn[1]".format(mult), list(local_rest_matrix), type="matrix")
cmds.connectAttr(
"{}.matrixSum".format(mult), "{}.offsetParentMatrix".format(driven)
)
# Zero out local xforms to prevent double xform
for attr in ["{}{}".format(x, y) for x in ["t", "r", "jo"] for y in "xyz"]:
is_locked = cmds.getAttr("{}.{}".format(driven, attr), lock=True)
if is_locked:
cmds.setAttr("{}.{}".format(driven, attr), lock=False)
cmds.setAttr("{}.{}".format(driven, attr), 0.0)
if is_locked:
cmds.setAttr("{}.{}".format(driven, attr), lock=True)
logger.info(
"Created swing twist network to drive {} from {}".format(driven, driver)
)
def _twist_network_exists(driver):
"""Test whether the twist decomposition network already exists on driver.
:param driver: Driver transform
:return: True or False
"""
has_twist_attribute = cmds.objExists("{}.{}".format(driver, TWIST_OUTPUT))
if not has_twist_attribute:
return False
twist_node = cmds.listConnections("{}.{}".format(driver, TWIST_OUTPUT), d=False)
return True if twist_node else False
def _create_twist_decomposition_network(driver, twist_axis):
"""Create the twist decomposition network for driver.
:param driver: Driver transform
:param twist_axis: Local twist axis on driver
"""
# Connect message attributes to the decomposed twist nodes so we can reuse them
# if the network is driving multiple nodes
mult = cmds.createNode("multMatrix", name="{}_local_matrix".format(driver))
parent_inverse = "{}.parentInverseMatrix[0]".format(driver)
world_matrix = "{}.worldMatrix[0]".format(driver)
cmds.connectAttr(world_matrix, "{}.matrixIn[0]".format(mult))
cmds.connectAttr(parent_inverse, "{}.matrixIn[1]".format(mult))
pinv = OpenMaya.MMatrix(cmds.getAttr(parent_inverse))
m = OpenMaya.MMatrix(cmds.getAttr(world_matrix))
inv_local_rest_matrix = (m * pinv).inverse()
cmds.setAttr(
"{}.matrixIn[2]".format(mult), list(inv_local_rest_matrix), type="matrix"
)
rotation = cmds.createNode("decomposeMatrix", name="{}_rotation".format(driver))
cmds.connectAttr("{}.matrixSum".format(mult), "{}.inputMatrix".format(rotation))
twist = cmds.createNode("quatNormalize", name="{}_twist".format(driver))
cmds.connectAttr(
"{}.outputQuat.outputQuatW".format(rotation),
"{}.inputQuat.inputQuatW".format(twist),
)
axis = "XYZ"[twist_axis]
cmds.connectAttr(
"{}.outputQuat.outputQuat{}".format(rotation, axis),
"{}.inputQuat.inputQuat{}".format(twist, axis),
)
# swing = twist.inverse() * rotation
inv_twist = cmds.createNode("quatInvert", name="{}_inverse_twist".format(driver))
cmds.connectAttr("{}.outputQuat".format(twist), "{}.inputQuat".format(inv_twist))
swing = cmds.createNode("quatProd", name="{}_swing".format(driver))
cmds.connectAttr("{}.outputQuat".format(inv_twist), "{}.input1Quat".format(swing))
cmds.connectAttr("{}.outputQuat".format(rotation), "{}.input2Quat".format(swing))
inv_swing = cmds.createNode("quatInvert", name="{}_inverse_swing".format(driver))
cmds.connectAttr("{}.outputQuat".format(swing), "{}.inputQuat".format(inv_swing))
# Connect the nodes to the driver so we can find and reuse them for multiple setups
for node, attr in [
(twist, TWIST_OUTPUT),
(inv_twist, INV_TWIST_OUTPUT),
(swing, SWING_OUTPUT),
(inv_swing, INV_SWING_OUTPUT),
]:
cmds.connectAttr("{}.message".format(node), "{}.{}".format(driver, attr))
def _get_swing_twist_attributes(driver, twist_axis):
    """Get the quaternion output attributes of the twist decomposition network.
    :param driver: Driver transform
    :param twist_axis: Local twist axis of driver, used if the network needs to be created
    :return: List of quaternion output attributes [twist, inverted twist, swing, inverted swing]
"""
nodes = []
for attr in [TWIST_OUTPUT, INV_TWIST_OUTPUT, SWING_OUTPUT, INV_SWING_OUTPUT]:
node = cmds.listConnections("{}.{}".format(driver, attr), d=False)
if not node:
# The network isn't connected so create it
_create_twist_decomposition_network(driver, twist_axis)
            return _get_swing_twist_attributes(driver, twist_axis)
nodes.append(node[0])
return ["{}.outputQuat".format(node) for node in nodes]
def _create_slerp(driven, weight, rotation, inv_rotation, attribute):
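    # Blend from the identity quaternion (input1QuatW=1) toward the swing/twist quaternion by
    # the driven node's weight attribute; negative weights blend toward the inverted quaternion.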
slerp = cmds.createNode("quatSlerp", name="{}_{}_slerp".format(driven, attribute))
cmds.setAttr("{}.{}".format(driven, attribute), math.fabs(weight))
cmds.connectAttr("{}.{}".format(driven, attribute), "{}.inputT".format(slerp))
cmds.setAttr("{}.input1QuatW".format(slerp), 1)
if weight >= 0.0:
cmds.connectAttr(rotation, "{}.input2Quat".format(slerp))
else:
cmds.connectAttr(inv_rotation, "{}.input2Quat".format(slerp))
return slerp
def create_from_menu(*args, **kwargs):
sel = cmds.ls(sl=True)
if len(sel) != 2:
raise RuntimeError("Select driver transform then driven transform.")
driver, driven = sel
kwargs = Options.get_kwargs()
create_swing_twist(driver, driven, **kwargs)
def display_menu_options(*args, **kwargs):
options = Options("Swing Twist Options", HELP_URL)
options.show()
class Options(OptionBox):
SWING_WEIGHT_WIDGET = "cmt_swing_weight"
TWIST_WEIGHT_WIDGET = "cmt_twist_weight"
TWIST_AXIS_WIDGET = "cmt_twist_axis"
@classmethod
def get_kwargs(cls):
"""Gets the function arguments either from the option box widgets or the saved
option vars. If the widgets exist, their values will be saved to the option
vars.
:return: A dictionary of the arguments to the create_twist_decomposition
function."""
kwargs = {}
if cmds.floatSliderGrp(Options.TWIST_WEIGHT_WIDGET, exists=True):
kwargs["twist_weight"] = cmds.floatSliderGrp(
Options.TWIST_WEIGHT_WIDGET, q=True, value=True
)
cmds.optionVar(fv=(Options.TWIST_WEIGHT_WIDGET, kwargs["twist_weight"]))
else:
kwargs["twist_weight"] = cmds.optionVar(q=Options.TWIST_WEIGHT_WIDGET)
if cmds.floatSliderGrp(Options.SWING_WEIGHT_WIDGET, exists=True):
kwargs["swing_weight"] = cmds.floatSliderGrp(
Options.SWING_WEIGHT_WIDGET, q=True, value=True
)
cmds.optionVar(fv=(Options.SWING_WEIGHT_WIDGET, kwargs["swing_weight"]))
else:
kwargs["twist_weight"] = cmds.optionVar(q=Options.TWIST_WEIGHT_WIDGET)
if cmds.optionMenuGrp(Options.TWIST_AXIS_WIDGET, exists=True):
value = cmds.optionMenuGrp(Options.TWIST_AXIS_WIDGET, q=True, sl=True)
kwargs["twist_axis"] = value - 1
cmds.optionVar(iv=(Options.TWIST_AXIS_WIDGET, kwargs["twist_axis"]))
else:
kwargs["twist_axis"] = cmds.optionVar(q=Options.TWIST_AXIS_WIDGET)
return kwargs
def create_ui(self):
cmds.columnLayout(adj=True)
for widget in [
Options.SWING_WEIGHT_WIDGET,
Options.TWIST_WEIGHT_WIDGET,
Options.TWIST_AXIS_WIDGET,
]:
# Delete the widgets so we don't create multiple controls with the same name
try:
cmds.deleteUI(widget, control=True)
except RuntimeError:
pass
swing_weight = cmds.optionVar(q=Options.SWING_WEIGHT_WIDGET)
cmds.floatSliderGrp(
Options.SWING_WEIGHT_WIDGET,
label="Swing weight",
field=True,
minValue=-1.0,
maxValue=1.0,
fieldMinValue=-1.0,
fieldMaxValue=1.0,
value=swing_weight,
step=0.1,
precision=2,
)
twist_weight = cmds.optionVar(q=Options.TWIST_WEIGHT_WIDGET)
cmds.floatSliderGrp(
Options.TWIST_WEIGHT_WIDGET,
label="Twist weight",
field=True,
minValue=-1.0,
maxValue=1.0,
fieldMinValue=-1.0,
fieldMaxValue=1.0,
value=twist_weight,
step=0.1,
precision=2,
)
twist_axis = cmds.optionVar(q=Options.TWIST_AXIS_WIDGET)
twist_axis = 1 if not twist_axis else twist_axis + 1
cmds.optionMenuGrp(Options.TWIST_AXIS_WIDGET, l="Twist Axis")
cmds.menuItem(label="X")
cmds.menuItem(label="Y")
cmds.menuItem(label="Z")
cmds.optionMenuGrp(Options.TWIST_AXIS_WIDGET, e=True, sl=twist_axis)
def on_apply(self):
create_from_menu()
def on_reset(self):
cmds.floatSliderGrp(Options.SWING_WEIGHT_WIDGET, e=True, value=1)
cmds.floatSliderGrp(Options.TWIST_WEIGHT_WIDGET, e=True, value=1)
cmds.optionMenuGrp(Options.TWIST_AXIS_WIDGET, e=True, sl=1)
def on_save(self):
Options.get_kwargs()
| mit | -4,035,239,516,782,056,000 | 36.594315 | 191 | 0.652279 | false |
TurkuNLP/SRNNMT | old_/big_run.py | 1 | 12851 | from keras.models import Sequential, Graph, Model, model_from_json
from keras.layers import Dense, Dropout, Activation, Merge, Input, merge, Flatten
from keras.layers.recurrent import GRU
from keras.callbacks import Callback,ModelCheckpoint
from keras.layers.embeddings import Embedding
import numpy as np
import sys
import math
import json
import gzip
import conllutil3 as cu
import data_dense
from test import load_model
from dictionary_baseline import build_dictionary
min_len=5
max_len=30
def read_fin_parsebank(fname,max_sent=10000):
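    # Yield space-joined surface forms of sentences from a gzipped CoNLL-U parsebank whose
    # length falls within [min_len, max_len], stopping after max_sent sentences.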
# sentences=[]
counter=0
for comm, sent in cu.read_conllu(gzip.open(fname,"rt",encoding="utf-8")):
if min_len<=len(sent)<=max_len:
txt=" ".join(line[cu.FORM] for line in sent)
yield txt
# sentences.append(txt)
counter+=1
if counter==max_sent:
break
# return sentences
def sent_reader(f):
words=[]
for line in f:
line=line.strip()
if line=="</s>": # end of sentence
if words:
yield words
words=[]
cols=line.split("\t")
if len(cols)==1:
continue
words.append(cols[0])
def read_eng_parsebank(fname,max_sent=10000):
counter=0
for sent in sent_reader(gzip.open(fname,"rt",encoding="utf-8")):
if min_len<=len(sent)<=max_len:
txt=" ".join(sent)
yield txt
counter+=1
if counter==max_sent:
break
def fill_batch(minibatch_size,max_sent_len,vs,data_iterator,ngrams):
""" Iterates over the data_iterator and fills the index matrices with fresh data
ms = matrices, vs = vocabularies
"""
    # custom fill_batch that also yields the original source and target sentences
ms=data_dense.Matrices(minibatch_size,max_sent_len,ngrams)
batchsize,max_sentence_len=ms.source_ngrams[ngrams[0]].shape #just pick any one of these really
row=0
src_sents=[]
trg_sents=[]
for (sent_src,sent_target),target in data_iterator:
src_sents.append(sent_src)
trg_sents.append(sent_target)
for N in ngrams:
for j,ngram in enumerate(data_dense.ngram_iterator(sent_src,N,max_sent_len)):
ms.source_ngrams[N][row,j]=vs.get_id(ngram,vs.source_ngrams[N])
for j,ngram in enumerate(data_dense.ngram_iterator(sent_target,N,max_sent_len)):
ms.target_ngrams[N][row,j]=vs.get_id(ngram,vs.target_ngrams[N])
ms.src_len[row]=len(sent_src.strip().split())
ms.trg_len[row]=len(sent_target.strip().split())
ms.targets[row]=target
row+=1
if row==batchsize:
# print(ms.matrix_dict, ms.targets)
yield ms.matrix_dict, ms.targets, src_sents, trg_sents
src_sents=[]
trg_sents=[]
row=0
ms=data_dense.Matrices(minibatch_size,max_sent_len,ngrams)
def iter_wrapper(src_fname,trg_fname,max_sent=10000):
for fin_sent,eng_sent in zip(read_fin_parsebank(src_fname,max_sent=max_sent),read_eng_parsebank(trg_fname,max_sent=max_sent)):
yield (fin_sent,eng_sent),1.0
#def iter_wrapper(src_fname,trg_fname,max_sent=1000):
# count=0
# for fin_sent,eng_sent in zip(open(src_fname),open(trg_fname)):
# fin_sent=fin_sent.strip()
# eng_sent=eng_sent.strip()
# yield (fin_sent,eng_sent),1.0
# count+=1
# if count==max_sent:
# break
def vectorize(voc_name,mname,src_fname,trg_fname,max_pairs):
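    # Encode sentence pairs with the trained model and write, for each sentence length,
    # the sentences (vdata/{fi,en}_sent_len*.txt.gz) and their L2-normalized float32
    # vectors (vdata/{fi,en}_vec_len*.npy).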
# create files
file_dict={}
for i in range(min_len,max_len+1):
file_dict["fi_sent_len{N}".format(N=i)]=gzip.open("vdata/fi_sent_len{N}.txt.gz".format(N=i),"wt",encoding="utf-8")
file_dict["fi_vec_len{N}".format(N=i)]=open("vdata/fi_vec_len{N}.npy".format(N=i),"wb")
file_dict["en_sent_len{N}".format(N=i)]=gzip.open("vdata/en_sent_len{N}.txt.gz".format(N=i),"wt",encoding="utf-8")
file_dict["en_vec_len{N}".format(N=i)]=open("vdata/en_vec_len{N}.npy".format(N=i),"wb")
minibatch_size=100
ngrams=(4,) # TODO: read this from somewhere
#Read vocabularies
vs=data_dense.read_vocabularies(voc_name,"xxx","xxx",False,ngrams)
vs.trainable=False
# load model
trained_model=load_model(mname)
output_size=trained_model.get_layer('source_dense').output_shape[1]
max_sent_len=trained_model.get_layer('source_ngrams_{n}'.format(n=ngrams[0])).output_shape[1]
print(output_size,max_sent_len)
# build matrices
ms=data_dense.Matrices(minibatch_size,max_sent_len,ngrams)
# get vectors
# for loop over minibatches
counter=0
for i,(mx,targets,src_data,trg_data) in enumerate(fill_batch(minibatch_size,max_sent_len,vs,iter_wrapper(src_fname,trg_fname,max_sent=max_pairs),ngrams)):
src,trg=trained_model.predict(mx) # shape = (minibatch_size,gru_width)
# loop over items in minibatch
for j,(src_v,trg_v) in enumerate(zip(src,trg)):
norm_src=src_v/np.linalg.norm(src_v)
norm_trg=trg_v/np.linalg.norm(trg_v)
fi_len=len(src_data[j].split())
en_len=len(trg_data[j].split())
norm_src.astype(np.float32).tofile(file_dict["fi_vec_len{N}".format(N=fi_len)])
print(src_data[j],file=file_dict["fi_sent_len{N}".format(N=fi_len)])
norm_trg.astype(np.float32).tofile(file_dict["en_vec_len{N}".format(N=en_len)])
print(trg_data[j],file=file_dict["en_sent_len{N}".format(N=en_len)])
counter+=1
if counter%100000==0:
print("Vectorized {c} sentence pairs".format(c=counter))
# print(type(norm_src[0].astype(np.float32)))
# counter+=1
# if counter==len(src_data):
# break
# if counter==len(src_data):
# break
for key,value in file_dict.items():
value.close()
# return src_vectors,trg_vectors
def rank_keras(src_vectors,trg_vectors,src_sentences,trg_sentences,verbose=True):
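    # Compute dot-product similarities between all source and target vectors and keep, for each
    # source sentence, the 3000 highest-scoring target sentences as candidate translations.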
ranks=[]
all_similarities=[] # list of sorted lists
src_data=[s.strip() for s in gzip.open(src_sentences,"rt")]
trg_data=[s.strip() for s in gzip.open(trg_sentences,"rt")]
src_vectors=np.fromfile(src_vectors,np.float32).reshape(len(src_data),150)[:100000,:]
trg_vectors=np.fromfile(trg_vectors,np.float32).reshape(len(trg_data),150)[:100000,:]
src_data=src_data[:100000]
trg_data=trg_data[:100000]
print("#",len(src_data),len(trg_data),file=sys.stderr)
to_keep=[]
# dot product
sim_matrix=np.dot(src_vectors,trg_vectors.T)
print("dot product ready",file=sys.stderr)
# argpartition
partition_matrix=np.argpartition(sim_matrix,-3000)#[-N-1:]
print("partition ready",file=sys.stderr)
results=[]
for i,row in enumerate(partition_matrix):
results.append((src_data[i],[(sim_matrix[i,idx],trg_data[idx]) for idx in row[-3000:]]))
# for i in range(5):
# print(results[i][0],results[i][1][:5])
return results
# for i in range(len(src_vectors)):
# sims=trg_vectors.dot(src_vectors[i])
# all_similarities.append(sims)
# N=10
## results=sorted(((sims[idx],idx,trg_data[idx]) for idx in np.argpartition(sims,-N-1)), reverse=True)#[-N-1:]), reverse=True)
# results=sorted(((sims[idx],idx,trg_data[idx]) for idx,s in enumerate(sims)), reverse=True)#[-N-1:]), reverse=True)
# if results[0][0]<0.6:
# continue
# result_idx=[idx for (sim,idx,txt) in results]
# ranks.append(result_idx.index(i)+1)
# to_keep.append((src_data[i],[(s,txt) for s,idx,txt in results[:1000]]))
# if verbose:
# print("source:",i,src_data[i],np.dot(src_vectors[i],trg_vectors[i]))
## print("reference:",trg_data[i])
## print("rank:",result_idx.index(i)+1)
# for s,idx,txt in results[:10]:
# print(idx,s,txt)
# print("****")
# print
# print("Keras:")
# print("Avg:",sum(ranks)/len(ranks))
# print("#num:",len(ranks))
#
## return all_similarities
# return to_keep
def rank_dictionary(keras_results,verbose=True):
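    # Re-rank the keras candidates by averaging the model similarity with bidirectional
    # dictionary word-overlap scores (Finnish->English and English->Finnish).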
f2e_dictionary=build_dictionary("lex.f2e", "uniq.train.tokens.fi.100K")
e2f_dictionary=build_dictionary("lex.e2f", "uniq.train.tokens.en.100K")
ranks=[]
na=0
all_scores=[]
for i, (src_sent,pairs) in enumerate(keras_results):
english_transl=set()
finnish_words=set(src_sent.lower().split())
for w in finnish_words:
if w in f2e_dictionary:
english_transl.update(f2e_dictionary[w])
combined=[]
for j,(s,trg_sent) in enumerate(pairs):
count=0
english_words=set(trg_sent.strip().lower().split())
score=len(english_words&english_transl)/len(english_words)
# scores.append((j,score/len(english_words)))
finnish_transl=set()
for w in english_words:
if w in e2f_dictionary:
finnish_transl.update(e2f_dictionary[w])
score2=len(finnish_words&finnish_transl)/len(finnish_words)
# scores2.append((j,score2/len(finnish_words)))
avg=(s+score+score2)/3
combined.append((avg,trg_sent))
# combined=[(x,(f+e)/2) for (x,f),(y,e) in zip(scores,scores2)]
results=sorted(combined, key=lambda x:x[0], reverse=True)
# if combined[0][0]<0.4:
# continue
all_scores.append((results[0][0],src_sent,results))
# all_scores.append(combined)
# if combined[i][1]==0.0: # TODO
# ranks.append(len(src_data)/2)
# na+=1
# continue
# result_idx=[idx for idx,score in results]
# ranks.append(result_idx.index(i)+1)
if verbose:
print("Source:",i,src_sent)
# print("Reference:",trg_data[i],combined[i][1])
# print("Rank:",result_idx.index(i)+1)
for s,txt in results[:10]:
print(txt,s)
print("*"*20)
print()
for (best_sim,src_sent,translations) in sorted(all_scores, key=lambda x:x[0], reverse=True):
print("source:",src_sent)
for (s,trg_sent) in translations[:20]:
print(trg_sent,s)
print("")
# print("Dictionary baseline:")
# print("Avg:",sum(ranks)/len(ranks))
print("# num:",len(all_scores),file=sys.stderr)
# print("n/a:",na)
# return all_scores
def test(src_fname,trg_fname,mname,voc_name,max_pairs):
# read sentences
src_data=[]
trg_data=[]
for src_line,trg_line in data_dense.iter_data(src_fname,trg_fname,max_pairs=max_pairs):
src_data.append(src_line.strip())
trg_data.append(trg_line.strip())
src_vectors,trg_vectors=vectorize(voc_name,src_data,trg_data,mname)
similarities=rank(src_vectors,trg_vectors,src_data,trg_data)
if __name__=="__main__":
import argparse
parser = argparse.ArgumentParser(description='')
g=parser.add_argument_group("Reguired arguments")
g.add_argument('-m', '--model', type=str, help='Give model name')
g.add_argument('-v', '--vocabulary', type=str, help='Give vocabulary file')
g.add_argument('--max_pairs', type=int, default=1000, help='Give vocabulary file, default={n}'.format(n=1000))
g.add_argument('--fi_len', type=int, help='Finnish matrix len')
g.add_argument('--en_len', type=int, help='English matrix len')
args = parser.parse_args()
if args.model==None or args.vocabulary==None:
parser.print_help()
sys.exit(1)
# vectorize(args.vocabulary,args.model,"pbv4_ud.part-00.gz","encow14ax01.xml.gz",args.max_pairs)
# vectorize(args.vocabulary,args.model,"data/all.test.fi.tokenized","data/all.test.en.tokenized")
# to_keep=rank_keras("finnish_vectors.npy","english_vectors.npy","finnish_sentences.txt.gz","english_sentences.txt.gz",verbose=False)
# results=rank_keras("vdata/fi_vec_len15.npy","vdata/en_vec_len15.npy","vdata/fi_sent_len15.txt.gz","vdata/en_sent_len15.txt.gz",verbose=False)
keras_results=rank_keras("vdata/fi_vec_len{n}.npy".format(n=args.fi_len),"vdata/en_vec_len{n}.npy".format(n=args.en_len),"vdata/fi_sent_len{n}.txt.gz".format(n=args.fi_len),"vdata/en_sent_len{n}.txt.gz".format(n=args.en_len),verbose=False)
rank_dictionary(keras_results,verbose=False)
# test("data/all.test.fi.tokenized","data/all.test.en.tokenized",args.model,args.vocabulary,args.max_pairs)
#for mx,targets in batch_iter: # input is shuffled!!!
# src,trg=model.predict(mx)
# print(targets,np.dot(src[0],trg[0]))
| apache-2.0 | -3,734,053,887,119,361,000 | 35.717143 | 243 | 0.604155 | false |
warrenspe/hconf | hconf/Config.py | 1 | 6230 | """
Copyright (C) 2016 Warren Spencer [email protected]
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Author: Warren Spencer
Email: [email protected]
"""
# Standard imports
import re
# Project imports
from .Exceptions import *
from .Subparsers._subparser import Subparser
__all__ = [
'ConfigManager',
]
class _Config(object):
"""
Config object which will be populated and returned as the config object holding all the configuration options.
"""
def __getitem__(self, name):
if hasattr(self, name):
return getattr(self, name)
raise KeyError(str(name))
class ConfigManager(object):
"""
Config manager which can have a sequence of subparsers assigned to it in order to delegate configuration parsing.
    Expected configuration options are set on the object explicitly.
"""
configNameRE = re.compile("^[a-zA-Z][\w\-_]*$")
def __init__(self, *args):
"""
Initializes a ConfigManager.
Inputs: args - ConfigManagers can be optionally initialized with a sequence of dictionaries representing
configuration options to add to the ConfigManager.
"""
self.configs = dict()
self.parsers = list()
self._config = None
for arg in args:
self.addConfig(**arg)
def registerParser(self, parser):
"""
Registers a parser to parse configuration inputs.
"""
if not isinstance(parser, Subparser):
raise TypeError("%s is not an instance of a subparser." % parser)
self.parsers.append(parser)
def addConfig(self, name, default=None, cast=None, required=False, description=None):
"""
Adds the given configuration option to the ConfigManager.
Inputs: name - The configuration name to accept.
required - A boolean indicating whether or not the configuration option is required or not.
cast - A type (or function accepting 1 argument and returning an object) to cast the input as.
If any error occurs during casting an InvalidConfigurationException will be raised.
default - The default value to assign to this configuration option. Note that None is not a valid
default if required=True.
description - A human readable description of this configuration parameter. Will be displayed when the
program is run with a -h flag.
"""
# Validate the name
if not self.configNameRE.match(name):
raise InvalidConfigurationException("Invalid configuration name: %s" % name)
self.configs[self._sanitizeName(name)] = {
'default': default,
'cast': cast,
'required': required,
'description': description
}
def parse(self):
"""
Executes the registered parsers to parse input configurations.
"""
self._config = _Config()
self._setDefaults()
for parser in self.parsers:
for key, value in parser.parse(self, self._config).items():
key = self._sanitizeName(key)
if key not in self.configs:
raise UnknownConfigurationException(key)
if value is not None:
self._setConfig(key, value)
self._ensureRequired()
self._cast()
return self._config
def _setDefaults(self):
"""
Sets all the expected configuration options on the config object as either the requested default value, or None.
"""
for configName, configDict in self.configs.items():
self._setConfig(configName, configDict['default'])
def _ensureRequired(self):
"""
Ensures that all configuration options marked as being required have been passed (ie are non-None).
Raises a MissingConfigurationException if a required configuration option was not passed.
"""
for configName, configDict in self.configs.items():
if configDict['required']:
if getattr(self._config, configName) is None:
raise MissingConfigurationException(configName)
def _cast(self):
"""
        Iterates through our parsed configuration options and casts any options with marked cast types.
"""
for configName, configDict in self.configs.items():
if configDict['cast'] is not None:
configValue = getattr(self._config, configName)
if configValue is not None:
try:
self._setConfig(configName, configDict['cast'](configValue))
except:
raise InvalidConfigurationException("%s: %r" % (configName, configValue))
def _setConfig(self, name, value):
"""
Sets the configuration option on the current configuration object being populated.
Inputs: name - The name of the configuration option to set.
value - The value of the configuration option to set.
"""
setattr(self._config, name, value)
def _sanitizeName(self, name):
"""
Sanitizes a configuration name so that it can be set onto the Config object safely (ex: replacing -'s with _'s).
Inputs: name - The string containing the name to sanitize.
Outputs: A string containing the sanitized string.
"""
return name.replace('-', '_')
| gpl-3.0 | -5,497,231,904,780,135,000 | 34.6 | 120 | 0.619904 | false |
tgcmteam/tgcmlinux | src/tgcm/contrib/freedesktopnet/networkmanager/activeconnection.py | 1 | 2450 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Martin Vidner
#
#
# Authors:
# Martin Vidner <martin at vidnet.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA
import dbus
from freedesktopnet.dbusclient import DBusClient, object_path
from freedesktopnet.dbusclient.func import *
from applet import Connection
from device import Device
from accesspoint import AccessPoint
from util import Enum
class ActiveConnection(DBusClient):
"""
Signals:
PropertiesChanged ( a{sv}: properties )
Properties:
ServiceName - s - (read)
Connection - o - (read)
SpecificObject - o - (read)
Devices - ao - (read)
State - u - (read) (NM_ACTIVE_CONNECTION_STATE)
Default - b - (read)
Enumerated types:
NM_ACTIVE_CONNECTION_STATE
"""
SERVICE = "org.freedesktop.NetworkManager"
IFACE = "org.freedesktop.NetworkManager.Connection.Active"
def __init__(self, opath):
super(ActiveConnection, self).__init__(dbus.SystemBus(), self.SERVICE, opath, default_interface=self.IFACE)
class State(Enum):
UNKNOWN = 0
ACTIVATING = 1
ACTIVATED = 2
def __getitem__(self, key):
"Implement Connection by adding the required ServiceName"
v = super(ActiveConnection, self).__getitem__(key)
if key == "Connection":
sn="org.freedesktop.NetworkManager"
v = Connection(sn, v)
return v
ActiveConnection._add_adaptors(
PropertiesChanged = SA(identity),
# ServiceName = PA(identity),
# Connection = PA(Connection), # implemented in __getitem__
SpecificObject = PA(AccessPoint), #in most cases. figure out.
Devices = PA(seq_adaptor(Device._create)),
State = PA(ActiveConnection.State),
Default = PA(bool),
)
| gpl-2.0 | 4,414,699,881,224,779,000 | 30.818182 | 115 | 0.682449 | false |
derekgreene/twitter-jsonl-tools | jsonl-tweet-export.py | 1 | 2962 | #!/usr/bin/env python
"""
A very simple script to export tweets from a JSONL file in CSV format.
Sample usage:
python jsonl-tweet-export.py sample/sample-tweets-500.jsonl -o sample/sample-tweets.csv
"""
import sys, fileinput, codecs, re
from datetime import datetime
from optparse import OptionParser
import logging as log
try:
import ujson as json
except:
import json
from prettytable import PrettyTable
# --------------------------------------------------------------
def parse_twitter_date( s, ignore_time_zones = True ):
# hack for cases where timezone is not supported by Python strptime
if ignore_time_zones:
parts = s.split(" ")
smodified =" ".join( parts[0:4] + [ parts[-1] ] )
return datetime.strptime(smodified,'%a %b %d %H:%M:%S %Y')
return datetime.strptime(s,'%a %b %d %H:%M:%S %z %Y')
def fmt_id( x ):
return '"%s"' % x
def norm( s, sep ):
s = s.replace(sep, " ")
return re.sub("\s+", " ", s )
# --------------------------------------------------------------
def main():
parser = OptionParser(usage="usage: %prog [options] json_file1 json_file2 ...")
parser.add_option("-t", "--top", action="store", type="int", dest="top", help="number of top authors to display", default=10)
parser.add_option("-o", action="store", type="string", dest="out_path", help="output path for CSV file", default="tweets.csv")
parser.add_option("-s", action="store", type="string", dest="separator", help="separator character for output file (default is comma)", default=",")
(options, args) = parser.parse_args()
if( len(args) < 1 ):
parser.error( "Must specify at least one JSONL file" )
log.basicConfig(level=20, format='%(message)s')
sep = options.separator
log.info("Tweets will be written to %s ..." % options.out_path )
header = ["Tweet_ID", "Created_At", "Author_Screen_Name", "Author_Id", "Text" ]
fout = codecs.open( options.out_path, "w", encoding="utf-8", errors="ignore" )
fout.write("%s\n" % sep.join(header) )
for tweets_path in args:
log.info("Loading tweets from %s ..." % tweets_path)
# Process every line as JSON data
num_tweets, num_failed, line_number = 0, 0, 0
for l in fileinput.input(tweets_path):
l = l.strip()
if len(l) == 0:
continue
try:
line_number += 1
tweet = json.loads(l)
sdate = parse_twitter_date(tweet["created_at"]).strftime("%Y-%m-%d %H:%M:%S")
values = [ fmt_id(tweet["id"]), sdate, norm(tweet["user"]["screen_name"], sep).lower(), fmt_id(tweet["user"]["id"]), norm(tweet["text"], sep) ]
fout.write("%s\n" % sep.join(values) )
num_tweets += 1
if line_number % 50000 == 0:
log.info("Processed %d lines" % line_number)
except Exception as e:
log.error("Failed to parse tweet on line %d: %s" % ( line_number, e ) )
num_failed += 1
fileinput.close()
log.info("Wrote %d tweets" % num_tweets )
fout.flush()
fout.close()
# --------------------------------------------------------------
if __name__ == "__main__":
main()
| apache-2.0 | 4,448,869,735,812,144,000 | 34.686747 | 149 | 0.605334 | false |
AlexStarov/Shop | applications/delivery2/urls.py | 1 | 1073 | # -*- coding: utf-8 -*-
from django.conf.urls import url
from applications.delivery2 import views
__author__ = 'AlexStarov'
urlpatterns = [
# aaa=re.compile(r'^\?(?P<key>[a-zA-Z0-9]{64,64})$')
# print re.match(aaa, '?naJX2WhVXlGtXlg0NFhbY1hqXu51M8MyUzwxpLm1SBvdcWerdNDRJPmtXmZYR0qe').group('key')
# url(regex=r'^\?(?P<key>[a-zA-Z0-9]{64,64})$', --> this is the correct expression
url(regex=r'^$',
view=views.ClickView.as_view(),
name='click', ),
url(regex=r'^(?P<key>[a-zA-Z0-9]{64})/opened/$',
view=views.OpenView.as_view(),
name='open', ),
url(regex=r'^show_iframe/(?P<pk>[0-9]+)/$',
view=views.IFrameTemplateView.as_view(),
name='show_iframe', ),
url(regex=r'^(?P<pk_in_base62>[a-zA-Z0-9]*)/go/$',
view=views.GoView.as_view(),
name='go'),
url(regex=r'^(?P<key>[a-zA-Z0-9]{64})/show/$',
view=views.ShowView.as_view(),
name='show', ),
# url(r'^(?P<mid>[A-f0-9-]+)/(?P<hash>[A-f0-9]+)/$',
# views.ClickView.as_view(), name='click'),
]
| apache-2.0 | -3,864,737,801,507,028,000 | 32.903226 | 103 | 0.562322 | false |
gkadillak/rockstor-core | src/rockstor/fs/btrfs.py | 1 | 31825 | """
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
"""
system level helper methods to interact with the btrfs filesystem
"""
import re
import time
import os
import shutil
from system.osi import run_command, create_tmp_dir, is_share_mounted, \
is_mounted, get_dev_byid_name, convert_to_kib
from system.exceptions import (CommandException)
from pool_scrub import PoolScrub
from django_ztask.decorators import task
import logging
logger = logging.getLogger(__name__)
MKFS_BTRFS = '/sbin/mkfs.btrfs'
BTRFS = '/sbin/btrfs'
MOUNT = '/bin/mount'
UMOUNT = '/bin/umount'
DEFAULT_MNT_DIR = '/mnt2/'
RMDIR = '/bin/rmdir'
QID = '2015'
def add_pool(pool, disks):
"""
pool is a btrfs filesystem.
"""
disks_fp = ['/dev/disk/by-id/' + d for d in disks]
cmd = [MKFS_BTRFS, '-f', '-d', pool.raid, '-m', pool.raid, '-L',
pool.name, ]
cmd.extend(disks_fp)
out, err, rc = run_command(cmd)
enable_quota(pool)
return out, err, rc
def get_pool_info(disk):
"""
Extracts any pool information by running btrfs fi show <disk> and collates
the results by 'Label', 'uuid', and current boot disk name. The disk name
is then translated to the by-id type found in /dev/disk/by-id so that it's
counterparts in the db's Disk.name field can be found.
N.B. devices without serial may have no by-id counterpart.
Used by CommandView()._refresh_pool_state() and
DiskDetailView()._btrfs_disk_import
:param disk: by-id disk name without path
:return: a dictionary with keys of 'disks', 'label', and 'uuid';
disks keys a list of devices, while label and uuid keys are for strings.
"""
cmd = [BTRFS, 'fi', 'show', '/dev/disk/by-id/%s' % disk]
o, e, rc = run_command(cmd)
pool_info = {'disks': [],}
for l in o:
if (re.match('Label', l) is not None):
fields = l.split()
pool_info['label'] = fields[1].strip("'")
pool_info['uuid'] = fields[3]
elif (re.match('\tdevid', l) is not None):
# We have a line starting with <tab>devid, extract the dev name.
# Previously this would have been sda and used as is but we need
# it's by-id references as that is the new format for Disks.name.
# Original sda extraction:
# pool_info['disks'].append(l.split()[-1].split('/')[-1])
# Updated '/dev/sda' extraction to save on a split we no longer need
# and use this 'now' name to get our by-id name with path removed.
# This is required as that is how device names are stored in the
# db Disk.name so that we can locate a drive and update it's pool
# field reference.
dev_byid, is_byid = get_dev_byid_name(l.split()[-1], True)
pool_info['disks'].append(dev_byid)
return pool_info
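# Sketch of the returned mapping for a hypothetical two disk pool labelled 'mypool':
#   get_pool_info('ata-SAMSUNG_ABC-serial1') ->
#   {'label': 'mypool', 'uuid': '<pool uuid>',
#    'disks': ['ata-SAMSUNG_ABC-serial1', 'ata-WDC_XYZ-serial2']}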
def pool_raid(mnt_pt):
o, e, rc = run_command([BTRFS, 'fi', 'df', mnt_pt])
# data, system, metadata, globalreserve
raid_d = {}
for l in o:
fields = l.split()
if (len(fields) > 1):
block = fields[0][:-1].lower()
raid = fields[1][:-1].lower()
if (block not in raid_d and raid != 'DUP'):
raid_d[block] = raid
if (raid_d['metadata'] == 'single'):
raid_d['data'] = raid_d['metadata']
return raid_d
def cur_devices(mnt_pt):
"""
When given a btrfs mount point a list containing the full path of all
devices is generated by wrapping the btrfs fi show <mnt_pt> command and
parsing the devid line.
Used by resize_pool() to ascertain membership status of a device in a pool
:param mnt_pt: btrfs mount point
:return: list containing the current reported devices associated with a
btrfs mount point in by-id (with full path) format.
"""
dev_list_byid = []
o, e, rc = run_command([BTRFS, 'fi', 'show', mnt_pt])
for l in o:
l = l.strip()
if (re.match('devid ', l) is not None):
# The following extracts the devices from the above command output,
# ie /dev/sda type names, but these are transient and we use their
# by-id type counterparts in the db and our logging hence the
# call to convert the 'now' names to by-id type names.
# N.B. As opposed to get_pool_info we want to preserve the path as
# our caller expects this full path format.
dev_byid, is_byid = get_dev_byid_name(l.split()[-1])
dev_list_byid.append(dev_byid)
return dev_list_byid
def resize_pool(pool, dev_list_byid, add=True):
"""
Acts on a given pool and list of device names by generating and then
executing the appropriate:-
"btrfs <device list> add(default)/delete root_mnt_pt(pool)"
command. Devices that fail a pool membership sanity check (ie, when
adding, devices that are already pool members, or when deleting, devices
that are not pool members) are skipped. If no supplied device passes
this check then no command is executed and None is returned.
:param pool: btrfs pool name
:param dev_list_byid: list of devices to add/delete in by-id (without path).
:param add: when true (default) or not specified then attempt to add
dev_list devices to pool, or when specified as False attempt to delete
dev_list devices from pool.
:return: Tuple of results from run_command(generated command) or None if
the device member/pool sanity check fails.
"""
dev_list_byid = ['/dev/disk/by-id/' + d for d in dev_list_byid]
root_mnt_pt = mount_root(pool)
cur_dev = cur_devices(root_mnt_pt)
resize_flag = 'add'
if (not add):
resize_flag = 'delete'
resize_cmd = [BTRFS, 'device', resize_flag, ]
# Until we verify that all devices are or are not already members of the
# given pools depending on if we are adding (default) or removing
# (add=False) those devices we set our resize flag to false.
resize = False
for d in dev_list_byid:
if (((resize_flag == 'add' and (d not in cur_dev)) or
(resize_flag == 'delete' and (d in cur_dev)))):
resize = True # Basic disk member of pool sanity check passed.
resize_cmd.append(d)
if (not resize):
return None
resize_cmd.append(root_mnt_pt)
return run_command(resize_cmd)
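# Usage sketch (device names are hypothetical by-id names without path):
#   resize_pool(pool, ['ata-WDC_XYZ-serial3'])             # add a disk
#   resize_pool(pool, ['ata-WDC_XYZ-serial3'], add=False)  # remove it again
# Either call returns None if none of the supplied disks pass the membership check.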
#Try mounting by-label first. If that is not possible, mount using every device
#in the set, one by one until success.
def mount_root(pool):
root_pool_mnt = DEFAULT_MNT_DIR + pool.name
if (is_share_mounted(pool.name)):
return root_pool_mnt
create_tmp_dir(root_pool_mnt)
mnt_device = '/dev/disk/by-label/%s' % pool.name
mnt_cmd = [MOUNT, mnt_device, root_pool_mnt, ]
mnt_options = ''
if (pool.mnt_options is not None):
mnt_options = pool.mnt_options
if (pool.compression is not None):
if (re.search('compress', mnt_options) is None):
mnt_options = ('%s,compress=%s' % (mnt_options, pool.compression))
if (os.path.exists(mnt_device)):
if (len(mnt_options) > 0):
mnt_cmd.extend(['-o', mnt_options])
run_command(mnt_cmd)
return root_pool_mnt
#If we cannot mount by-label, let's try mounting by device one by one
#until we get our first success.
if (pool.disk_set.count() < 1):
raise Exception('Cannot mount Pool(%s) as it has no disks in it.' % pool.name)
last_device = pool.disk_set.last()
for device in pool.disk_set.all():
mnt_device = ('/dev/disk/by-id/%s' % device.name)
if (os.path.exists(mnt_device)):
mnt_cmd = [MOUNT, mnt_device, root_pool_mnt, ]
if (len(mnt_options) > 0):
mnt_cmd.extend(['-o', mnt_options])
try:
run_command(mnt_cmd)
return root_pool_mnt
except Exception, e:
if (device.name == last_device.name):
#exhausted mounting using all devices in the pool
raise e
logger.error('Error mounting: %s. Will try using another device.' % mnt_cmd)
logger.exception(e)
raise Exception('Failed to mount Pool(%s) due to an unknown reason.' % pool.name)
def umount_root(root_pool_mnt):
if (not os.path.exists(root_pool_mnt)):
return
try:
o, e, rc = run_command([UMOUNT, '-l', root_pool_mnt])
except CommandException, ce:
if (ce.rc == 32):
for l in ce.err:
l = l.strip()
if (re.search('not mounted$', l) is not None):
return
raise ce
for i in range(20):
if (not is_mounted(root_pool_mnt)):
run_command([RMDIR, root_pool_mnt])
return
time.sleep(2)
run_command([UMOUNT, '-f', root_pool_mnt])
run_command([RMDIR, root_pool_mnt])
return
def is_subvol(mnt_pt):
show_cmd = [BTRFS, 'subvolume', 'show', mnt_pt]
o, e, rc = run_command(show_cmd, throw=False)
if (rc == 0):
return True
return False
def subvol_info(mnt_pt):
info = {}
o, e, rc = run_command([BTRFS, 'subvolume', 'show', mnt_pt], throw=False)
if (rc == 0):
for i in o:
fields = i.strip().split(':')
if (len(fields) > 1):
info[fields[0].strip()] = fields[1].strip()
return info
def add_share(pool, share_name, qid):
"""
share is a subvolume in btrfs.
"""
root_pool_mnt = mount_root(pool)
subvol_mnt_pt = root_pool_mnt + '/' + share_name
show_cmd = [BTRFS, 'subvolume', 'show', subvol_mnt_pt]
o, e, rc = run_command(show_cmd, throw=False)
if (rc == 0):
return o, e, rc
if (not is_subvol(subvol_mnt_pt)):
sub_vol_cmd = [BTRFS, 'subvolume', 'create', '-i', qid, subvol_mnt_pt]
return run_command(sub_vol_cmd)
return True
def mount_share(share, mnt_pt):
if (is_mounted(mnt_pt)):
return
mount_root(share.pool)
pool_device = ('/dev/disk/by-id/%s' % share.pool.disk_set.first().name)
subvol_str = 'subvol=%s' % share.subvol_name
create_tmp_dir(mnt_pt)
mnt_cmd = [MOUNT, '-t', 'btrfs', '-o', subvol_str, pool_device, mnt_pt]
return run_command(mnt_cmd)
def mount_snap(share, snap_name, snap_mnt=None):
pool_device = ('/dev/disk/by-id/%s' % share.pool.disk_set.first().name)
share_path = ('%s%s' % (DEFAULT_MNT_DIR, share.name))
rel_snap_path = ('.snapshots/%s/%s' % (share.name, snap_name))
snap_path = ('%s%s/%s' %
(DEFAULT_MNT_DIR, share.pool.name, rel_snap_path))
if (snap_mnt is None):
snap_mnt = ('%s/.%s' % (share_path, snap_name))
if (is_mounted(snap_mnt)):
return
mount_share(share, share_path)
if (is_subvol(snap_path)):
create_tmp_dir(snap_mnt)
return run_command([MOUNT, '-o', 'subvol=%s' % rel_snap_path,
pool_device, snap_mnt])
def subvol_list_helper(mnt_pt):
"""
temporary solution until btrfs is fixed. wait up to 30 secs :(
"""
num_tries = 0
while (True):
try:
return run_command([BTRFS, 'subvolume', 'list', mnt_pt])
except CommandException, ce:
if (ce.rc != 19):
# rc == 19 is due to the slow kernel cleanup thread. It should
# eventually succeed.
raise ce
time.sleep(1)
num_tries = num_tries + 1
if (num_tries > 30):
raise ce
def snapshot_list(mnt_pt):
o, e, rc = run_command([BTRFS, 'subvolume', 'list', '-s', mnt_pt])
snaps = []
for s in o:
snaps.append(s.split()[-1])
return snaps
def shares_info(pool):
# return a list of share names under this mount_point.
# useful to gather names of all shares in a pool
try:
mnt_pt = mount_root(pool)
except CommandException, e:
if (e.rc == 32):
#mount failed, so we just assume that something has gone wrong at a
#lower level, like a device failure. Return empty share map.
#application state can be removed. If the low level failure is
#recovered, state gets reconstructed anyway.
return {}
raise
o, e, rc = run_command([BTRFS, 'subvolume', 'list', '-s', mnt_pt])
snap_idmap = {}
for l in o:
if (re.match('ID ', l) is not None):
fields = l.strip().split()
snap_idmap[fields[1]] = fields[-1]
o, e, rc = run_command([BTRFS, 'subvolume', 'list', '-p', mnt_pt])
shares_d = {}
share_ids = []
for l in o:
if (re.match('ID ', l) is None):
continue
fields = l.split()
vol_id = fields[1]
if (vol_id in snap_idmap):
# snapshot
# if the snapshot directory is a direct child of a pool and is rw,
# then it's a Share. (aka Rockstor Share clone).
clone = False
if (len(snap_idmap[vol_id].split('/')) == 1):
o, e, rc = run_command([BTRFS, 'property', 'get',
'%s/%s' % (mnt_pt, snap_idmap[vol_id])])
for l in o:
if (l == 'ro=false'):
clone = True
if (not clone):
continue
parent_id = fields[5]
if (parent_id in share_ids):
# subvol of subvol. add it so child subvols can also be ignored.
share_ids.append(vol_id)
elif (parent_id in snap_idmap):
# snapshot/subvol of snapshot.
# add it so child subvols can also be ignored.
snap_idmap[vol_id] = fields[-1]
else:
shares_d[fields[-1]] = '0/%s' % vol_id
share_ids.append(vol_id)
return shares_d
def parse_snap_details(mnt_pt, fields):
writable = True
snap_name = None
o1, e1, rc1 = run_command([BTRFS, 'property', 'get',
'%s/%s' % (mnt_pt, fields[-1])])
for l1 in o1:
if (re.match('ro=', l1) is not None):
if (l1.split('=')[1] == 'true'):
writable = False
if (writable is True):
if (len(fields[-1].split('/')) == 1):
# writable snapshot + direct child of pool.
# So we'll treat it as a share, not a snapshot, and report no snap name.
return None, writable
snap_name = fields[-1].split('/')[-1]
return snap_name, writable
def snaps_info(mnt_pt, share_name):
o, e, rc = run_command([BTRFS, 'subvolume', 'list', '-u', '-p', '-q', mnt_pt])
share_id = share_uuid = None
for l in o:
if (re.match('ID ', l) is not None):
fields = l.split()
if (fields[-1] == share_name):
share_id = fields[1]
share_uuid = fields[12]
if (share_id is None): return {}
o, e, rc = run_command([BTRFS, 'subvolume', 'list', '-s', '-p', '-q',
'-u', mnt_pt])
snaps_d = {}
snap_uuids = []
for l in o:
if (re.match('ID ', l) is not None):
fields = l.split()
# parent uuid must be share_uuid or another snapshot's uuid
if (fields[7] != share_id and fields[15] != share_uuid and
fields[15] not in snap_uuids):
continue
snap_name, writable = parse_snap_details(mnt_pt, fields)
if (snap_name is not None):
snaps_d[snap_name] = ('0/%s' % fields[1], writable, )
# we rely on the observation that child snaps are listed after their
# parents, so no need to iterate through results separately.
# Instead, we add the uuid of a snap to the list and look up if
# it's a parent of subsequent entries.
snap_uuids.append(fields[17])
return snaps_d
def share_id(pool, share_name):
"""
returns the subvolume id, becomes the share's uuid.
@todo: this should be part of add_share -- btrfs create should atomically
return the id
"""
root_pool_mnt = mount_root(pool)
out, err, rc = subvol_list_helper(root_pool_mnt)
subvol_id = None
for line in out:
if (re.search(share_name + '$', line) is not None):
subvol_id = line.split()[1]
break
if (subvol_id is not None):
return subvol_id
raise Exception('subvolume id for share: %s not found.' % share_name)
def remove_share(pool, share_name, pqgroup, force=False):
"""
umount share if its mounted.
mount root pool
btrfs subvolume delete root_mnt/vol_name
umount root pool
"""
if (is_share_mounted(share_name)):
mnt_pt = ('%s%s' % (DEFAULT_MNT_DIR, share_name))
umount_root(mnt_pt)
root_pool_mnt = mount_root(pool)
subvol_mnt_pt = root_pool_mnt + '/' + share_name
if (not is_subvol(subvol_mnt_pt)):
return
if (force):
o, e, rc = run_command([BTRFS, 'subvolume', 'list', '-o', subvol_mnt_pt])
for l in o:
if (re.match('ID ', l) is not None):
subvol = root_pool_mnt + '/' + l.split()[-1]
run_command([BTRFS, 'subvolume', 'delete', subvol], log=True)
qgroup = ('0/%s' % share_id(pool, share_name))
delete_cmd = [BTRFS, 'subvolume', 'delete', subvol_mnt_pt]
run_command(delete_cmd, log=True)
qgroup_destroy(qgroup, root_pool_mnt)
return qgroup_destroy(pqgroup, root_pool_mnt)
def remove_snap(pool, share_name, snap_name):
root_mnt = mount_root(pool)
snap_path = ('%s/.snapshots/%s/%s' %
(root_mnt, share_name, snap_name))
if (is_mounted(snap_path)):
umount_root(snap_path)
if (is_subvol(snap_path)):
qgroup = ('0/%s' % share_id(pool, snap_name))
run_command([BTRFS, 'subvolume', 'delete', snap_path], log=True)
return qgroup_destroy(qgroup, root_mnt)
else:
o, e, rc = run_command([BTRFS, 'subvolume', 'list', '-s', root_mnt])
for l in o:
#just give the first match.
if (re.match('ID.*%s$' % snap_name, l) is not None):
snap = '%s/%s' % (root_mnt, l.split()[-1])
return run_command([BTRFS, 'subvolume', 'delete', snap], log=True)
def add_snap_helper(orig, snap, readonly=False):
cmd = [BTRFS, 'subvolume', 'snapshot', orig, snap]
if (readonly):
cmd.insert(3, '-r')
try:
return run_command(cmd)
except CommandException, ce:
if (ce.rc != 19):
# rc == 19 is due to the slow kernel cleanup thread. snapshot gets
# created just fine. lookup is delayed arbitrarily.
raise ce
def add_clone(pool, share, clone, snapshot=None):
"""
clones either a share or a snapshot
"""
pool_mnt = mount_root(pool)
orig_path = pool_mnt
if (snapshot is not None):
orig_path = ('%s/.snapshots/%s/%s' %
(orig_path, share, snapshot))
else:
orig_path = ('%s/%s' % (orig_path, share))
clone_path = ('%s/%s' % (pool_mnt, clone))
return add_snap_helper(orig_path, clone_path)
def add_snap(pool, share_name, snap_name, readonly=False):
"""
create a snapshot
"""
root_pool_mnt = mount_root(pool)
share_full_path = ('%s/%s' % (root_pool_mnt, share_name))
snap_dir = ('%s/.snapshots/%s' % (root_pool_mnt, share_name))
create_tmp_dir(snap_dir)
snap_full_path = ('%s/%s' % (snap_dir, snap_name))
return add_snap_helper(share_full_path, snap_full_path, readonly)
def rollback_snap(snap_name, sname, subvol_name, pool):
"""
1. validate destination snapshot and umount the share
2. remove the share
3. move the snapshot to share location and mount it.
"""
mnt_pt = ('%s%s' % (DEFAULT_MNT_DIR, sname))
snap_fp = ('%s/%s/.snapshots/%s/%s' % (DEFAULT_MNT_DIR, pool.name, sname,
snap_name))
if (not is_subvol(snap_fp)):
raise Exception('Snapshot(%s) does not exist. Rollback is not '
'possible' % snap_fp)
mount_root(pool)
if (is_share_mounted(sname)):
umount_root(mnt_pt)
remove_share(pool, subvol_name, '-1/-1')
shutil.move(snap_fp, '%s/%s/%s' % (DEFAULT_MNT_DIR, pool.name, sname))
create_tmp_dir(mnt_pt)
subvol_str = 'subvol=%s' % sname
dpath = '/dev/disk/by-id/%s' % pool.disk_set.first().name
mnt_cmd = [MOUNT, '-t', 'btrfs', '-o', subvol_str, dpath, mnt_pt]
run_command(mnt_cmd)
def switch_quota(pool, flag='enable'):
root_mnt_pt = mount_root(pool)
cmd = [BTRFS, 'quota', flag, root_mnt_pt]
return run_command(cmd)
def enable_quota(pool):
return switch_quota(pool)
def disable_quota(pool_name):
return switch_quota(pool_name, flag='disable')
def qgroup_id(pool, share_name):
sid = share_id(pool, share_name)
return '0/' + sid
def qgroup_max(mnt_pt):
o, e, rc = run_command([BTRFS, 'qgroup', 'show', mnt_pt], log=True)
res = 0
for l in o:
if (re.match('%s/' % QID, l) is not None):
cid = int(l.split()[0].split('/')[1])
if (cid > res):
res = cid
return res
def qgroup_create(pool):
# mount pool
mnt_pt = mount_root(pool)
qid = ('%s/%d' % (QID, qgroup_max(mnt_pt) + 1))
o, e, rc = run_command([BTRFS, 'qgroup', 'create', qid, mnt_pt], log=True)
return qid
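# Example: on a pool with no '2015/N' qgroups yet, qgroup_max() returns 0, so the
# first call to qgroup_create() creates and returns '2015/1'.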
def qgroup_destroy(qid, mnt_pt):
o, e, rc = run_command([BTRFS, 'qgroup', 'show', mnt_pt])
for l in o:
if (re.match(qid, l) is not None and l.split()[0] == qid):
return run_command([BTRFS, 'qgroup', 'destroy', qid, mnt_pt],
log=True)
return False
def qgroup_is_assigned(qid, pqid, mnt_pt):
# Returns true if the given qgroup qid is already assigned to pqid for the
# path(mnt_pt)
o, e, rc = run_command([BTRFS, 'qgroup', 'show', '-pc', mnt_pt])
for l in o:
fields = l.split()
if (len(fields) > 3 and
fields[0] == qid and
fields[3] == pqid):
return True
return False
def qgroup_assign(qid, pqid, mnt_pt):
if (qgroup_is_assigned(qid, pqid, mnt_pt)):
return True
# since btrfs-progs 4.2, qgroup assign succeeds but throws a warning:
# "WARNING: # quotas may be inconsistent, rescan needed" and returns with
# exit code 1.
try:
run_command([BTRFS, 'qgroup', 'assign', qid, pqid, mnt_pt])
except CommandException, e:
wmsg = 'WARNING: quotas may be inconsistent, rescan needed'
if (e.rc == 1 and e.err[0] == wmsg):
#schedule a rescan if one is not currently running.
dmsg = ('Quota inconsistency while assigning %s. Rescan scheduled.'
% qid)
try:
run_command([BTRFS, 'quota', 'rescan', mnt_pt])
return logger.debug(dmsg)
except CommandException, e2:
emsg = 'ERROR: quota rescan failed: Operation now in progress'
if (e2.rc == 1 and e2.err[0] == emsg):
return logger.debug('%s.. Another rescan already in progress.' % dmsg)
logger.exception(e2)
raise e2
logger.exception(e)
raise e
def update_quota(pool, qgroup, size_bytes):
root_pool_mnt = mount_root(pool)
# Until btrfs adds better support for qgroup limits. We'll not set limits.
# It looks like we'll see the fixes in 4.2 and final ones by 4.3.
# cmd = [BTRFS, 'qgroup', 'limit', str(size_bytes), qgroup, root_pool_mnt]
cmd = [BTRFS, 'qgroup', 'limit', 'none', qgroup, root_pool_mnt]
return run_command(cmd, log=True)
def share_usage(pool, share_id):
"""
Return the sum of the qgroup sizes of this share and any child subvolumes
"""
# Obtain path to share in pool
root_pool_mnt = mount_root(pool)
cmd = [BTRFS, 'subvolume', 'list', root_pool_mnt]
out, err, rc = run_command(cmd, log=True)
short_id = share_id.split('/')[1]
share_dir = ''
for line in out:
fields = line.split()
if (len(fields) > 0 and short_id in fields[1]):
share_dir = root_pool_mnt + '/' + fields[8]
break
# Obtain list of child subvolume qgroups
cmd = [BTRFS, 'subvolume', 'list', '-o', share_dir]
out, err, rc = run_command(cmd, log=True)
qgroups = [short_id]
for line in out:
fields = line.split()
if (len(fields) > 0):
qgroups.append(fields[1])
# Sum qgroup sizes
cmd = [BTRFS, 'qgroup', 'show', share_dir]
out, err, rc = run_command(cmd, log=True)
rusage = eusage = 0
for line in out:
fields = line.split()
qgroup = []
if (len(fields) > 0 and '/' in fields[0]):
qgroup = fields[0].split('/')
if (len(qgroup) > 0 and qgroup[1] in qgroups):
rusage += convert_to_kib(fields[1])
eusage += convert_to_kib(fields[2])
return (rusage, eusage)
def shares_usage(pool, share_map, snap_map):
# don't mount the pool if at least one share in the map is mounted.
usage_map = {}
mnt_pt = None
for s in share_map.keys():
if (is_share_mounted(share_map[s])):
mnt_pt = ('%s%s' % (DEFAULT_MNT_DIR, share_map[s]))
break
if (mnt_pt is None):
mnt_pt = mount_root(pool)
cmd = [BTRFS, 'qgroup', 'show', mnt_pt]
out, err, rc = run_command(cmd, log=True)
combined_map = dict(share_map, **snap_map)
for line in out:
fields = line.split()
if (len(fields) > 0 and fields[0] in combined_map):
r_usage = convert_to_kib(fields[-2])
e_usage = convert_to_kib(fields[-1])
usage_map[combined_map[fields[0]]] = (r_usage, e_usage)
return usage_map
def pool_usage(mnt_pt):
# @todo: remove temporary raid5/6 custom logic once fi usage
# supports raid5/6.
cmd = [BTRFS, 'fi', 'usage', '-b', mnt_pt]
total = 0
inuse = 0
free = 0
data_ratio = 1
raid56 = False
parity = 1
disks = set()
out, err, rc = run_command(cmd)
for e in err:
e = e.strip()
if (re.match('WARNING: RAID56', e) is not None):
raid56 = True
for o in out:
o = o.strip()
if (raid56 is True and re.match('/dev/', o) is not None):
disks.add(o.split()[0])
elif (raid56 is True and re.match('Data,RAID', o) is not None):
if (o[5:10] == 'RAID6'):
parity = 2
elif (re.match('Device size:', o) is not None):
total = int(o.split()[2]) / 1024
elif (re.match('Used:', o) is not None):
inuse = int(o.split()[1]) / 1024
elif (re.match('Free ', o) is not None):
free = int(o.split()[2]) / 1024
elif (re.match('Data ratio:', o) is not None):
data_ratio = float(o.split()[2])
if (data_ratio < 0.01):
data_ratio = 0.01
if (raid56 is True):
num_disks = len(disks)
if (num_disks > 0):
per_disk = total / num_disks
total = (num_disks - parity) * per_disk
else:
total = total / data_ratio
inuse = inuse / data_ratio
free = total - inuse
return (total, inuse, free)
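# Worked example of the raid5/6 branch (hypothetical pool): three 1 TiB devices in
# RAID5 report a Device size of 3 TiB, so per_disk = 1 TiB and the usable
# total = (3 disks - 1 parity) * 1 TiB = 2 TiB.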
def scrub_start(pool, force=False):
mnt_pt = mount_root(pool)
p = PoolScrub(mnt_pt)
p.start()
return p.pid
def scrub_status(pool):
stats = {'status': 'unknown', }
mnt_pt = mount_root(pool)
out, err, rc = run_command([BTRFS, 'scrub', 'status', '-R', mnt_pt])
if (len(out) > 1):
if (re.search('running', out[1]) is not None):
stats['status'] = 'running'
elif (re.search('finished', out[1]) is not None):
stats['status'] = 'finished'
dfields = out[1].split()[-1].split(':')
stats['duration'] = ((int(dfields[0]) * 60 * 60) +
(int(dfields[1]) * 60) + int(dfields[2]))
else:
return stats
else:
return stats
for l in out[2:-1]:
fields = l.strip().split(': ')
if (fields[0] == 'data_bytes_scrubbed'):
stats['kb_scrubbed'] = int(fields[1]) / 1024
else:
stats[fields[0]] = int(fields[1])
return stats
@task()
def start_balance(mnt_pt, force=False, convert=None):
cmd = ['btrfs', 'balance', 'start', mnt_pt]
if (force):
cmd.insert(3, '-f')
if (convert is not None):
cmd.insert(3, '-dconvert=%s' % convert)
cmd.insert(3, '-mconvert=%s' % convert)
run_command(cmd)
def balance_status(pool):
stats = {'status': 'unknown', }
mnt_pt = mount_root(pool)
out, err, rc = run_command([BTRFS, 'balance', 'status', mnt_pt],
throw=False)
if (len(out) > 0):
if (re.match('Balance', out[0]) is not None):
stats['status'] = 'running'
if ((len(out) > 1 and
re.search('chunks balanced', out[1]) is not None)):
percent_left = out[1].split()[-2][:-1]
try:
percent_left = int(percent_left)
stats['percent_done'] = 100 - percent_left
except:
pass
elif (re.match('No balance', out[0]) is not None):
stats['status'] = 'finished'
stats['percent_done'] = 100
return stats
def device_scan():
return run_command([BTRFS, 'device', 'scan'])
def btrfs_uuid(disk):
"""return uuid of a btrfs filesystem"""
o, e, rc = run_command(
[BTRFS, 'filesystem', 'show', '/dev/disk/by-id/%s' % disk])
return o[0].split()[3]
def set_property(mnt_pt, name, val, mount=True):
if (mount is not True or is_mounted(mnt_pt)):
cmd = [BTRFS, 'property', 'set', mnt_pt, name, val]
return run_command(cmd)
def get_snap(subvol_path, oldest=False, num_retain=None, regex=None):
if (not os.path.isdir(subvol_path)): return None
share_name = subvol_path.split('/')[-1]
cmd = [BTRFS, 'subvol', 'list', '-o', subvol_path]
o, e, rc = run_command(cmd)
snaps = {}
for l in o:
fields = l.split()
if (len(fields) > 0):
snap_fields = fields[-1].split('/')
if (len(snap_fields) != 3 or
snap_fields[1] != share_name):
#not the Share we are interested in.
continue
if (regex is not None and re.search(regex, snap_fields[2]) is None):
#regex not in the name
continue
snaps[int(fields[1])] = snap_fields[2]
snap_ids = sorted(snaps.keys())
if (oldest):
if(len(snap_ids) > num_retain):
return snaps[snap_ids[0]]
elif (len(snap_ids) > 0):
return snaps[snap_ids[-1]]
return None
def get_oldest_snap(subvol_path, num_retain, regex=None):
return get_snap(subvol_path, oldest=True, num_retain=num_retain, regex=regex)
def get_lastest_snap(subvol_path, regex=None):
return get_snap(subvol_path, regex=regex)
| gpl-3.0 | -7,107,197,275,919,733,000 | 35.123723 | 91 | 0.564996 | false |
nevins-b/lemur | lemur/plugins/lemur_kubernetes/plugin.py | 1 | 5423 | """
.. module: lemur.plugins.lemur_kubernetes.plugin
:platform: Unix
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
The plugin inserts certificates and the private key as Kubernetes secret that
can later be used to secure service endpoints running in Kubernetes pods
.. moduleauthor:: Mikhail Khodorovskiy <[email protected]>
"""
import base64
import urllib
import requests
import itertools
from lemur.certificates.models import Certificate
from lemur.plugins.bases import DestinationPlugin
DEFAULT_API_VERSION = 'v1'
def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data):
# _resolve_uri(k8s_base_uri, namespace, kind, name, api_ver=DEFAULT_API_VERSION)
url = _resolve_uri(k8s_base_uri, namespace, kind)
create_resp = k8s_api.post(url, json=data)
if 200 <= create_resp.status_code <= 299:
return None
elif create_resp.json()['reason'] != 'AlreadyExists':
return create_resp.content
update_resp = k8s_api.put(_resolve_uri(k8s_base_uri, namespace, kind, name), json=data)
if not 200 <= update_resp.status_code <= 299:
return update_resp.content
return None
def _resolve_ns(k8s_base_uri, namespace, api_ver=DEFAULT_API_VERSION,):
api_group = 'api'
if '/' in api_ver:
api_group = 'apis'
return '{base}/{api_group}/{api_ver}/namespaces'.format(base=k8s_base_uri, api_group=api_group, api_ver=api_ver) + ('/' + namespace if namespace else '')
def _resolve_uri(k8s_base_uri, namespace, kind, name=None, api_ver=DEFAULT_API_VERSION):
if not namespace:
namespace = 'default'
return "/".join(itertools.chain.from_iterable([
(_resolve_ns(k8s_base_uri, namespace, api_ver=api_ver),),
((kind + 's').lower(),),
(name,) if name else (),
]))
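# Worked example (hypothetical values):
#   _resolve_uri('https://k8s.example.com', 'default', 'secret', 'certs-foo')
#   -> 'https://k8s.example.com/api/v1/namespaces/default/secrets/certs-foo'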
class KubernetesDestinationPlugin(DestinationPlugin):
title = 'Kubernetes'
slug = 'kubernetes-destination'
description = 'Allow the uploading of certificates to Kubernetes as secret'
author = 'Mikhail Khodorovskiy'
author_url = 'https://github.com/mik373/lemur'
options = [
{
'name': 'kubernetesURL',
'type': 'str',
'required': True,
'validation': '@(https?|http)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?$@iS',
'helpMessage': 'Must be a valid Kubernetes server URL!',
},
{
'name': 'kubernetesAuthToken',
'type': 'str',
'required': True,
'validation': '/^$|\s+/',
'helpMessage': 'Must be a valid Kubernetes server Token!',
},
{
'name': 'kubernetesServerCertificate',
'type': 'str',
'required': True,
'validation': '/^$|\s+/',
'helpMessage': 'Must be a valid Kubernetes server Certificate!',
},
{
'name': 'kubernetesNamespace',
'type': 'str',
'required': True,
'validation': '/^$|\s+/',
'helpMessage': 'Must be a valid Kubernetes Namespace!',
},
]
def __init__(self, *args, **kwargs):
super(KubernetesDestinationPlugin, self).__init__(*args, **kwargs)
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
k8_bearer = self.get_option('kubernetesAuthToken', options)
k8_cert = self.get_option('kubernetesServerCertificate', options)
k8_namespace = self.get_option('kubernetesNamespace', options)
k8_base_uri = self.get_option('kubernetesURL', options)
k8s_api = K8sSession(k8_bearer, k8_cert)
cert = Certificate(body=body)
# in the future once runtime properties can be passed-in - use passed-in secret name
secret_name = 'certs-' + urllib.quote_plus(cert.name)
err = ensure_resource(k8s_api, k8s_base_uri=k8_base_uri, namespace=k8_namespace, kind="secret", name=secret_name, data={
'apiVersion': 'v1',
'kind': 'Secret',
'metadata': {
'name': secret_name,
},
'data': {
'combined.pem': base64.b64encode(body + private_key),
'ca.crt': base64.b64encode(cert_chain),
'service.key': base64.b64encode(private_key),
'service.crt': base64.b64encode(body),
}
})
if err is not None:
raise Exception("Error uploading secret: " + err)
class K8sSession(requests.Session):
def __init__(self, bearer, cert):
super(K8sSession, self).__init__()
self.headers.update({
'Authorization': 'Bearer %s' % bearer
})
k8_ca = '/tmp/k8.cert'
with open(k8_ca, "w") as text_file:
text_file.write(cert)
self.verify = k8_ca
def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None, timeout=30, allow_redirects=True, proxies=None,
hooks=None, stream=None, verify=None, cert=None, json=None):
"""
This method overrides the default timeout to be 30s.
"""
return super(K8sSession, self).request(method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream,
verify, cert, json)
| apache-2.0 | -2,376,839,072,637,838,300 | 33.987097 | 157 | 0.592476 | false |
jfinkels/turingmachine | test_turingmachine.py | 1 | 11279 | # test_turingmachine.py - tests for turingmachine.py
#
# Copyright 2014 Jeffrey Finkelstein.
#
# This file is part of turingmachine.
#
# turingmachine is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# turingmachine is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# turingmachine. If not, see <http://www.gnu.org/licenses/>.
"""Provides tests for :mod:`turingmachine`."""
from collections import defaultdict
import logging
import unittest
from turingmachine import BadSymbol
from turingmachine import L
from turingmachine import logger
from turingmachine import R
from turingmachine import TuringMachine
from turingmachine import UnknownSymbol
from turingmachine import UnknownState
class TestTuringMachine(unittest.TestCase):
"""Unit tests for the :class:`turingmachine.TuringMachine` class."""
def setUp(self):
"""Disable verbose logging for tests."""
self.level = logger.getEffectiveLevel()
logger.setLevel(logging.INFO)
def tearDown(self):
"""Restore the original logging level for the :mod:`turingmachine`
module.
"""
logger.setLevel(self.level)
def test_unknown_symbol(self):
"""Tests that an error is raised when an unknown symbol (that is, a
symbol for which there is no entry in the transition function) is
encountered in the string.
"""
states = set(range(4))
initial_state = 0
accept_state = 2
reject_state = 3
transitions = {
# repeatedly move right, writing a bogus character as it goes
0: {
'0': (0, '0', R),
'1': (0, '?', R),
'_': (1, '_', L)
},
# accept on the last symbol
1: {
'0': (accept_state, '0', R),
'1': (accept_state, '1', R),
'_': (accept_state, '_', R)
},
#2: {}, # this is the accept state
#3: {} # this is the reject state
}
bogus_symbol = TuringMachine(states, initial_state, accept_state,
reject_state, transitions)
try:
bogus_symbol('_0101_')
assert False, 'Should have raised an exception'
except UnknownSymbol:
pass
def test_bad_symbol(self):
"""Tests that an error is raised when the user specifies a bad symbol
in the transition table.
"""
states = set(range(3))
initial_state = 0
accept_state = 1
reject_state = 2
transitions = {
0: {
'0': (0, '', R),
'1': (0, '', R),
'_': (1, '', R)
}
}
bad_symbol = TuringMachine(states, initial_state, accept_state,
reject_state, transitions)
try:
bad_symbol('_0_')
assert False, 'Should have raised an exception'
except BadSymbol:
pass
def test_bad_state(self):
"""Tests that an error is raised when the user specifies a bad state
in the transition table.
"""
bad_state = TuringMachine(set(range(3)), 0, 1, 2, {})
try:
bad_state('__')
assert False, 'Should have raised an exception'
except UnknownState:
pass
def test_move_left_and_right(self):
"""Tests the execution of a Turing machine that simply moves left and
right.
"""
states = set(range(17))
initial_state = 0
accept_state = 15
reject_state = 16
# Move left five cells then move right ten cells. Always accept.
transition = defaultdict(dict)
for state in range(5):
for symbol in '0', '1', '_':
transition[state][symbol] = (state + 1, symbol, L)
for state in range(5, 15):
for symbol in '0', '1', '_':
transition[state][symbol] = (state + 1, symbol, R)
move_left_right = TuringMachine(states, initial_state, accept_state,
reject_state, transition)
for s in '', '010', '000000':
assert move_left_right('_' + s + '_')
def test_is_even(self):
"""Tests the execution of a Turing machine that computes whether a
binary string represents an even number.
This Turing machine simply moves right repeatedly until it finds the
end of the input string, then checks if the rightmost (that is, least
significant) bit is a 0.
"""
states = set(range(4))
initial_state = 0
accept_state = 2
reject_state = 3
transition = {
# this state represents moving right all the way to the end
0: {
'0': (0, '0', R),
'1': (0, '1', R),
'_': (1, '_', L),
},
# this state represents looking at the rightmost symbol
1: {
'0': (accept_state, '0', L),
'1': (reject_state, '1', L),
'_': (reject_state, '_', R),
}
#2: {} # this is the accept state
#3: {} # this is the reject state
}
is_even = TuringMachine(states, initial_state, accept_state,
reject_state, transition)
for s in '011010', '0', '1100010':
assert is_even('_' + s + '_')
for s in '1101', '1', '', '01001':
assert not is_even('_' + s + '_')
def test_parity(self):
"""Tests the execution of a Turing machine that computes the parity of
a binary string, that is, whether the number of ones in the binary
strings is odd.
This Turing machine oscillates between two states, one of which
represents having seen an even number of 1s, the other an odd number.
Every time it sees a 1, it switches which of those two states it is in.
"""
states = set(range(4))
initial_state = 0
accept_state = 2
reject_state = 3
# begin in pre-reject state
# repeat:
# if reading a 1:
# if in pre-accept state, move to pre-reject state
# if in pre-reject state, move to pre-accept state
# move right
# if in pre-accept, accept
# if in pre-reject, reject
transition = {
# this state represents having read an even number of ones
0: {
'0': (0, '0', R),
'1': (1, '1', R),
'_': (reject_state, '_', L),
},
# this state represents having read an odd number of ones
1: {
'0': (1, '0', R),
'1': (0, '1', R),
'_': (accept_state, '_', R),
}
}
parity = TuringMachine(states, initial_state, accept_state,
reject_state, transition)
for s in '011010', '1', '1101011':
assert parity('_' + s + '_')
for s in '1001', '0', '', '001001':
assert not parity('_' + s + '_')
def test_is_palindrome(self):
"""Tests the execution of a Turing machine that computes whether a
binary string is a palindrome.
This Turing machine operates recursively. If the input string is an
empty string or a single bit, it accepts. If the input string has
length two or more, it determines if the first and last bits of the
input string are the same, then turns each of them into a blank. It
then recurses and runs the same algorithm on the new, smaller string.
"""
states = set(range(10))
initial_state = 0
accept_state = 8
reject_state = 9
# This is a description of the implementation of the Turing machine
# that decides whether a binary string is a palindrome.
#
# repeat the following steps:
# read a symbol
# if _: accept
# if 0:
# write blank
# move right
# if _: accept (because it is a single 0)
# otherwise repeatedly move right to end
# at terminal blank move left
# if 1: reject
# else: write blank
# if 1:
# write blank
# move right
# if _: accept (because it is a single 1)
# repeatedly move right to end
# at terminal blank move left
# if 0: reject
# else: write blank
# repeatedly move left to end
transition = {
# read the first symbol
0: {
'0': (6, '_', R),
'1': (7, '_', R),
'_': (accept_state, '_', R)
},
# string starts with 0; move repeatedly right
1: {
'0': (1, '0', R),
'1': (1, '1', R),
'_': (3, '_', L)
},
# string starts with 1; move repeatedly right
2: {
'0': (2, '0', R),
'1': (2, '1', R),
'_': (4, '_', L)
},
# rightmost symbol should be a 0
3: {
'0': (5, '_', L),
'1': (reject_state, '1', L),
'_': (reject_state, '_', L) # this situation is unreachable
},
# rightmost symbol should be a 1
4: {
'0': (reject_state, '0', L),
'1': (5, '_', L),
'_': (reject_state, '_', L) # this situation is unreachable
},
# repeatedly move left to the beginning of the string
5: {
'0': (5, '0', L),
'1': (5, '1', L),
'_': (0, '_', R)
},
# check if there is only one symbol left
6: {
'0': (1, '0', R),
'1': (1, '1', R),
'_': (accept_state, '_', L)
},
# check if there is only one symbol left
7: {
'0': (2, '0', R),
'1': (2, '1', R),
'_': (accept_state, '_', L)
}
#8: {} # this is the accept state
#9: {} # this is the reject state
}
is_palindrome = TuringMachine(states, initial_state, accept_state,
reject_state, transition)
for s in '', '0', '010', '111010111':
assert is_palindrome('_' + s + '_')
for s in '01', '110', '111100001':
assert not is_palindrome('_' + s + '_')
| gpl-3.0 | 6,797,504,506,111,407,000 | 34.806349 | 79 | 0.494193 | false |
FujitsuEnablingSoftwareTechnologyGmbH/tempest | tempest/api/monitoring/test_monitoring_alarming_api.py | 1 | 38475 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import datetime
from tempest.api.monitoring import base
from tempest_lib.common.utils import data_utils
from tempest_lib import exceptions as lib_exc
from tempest import test
import json
class MonitoringAlarmingAPITestJSON(base.BaseMonitoringTest):
_interface = 'json'
@classmethod
def setUpClass(cls):
super(MonitoringAlarmingAPITestJSON, cls).setUpClass()
# cls.rule = {'expression':'cpu.idle_perc > 0'}
for i in range(1):
cls.create_alarm_definition(expression="cpu.idle_perc >= 10")
@test.attr(type="gate")
def test_alarm_definition_list(self):
# Test to check if all alarms definitions are listed
params = {}
body = self.monitoring_client.list_alarm_definitions(params)
self.assertEqual('200', body.response['status'])
response = json.loads(body.data)
self.assertGreater(len(response['elements']), 0, "Metric list is empty.")
# Verify created alarm in the list
fetched_ids = [a['id'] for a in response['elements']]
missing_alarms = [a for a in self.alarm_def_ids if a not in fetched_ids]
self.assertEqual(0, len(missing_alarms),
"Failed to find the following created alarm(s)"
" in a fetched list: %s" %
', '.join(str(a) for a in missing_alarms))
@test.attr(type="gate")
def test_create_update_get_delete_alarm_without_notification(self):
# Test to check if a new alarm definition is created
# Create an alarm definition
count = 0
alarm_def_name = data_utils.rand_name('test_monasca_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name, expression="max(cpu.idle_perc) > 0")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("max(cpu.idle_perc) > 0", body['expression'])
# Get and verify details of an alarm definition
body = self.monitoring_client.get_alarm_definition(alarm_def_id)
self.assertEqual(alarm_def_name, body['name'])
self.assertEqual("max(cpu.idle_perc) > 0", body['expression'])
updated_alarm_def_name = data_utils.rand_name('test_monasca_alarm_definition')
# Update alarm definition
body = self.monitoring_client.update_alarm_definition(alarm_def_id,
name=updated_alarm_def_name,
expression="max(cpu.idle_perc) > 0",
actions_enabled="true")
self.assertEqual(updated_alarm_def_name, body['name'])
alarm_def_id = body['id']
# List alarms based on alarm_definition_id
while count < 60 :
body = self.monitoring_client.list_alarms(alarm_definition_id=alarm_def_id)
if len(body['elements']) > 0:
break
time.sleep(2)
count += 1
self.assertGreater(len(body['elements']), 0, "Alarm is not created.")
alarm_id = body['elements'][0]['id']
body = self.monitoring_client.update_alarm(alarm_id, state="UNDETERMINED", lifecycle_state="OPEN", link="http://pagerduty.com/")
self.assertEqual("UNDETERMINED", body['state'])
body = self.monitoring_client.get_alarm(alarm_id)
self.assertEqual("UNDETERMINED", body['state'])
# Delete alarm-definition and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
@test.attr(type="gate")
def test_update_alarm_definition(self):
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name, expression="cpu.idle_perc > 0")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
#Update alarm
alarm_def_name = data_utils.rand_name('monitoring_alarm_def_update')
body = self.monitoring_client.update_alarm_definition(
alarm_def_id,
name = alarm_def_name,
expression = "cpu.idle_perc < 0",
actions_enabled = 'true',
)
self.assertEqual(alarm_def_name, body['name'])
self.assertEqual("cpu.idle_perc < 0", body['expression'])
# Get and verify details of an alarm definition after update
body = self.monitoring_client.get_alarm_definition(alarm_def_id)
self.assertEqual(alarm_def_name, body['name'])
self.assertEqual("cpu.idle_perc < 0", body['expression'])
# Delete alarm defintion and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
@test.attr(type="gate")
def test_create_alarm_definition_with_notification(self):
# Test case to create alarm definition with notification method
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
# Replace below email with valid email address as required.
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="cpu.idle_perc > 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
# Delete alarm definition and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_notification, notification_id)
@test.attr(type="gate")
def test_create_alarm_definition_with_multiple_notification(self):
# Test case to create alarm definition with notification method
notification_name1 = data_utils.rand_name('notification-')
notification_type1 = 'EMAIL'
# Replace below email with valid email address as required.
address1 = 'root@localhost'
notification_name2 = data_utils.rand_name('notification-')
notification_type2 = 'PAGERDUTY'
#Replace below with valid Pagerduty API key
address2 = '34kdfklskdjerer3434'
body = self.monitoring_client.create_notification(name=notification_name1, type=notification_type1, address=address1)
self.assertEqual(notification_name1, body['name'])
notification_id1 = body['id']
body = self.monitoring_client.create_notification(name=notification_name2, type=notification_type2, address=address2)
self.assertEqual(notification_name2, body['name'])
notification_id2 = body['id']
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="cpu.idle_perc > 0",
alarm_actions = [notification_id1, notification_id2],
ok_actions = [notification_id1, notification_id2],
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
# Delete alarm definition and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id1)
# Delete notification
self.monitoring_client.delete_notification(notification_id2)
@test.attr(type="gate")
def test_update_notification_in_alarm_definition(self):
# Test case to create alarm definition with notification method
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name, expression="cpu.idle_perc > 0")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
#Update alarm
alarm_def_name = data_utils.rand_name('monitoring_alarm_update')
body = self.monitoring_client.update_alarm_definition(
alarm_def_id,
name = alarm_def_name,
actions_enabled = 'true',
expression = "cpu.idle_perc < 0",
alarm_actions = notification_id,
ok_actions = notification_id
)
self.assertEqual(alarm_def_name, body['name'])
self.assertEqual("cpu.idle_perc < 0", body['expression'])
# Get and verify details of an alarm after update
body = self.monitoring_client.get_alarm_definition(alarm_def_id)
self.assertEqual(alarm_def_name, body['name'])
# Delete alarm and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@test.attr(type="gate")
def test_create_alarm_definition_with_url_in_expression(self):
# Test case to create alarm definition with notification method
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="avg(cpu.idle_perc{url=https://www.google.com}) gt 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("avg(cpu.idle_perc{url=https://www.google.com}) gt 0", body['expression'])
# Delete alarm and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@test.attr(type="gate")
def test_create_alarm_definition_with_specialchars_in_expression(self):
# Test case to create alarm with notification method
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm
alarm_def_name = data_utils.rand_name('monitoring_alarm')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="avg(cpu.idle_perc{dev=/usr/local/bin}) gt 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("avg(cpu.idle_perc{dev=/usr/local/bin}) gt 0", body['expression'])
# Delete alarm and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@test.attr(type="gate")
def test_create_alarm_with_specialchar_in_expression(self):
# Test case to create alarm with notification method
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm
alarm_def_name = data_utils.rand_name('monitoring_alarm')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="avg(cpu.idle_perc{dev=!@#$%^*}) gt 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("avg(cpu.idle_perc{dev=!@#$%^*}) gt 0", body['expression'])
# Delete alarm and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@test.attr(type="gate")
def test_list_alarm_by_def_id(self):
# Test case to create alarm definition with notification method
count = 0
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="cpu.idle_perc > 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
# List alarms based on alarm_definition_id
while count < 60 :
body = self.monitoring_client.list_alarms(alarm_definition_id=alarm_def_id)
if len(body['elements']) > 0:
break
time.sleep(2)
count += 1
self.assertGreater(len(body['elements']), 0, "Alarm is not created.")
alarm_id = body['elements'][0]['id']
self.assertEqual('200', body.response['status'])
# Delete alarm definition and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@test.attr(type="gate")
def test_list_alarm_by_metric_name(self):
# Test case to list alarm by metric name
count = 0
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="cpu.idle_perc > 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
# List alarms based on alarm_definition_id
while count < 60:
body = self.monitoring_client.list_alarms(alarm_definition_id=alarm_def_id)
if len(body['elements']) > 0:
break
time.sleep(2)
count += 1
self.assertGreater(len(body['elements']), 0, "Alarm is not created.")
body = self.monitoring_client.list_alarms(metric_name="cpu.idle_perc")
self.assertEqual('200', body.response['status'])
alarm_name = body['elements'][0]['metrics'][0]['name']
self.assertEqual('cpu.idle_perc', alarm_name)
# Delete alarm definition and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@test.attr(type="gate")
def test_list_alarm_by_metric_name_and_dimension(self):
# Test case to list alarm by metric name and dimension
count = 0
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="cpu.idle_perc > 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
# List alarms based on alarm_definition_id
while count < 60 :
body = self.monitoring_client.list_alarms(alarm_definition_id=alarm_def_id)
if len(body['elements']) > 0:
break
time.sleep(2)
count += 1
self.assertGreater(len(body['elements']), 0, "Alarm is not created.")
# List alarm using metric name
body = self.monitoring_client.get_alarms_by_metric_dimensions("cpu.idle_perc","service:monitoring")
self.assertEqual('200', body.response['status'])
alarm_name = body['elements'][0]['metrics'][0]['name']
self.assertEqual('cpu.idle_perc', alarm_name)
# Delete alarm definition and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@test.attr(type="gate")
def test_list_alarm_by_state(self):
# Test case to list alarms filtered by alarm state
count = 0
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="cpu.idle_perc > 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
# List alarms based on alarm_definition_id
while count < 60:
body = self.monitoring_client.list_alarms(alarm_definition_id=alarm_def_id)
if len(body['elements']) > 0:
break
time.sleep(2)
count += 1
self.assertGreater(len(body['elements']), 0, "Alarm is not created.")
# List alarm using state
body = self.monitoring_client.get_alarms_by_state(alarm_def_id, "ALARM")
self.assertEqual('200', body.response['status'])
# Delete alarm definition and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@test.attr(type="gate")
def test_get_delete_the_specified_alarm(self):
# Test case to get and delete a specific alarm
count = 0
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="cpu.idle_perc > 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
# List alarms based on alarm_definition_id
while count < 60:
body = self.monitoring_client.list_alarms(alarm_definition_id=alarm_def_id)
if len(body['elements']) > 0:
break
time.sleep(2)
count += 1
self.assertGreater(len(body['elements']), 0, "Alarm is not created.")
# List alarm using alarm def id
body = self.monitoring_client.get_alarms_by_def_id(alarm_def_id)
self.assertEqual('200', body.response['status'])
alarm_id = body['elements'][0]['id']
# List specific alarm
body = self.monitoring_client.get_alarm(alarm_id)
self.assertEqual('200', body.response['status'])
# Delete alarm and verify if deleted
self.monitoring_client.delete_alarm(alarm_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm, alarm_id)
# Delete alarm definition and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@test.attr(type="gate")
def test_update_the_specified_alarm(self):
# Test to update a specified alarm
count = 0
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="cpu.idle_perc > 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
# List alarms based on alarm_definition_id
while count < 60:
body = self.monitoring_client.list_alarms(alarm_definition_id=alarm_def_id)
if len(body['elements']) > 0:
break
time.sleep(2)
count += 1
self.assertGreater(len(body['elements']), 0, "Alarm is not created.")
# List alarm using alarm def id
body = self.monitoring_client.get_alarms_by_def_id(alarm_def_id)
self.assertEqual('200', body.response['status'])
alarm_id = body['elements'][0]['id']
# Update specific alarm
body = self.monitoring_client.update_alarm(alarm_id, state="OK", lifecycle_state="OPEN", link="http://pagerduty.com/")
self.assertEqual('200', body.response['status'])
# Delete alarm definition and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@test.attr(type="gate")
def test_alarms_history_state(self):
# Test case to read alarm state history
count = 0
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="cpu.idle_perc > 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
# List alarms based on alarm_definition_id
while count < 60:
body = self.monitoring_client.list_alarms(alarm_definition_id=alarm_def_id)
if len(body['elements']) > 0:
break
time.sleep(2)
count += 1
self.assertGreater(len(body['elements']), 0, "Alarm is not created.")
# List alarm using alarm def id
body = self.monitoring_client.get_alarms_by_def_id(alarm_def_id)
self.assertEqual('200', body.response['status'])
alarm_id = body['elements'][0]['id']
# Update specific alarm
body = self.monitoring_client.update_alarm(alarm_id, state="OK", lifecycle_state="OPEN", link="http://pagerduty.com/")
self.assertEqual('200', body.response['status'])
# Get alarms state history
body = self.monitoring_client.get_alarms_state_history_by_dimensions("service:monitoring")
self.assertEqual('200', body.response['status'])
self.assertTrue('old_state' in body['elements'][0].keys(), body['elements'][0].keys())
self.assertTrue('new_state' in body['elements'][0].keys(), body['elements'][0].keys())
# Delete alarm definition and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@test.attr(type="gate")
def test_alarms_history_state_by_start_end_time(self):
# Test case to read alarm state history bounded by a start/end time
count = 0
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="cpu.idle_perc > 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
# List alarms based on alarm_definition_id
while count < 60:
body = self.monitoring_client.list_alarms(alarm_definition_id=alarm_def_id)
if len(body['elements']) > 0:
break
time.sleep(2)
count += 1
self.assertGreater(len(body['elements']), 0, "Alarm is not created.")
# List alarm using alarm def id
body = self.monitoring_client.get_alarms_by_def_id(alarm_def_id)
self.assertEqual('200', body.response['status'])
alarm_id = body['elements'][0]['id']
# Update specific alarm
body = self.monitoring_client.update_alarm(alarm_id, state="OK", lifecycle_state="OPEN", link="http://pagerduty.com/")
self.assertEqual('200', body.response['status'])
# Get alarms state history
current_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
current_time = current_time.replace(' ', 'T') + 'Z'
body = self.monitoring_client.get_alarms_state_history_by_dimensions_and_time(dimensions="service:monitoring", end_time=current_time)
self.assertEqual('200', body.response['status'])
# Delete alarm definition and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@test.attr(type="gate")
def test_alarm_history_state_by_alarm_id(self):
# Test case to read state history for a specific alarm
count = 0
notification_name = data_utils.rand_name('notification-')
notification_type = 'EMAIL'
u_address = 'root@localhost'
body = self.monitoring_client.create_notification(name=notification_name, type=notification_type, address=u_address)
self.assertEqual(notification_name, body['name'])
notification_id = body['id']
# Create an alarm definition
alarm_def_name = data_utils.rand_name('monitoring_alarm_definition')
body = self.monitoring_client.create_alarm_definition(name=alarm_def_name,
expression="cpu.idle_perc > 0",
alarm_actions = notification_id,
ok_actions = notification_id,
severity="LOW")
self.assertEqual(alarm_def_name, body['name'])
alarm_def_id = body['id']
self.assertEqual("cpu.idle_perc > 0", body['expression'])
# List alarms based on alarm_definition_id
while count < 60:
body = self.monitoring_client.list_alarms(alarm_definition_id=alarm_def_id)
if len(body['elements']) > 0:
break
time.sleep(2)
count += 1
self.assertGreater(len(body['elements']), 0, "Alarm is not created.")
# List alarm using alarm def id
body = self.monitoring_client.get_alarms_by_def_id(alarm_def_id)
self.assertEqual('200', body.response['status'])
alarm_id = body['elements'][0]['id']
# Update specific alarm
body = self.monitoring_client.update_alarm(alarm_id, state="OK", lifecycle_state="OPEN", link="http://pagerduty.com/")
self.assertEqual('200', body.response['status'])
# List alarm state history based on alarm_definition_id
while count < 60:
body = self.monitoring_client.get_alarm_state_history_by_alarm_id(alarm_id)
if len(body['elements']) > 0:
break
time.sleep(2)
count += 1
self.assertGreater(len(body['elements']), 0, "Alarm state is not updated.")
# Get alarms state history
self.assertEqual('200', body.response['status'])
self.assertTrue('old_state' in body['elements'][0].keys(), body['elements'][0].keys())
self.assertTrue('new_state' in body['elements'][0].keys(), body['elements'][0].keys())
# Delete alarm definition and verify if deleted
self.monitoring_client.delete_alarm_definition(alarm_def_id)
self.assertRaises(lib_exc.NotFound,
self.monitoring_client.get_alarm_definition, alarm_def_id)
# Delete notification
self.monitoring_client.delete_notification(notification_id)
@classmethod
def resource_cleanup(cls):
super(MonitoringAlarmingAPITestJSON, cls).resource_cleanup()
| apache-2.0 | 9,134,014,418,691,864,000 | 45.355422 | 141 | 0.591371 | false |
tensorflow/datasets | tensorflow_datasets/structured/dart/dart.py | 1 | 4869 | # coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""dart dataset."""
import json
import os
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
_CITATION = """
@article{radev2020dart,
title={DART: Open-Domain Structured Data Record to Text Generation},
author={Dragomir Radev and Rui Zhang and Amrit Rau and Abhinand Sivaprasad and Chiachun Hsieh and Nazneen Fatema Rajani and Xiangru Tang and Aadit Vyas and Neha Verma and Pranav Krishna and Yangxiaokang Liu and Nadia Irwanto and Jessica Pan and Faiaz Rahman and Ahmad Zaidi and Murori Mutuma and Yasin Tarabar and Ankit Gupta and Tao Yu and Yi Chern Tan and Xi Victoria Lin and Caiming Xiong and Richard Socher},
journal={arXiv preprint arXiv:2007.02871},
year={2020}
}
"""
_DESCRIPTION = """
DART (DAta Record to Text generation) contains RDF entity-relation annotated
with sentence descriptions that cover all facts in the triple set. DART was
constructed using existing datasets such as: WikiTableQuestions, WikiSQL, WebNLG
and Cleaned E2E. The tables from WikiTableQuestions and WikiSQL were transformed
to subject-predicate-object triples, and its text annotations were mainly
collected from MTurk. The meaningful representations in E2E were also
transformed to triples and its descriptions were used, some that couldn't be
transformed were dropped.
The dataset splits of E2E and WebNLG are kept, and for the WikiTableQuestions
and WikiSQL the Jaccard similarity is used to keep similar tables in the same
set (train/dev/test).
This dataset is constructed following a standardized table format.
"""
_URL = 'https://github.com/Yale-LILY/dart/archive/master.zip'
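# A single generated example has roughly this shape (hypothetical values):
# {'input_text': {'table': [{'column_header': 'subject', 'row_number': 0, 'content': 'Mars'},
#                           {'column_header': 'predicate', 'row_number': 0, 'content': 'distance from sun'},
#                           {'column_header': 'object', 'row_number': 0, 'content': '227.9 million km'}]},
#  'target_text': 'Mars is 227.9 million km from the sun.'}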
class Dart(tfds.core.GeneratorBasedBuilder):
"""DAta Record to Text Generation."""
VERSION = tfds.core.Version('0.1.0')
def _info(self):
return tfds.core.DatasetInfo(
builder=self,
# This is the description that will appear on the datasets page.
description=_DESCRIPTION,
# tfds.features.FeatureConnectors
features=tfds.features.FeaturesDict({
'input_text': {
'table': # Each row will be one triple fact.
tfds.features.Sequence({
# we'll only have subject/predicate/object headers
'column_header': tf.string,
'row_number': tf.int16,
'content': tf.string,
}),
},
'target_text': tf.string,
}),
supervised_keys=('input_text', 'target_text'),
# Homepage of the dataset for documentation
homepage='https://github.com/Yale-LILY/dart',
citation=_CITATION,
)
def _split_generators(self, dl_manager):
"""Returns SplitGenerators."""
extracted_path = os.path.join(
dl_manager.download_and_extract(_URL), 'dart-master', 'data', 'v1.1.1')
return {
tfds.Split.TRAIN:
self._generate_examples(
json_file=os.path.join(extracted_path,
'dart-v1.1.1-full-train.json')),
tfds.Split.VALIDATION:
self._generate_examples(
json_file=os.path.join(extracted_path,
'dart-v1.1.1-full-dev.json')),
tfds.Split.TEST:
self._generate_examples(
json_file=os.path.join(extracted_path,
'dart-v1.1.1-full-test.json')),
}
def _generate_examples(self, json_file):
"""Yields examples."""
with tf.io.gfile.GFile(json_file) as f:
data = json.load(f)
for entry_count, entry in enumerate(data):
table = []
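# Flatten each (subject, predicate, object) triple into three table rows that share the same row_number.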
for i, triple_set in enumerate(entry['tripleset']):
for header, content in zip(['subject', 'predicate', 'object'],
triple_set):
table.append({
'column_header': header,
'row_number': i,
'content': content,
})
for annotation_count, annotation in enumerate(entry['annotations']):
yield '{}_{}'.format(entry_count, annotation_count), {
'input_text': {
'table': table,
},
'target_text': annotation['text']
}
| apache-2.0 | 2,792,275,343,321,724,000 | 39.575 | 414 | 0.636681 | false |
Mitali-Sodhi/CodeLingo | Dataset/python/multi_interface.py | 1 | 6077 | import re
import time
import sys
import os
import copy
PARAMS = {}
NAME_PREFIX = 'vm_'
METRICS = {
'time' : 0,
'data' : {}
}
LAST_METRICS = copy.deepcopy(METRICS)
METRICS_CACHE_MAX = 5
INTERFACES = []
descriptors = []
stats_tab = {
"rx_bytes" : 0,
"rx_pkts" : 1,
"rx_errs" : 2,
"rx_drops" : 3,
"tx_bytes" : 8,
"tx_pkts" : 9,
"tx_errs" : 10,
"tx_drops" : 11,
}
# Where to get the stats from
net_stats_file = "/proc/net/dev"
def create_desc(skel, prop):
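"""Return a copy of the skeleton descriptor with the fields from prop overridden."""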
d = skel.copy()
for k,v in prop.iteritems():
d[k] = v
return d
def metric_init(params):
global descriptors
global INTERFACES
# INTERFACES = params.get('interfaces')
watch_interfaces = params.get('interfaces')
excluded_interfaces = params.get('excluded_interfaces')
get_interfaces(watch_interfaces,excluded_interfaces)
# print INTERFACES
time_max = 60
Desc_Skel = {
'name' : 'XXX',
'call_back' : get_delta,
'time_max' : 60,
'value_type' : 'float',
'format' : '%.4f',
'units' : '/s',
'slope' : 'both', # zero|positive|negative|both
'description' : 'XXX',
'groups' : 'network',
}
for dev in INTERFACES:
descriptors.append(create_desc(Desc_Skel, {
"name" : "rx_bytes_" + dev,
"units" : "bytes/sec",
"description" : "received bytes per sec",
}))
descriptors.append(create_desc(Desc_Skel, {
"name" : "rx_pkts_" + dev,
"units" : "pkts/sec",
"description" : "received packets per sec",
}))
descriptors.append(create_desc(Desc_Skel, {
"name" : "rx_errs_" + dev,
"units" : "pkts/sec",
"description" : "received error packets per sec",
}))
descriptors.append(create_desc(Desc_Skel, {
"name" : "rx_drops_" + dev,
"units" : "pkts/sec",
"description" : "receive packets dropped per sec",
}))
descriptors.append(create_desc(Desc_Skel, {
"name" : "tx_bytes_" + dev,
"units" : "bytes/sec",
"description" : "transmitted bytes per sec",
}))
descriptors.append(create_desc(Desc_Skel, {
"name" : "tx_pkts_" + dev,
"units" : "pkts/sec",
"description" : "transmitted packets per sec",
}))
descriptors.append(create_desc(Desc_Skel, {
"name" : "tx_errs_" + dev,
"units" : "pkts/sec",
"description" : "transmitted error packets per sec",
}))
descriptors.append(create_desc(Desc_Skel, {
"name" : "tx_drops_" + dev,
"units" : "pkts/sec",
"description" : "transmitted dropped packets per sec",
}))
return descriptors
def metric_cleanup():
'''Clean up the metric module.'''
pass
def get_interfaces(watch_interfaces, excluded_interfaces):
global INTERFACES
if_excluded = 0
# check if particular interfaces have been specifieid. Watch only those
if watch_interfaces != "":
INTERFACES = watch_interfaces.split(" ")
else:
if excluded_interfaces != "":
excluded_if_list = excluded_interfaces.split(" ")
f = open(net_stats_file, "r")
for line in f:
# Find only lines with :
if re.search(":", line):
a = line.split(":")
dev_name = a[0].lstrip()
# Determine if interface is excluded by name or regex
for ex in excluded_if_list:
if re.match(ex,dev_name):
if_excluded = 1
if not if_excluded:
INTERFACES.append(dev_name)
if_excluded = 0
return 0
def get_metrics():
"""Return all metrics"""
global METRICS, LAST_METRICS
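# Re-read /proc/net/dev at most once every METRICS_CACHE_MAX seconds; otherwise reuse the cached sample pair.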
if (time.time() - METRICS['time']) > METRICS_CACHE_MAX:
try:
file = open(net_stats_file, 'r')
except IOError:
return 0
# convert to dict
metrics = {}
for line in file:
if re.search(":", line):
a = line.split(":")
dev_name = a[0].lstrip()
metrics[dev_name] = re.split("\s+", a[1].lstrip())
# update cache
LAST_METRICS = copy.deepcopy(METRICS)
METRICS = {
'time': time.time(),
'data': metrics
}
return [METRICS, LAST_METRICS]
def get_delta(name):
"""Return change over time for the requested metric"""
# get metrics
[curr_metrics, last_metrics] = get_metrics()
# Names will be in the format of tx/rx underscore metric_name underscore interface
# e.g. tx_bytes_eth0
parts = name.split("_")
iface = parts[2]
name = parts[0] + "_" + parts[1]
index = stats_tab[name]
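# Per-second rate: difference between the two cached counter samples divided by the elapsed time between them.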
try:
delta = (float(curr_metrics['data'][iface][index]) - float(last_metrics['data'][iface][index])) /(curr_metrics['time'] - last_metrics['time'])
if delta < 0:
print name + " is less than 0"
delta = 0
except KeyError:
delta = 0.0
return delta
if __name__ == '__main__':
try:
params = {
"interfaces": "",
"excluded_interfaces": "dummy",
"debug" : True,
}
metric_init(params)
while True:
for d in descriptors:
v = d['call_back'](d['name'])
print ('value for %s is '+d['format']) % (d['name'], v)
time.sleep(5)
except StandardError:
print sys.exc_info()[0]
os._exit(1)
| mit | 4,746,327,573,861,715,000 | 27.530516 | 148 | 0.481981 | false |
Beyond-Imagination/BlubBlub | RaspberryPI/django-env/bin/miniterm.py | 1 | 35109 | #!/home/pi/Django/bin/python3
#
# Very simple serial terminal
#
# This file is part of pySerial. https://github.com/pyserial/pyserial
# (C)2002-2015 Chris Liechti <[email protected]>
#
# SPDX-License-Identifier: BSD-3-Clause
import codecs
import os
import sys
import threading
import serial
from serial.tools.list_ports import comports
from serial.tools import hexlify_codec
# pylint: disable=wrong-import-order,wrong-import-position
codecs.register(lambda c: hexlify_codec.getregentry() if c == 'hexlify' else None)
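# Register the custom 'hexlify' codec so it can be selected like any other encoding (e.g. --encoding hexlify).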
try:
raw_input
except NameError:
# pylint: disable=redefined-builtin,invalid-name
raw_input = input # in python3 it's "raw"
unichr = chr
def key_description(character):
"""generate a readable description for a key"""
ascii_code = ord(character)
if ascii_code < 32:
return 'Ctrl+{:c}'.format(ord('@') + ascii_code)
else:
return repr(character)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class ConsoleBase(object):
"""OS abstraction for console (input/output codec, no echo)"""
def __init__(self):
if sys.version_info >= (3, 0):
self.byte_output = sys.stdout.buffer
else:
self.byte_output = sys.stdout
self.output = sys.stdout
def setup(self):
"""Set console to read single characters, no echo"""
def cleanup(self):
"""Restore default console settings"""
def getkey(self):
"""Read a single key from the console"""
return None
def write_bytes(self, byte_string):
"""Write bytes (already encoded)"""
self.byte_output.write(byte_string)
self.byte_output.flush()
def write(self, text):
"""Write string"""
self.output.write(text)
self.output.flush()
def cancel(self):
"""Cancel getkey operation"""
# - - - - - - - - - - - - - - - - - - - - - - - -
# context manager:
# switch terminal temporary to normal mode (e.g. to get user input)
def __enter__(self):
self.cleanup()
return self
def __exit__(self, *args, **kwargs):
self.setup()
if os.name == 'nt': # noqa
import msvcrt
import ctypes
class Out(object):
"""file-like wrapper that uses os.write"""
def __init__(self, fd):
self.fd = fd
def flush(self):
pass
def write(self, s):
os.write(self.fd, s)
class Console(ConsoleBase):
def __init__(self):
super(Console, self).__init__()
self._saved_ocp = ctypes.windll.kernel32.GetConsoleOutputCP()
self._saved_icp = ctypes.windll.kernel32.GetConsoleCP()
ctypes.windll.kernel32.SetConsoleOutputCP(65001)
ctypes.windll.kernel32.SetConsoleCP(65001)
self.output = codecs.getwriter('UTF-8')(Out(sys.stdout.fileno()), 'replace')
# the change of the code page is not propagated to Python, manually fix it
sys.stderr = codecs.getwriter('UTF-8')(Out(sys.stderr.fileno()), 'replace')
sys.stdout = self.output
self.output.encoding = 'UTF-8' # needed for input
def __del__(self):
ctypes.windll.kernel32.SetConsoleOutputCP(self._saved_ocp)
ctypes.windll.kernel32.SetConsoleCP(self._saved_icp)
def getkey(self):
while True:
z = msvcrt.getwch()
if z == unichr(13):
return unichr(10)
elif z in (unichr(0), unichr(0x0e)): # function keys, ignore
msvcrt.getwch()
else:
return z
def cancel(self):
# CancelIo, CancelSynchronousIo do not seem to work when using
# getwch, so instead, send a key to the window with the console
hwnd = ctypes.windll.kernel32.GetConsoleWindow()
ctypes.windll.user32.PostMessageA(hwnd, 0x100, 0x0d, 0)
elif os.name == 'posix':
import atexit
import termios
import fcntl
class Console(ConsoleBase):
def __init__(self):
super(Console, self).__init__()
self.fd = sys.stdin.fileno()
self.old = termios.tcgetattr(self.fd)
atexit.register(self.cleanup)
if sys.version_info < (3, 0):
self.enc_stdin = codecs.getreader(sys.stdin.encoding)(sys.stdin)
else:
self.enc_stdin = sys.stdin
def setup(self):
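# Switch the terminal to raw-ish mode: no line buffering (ICANON), no echo, no signal keys (ISIG); read returns after each single byte.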
new = termios.tcgetattr(self.fd)
new[3] = new[3] & ~termios.ICANON & ~termios.ECHO & ~termios.ISIG
new[6][termios.VMIN] = 1
new[6][termios.VTIME] = 0
termios.tcsetattr(self.fd, termios.TCSANOW, new)
def getkey(self):
c = self.enc_stdin.read(1)
if c == unichr(0x7f):
c = unichr(8) # map the BS key (which yields DEL) to backspace
return c
def cancel(self):
fcntl.ioctl(self.fd, termios.TIOCSTI, b'\0')
def cleanup(self):
termios.tcsetattr(self.fd, termios.TCSAFLUSH, self.old)
else:
raise NotImplementedError(
'Sorry no implementation for your platform ({}) available.'.format(sys.platform))
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class Transform(object):
"""do-nothing: forward all data unchanged"""
def rx(self, text):
"""text received from serial port"""
return text
def tx(self, text):
"""text to be sent to serial port"""
return text
def echo(self, text):
"""text to be sent but displayed on console"""
return text
class CRLF(Transform):
"""ENTER sends CR+LF"""
def tx(self, text):
return text.replace('\n', '\r\n')
class CR(Transform):
"""ENTER sends CR"""
def rx(self, text):
return text.replace('\r', '\n')
def tx(self, text):
return text.replace('\n', '\r')
class LF(Transform):
"""ENTER sends LF"""
class NoTerminal(Transform):
"""remove typical terminal control codes from input"""
REPLACEMENT_MAP = dict((x, 0x2400 + x) for x in range(32) if unichr(x) not in '\r\n\b\t')
REPLACEMENT_MAP.update(
{
0x7F: 0x2421, # DEL
0x9B: 0x2425, # CSI
})
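# rx() renders each control code as its Unicode "Control Pictures" symbol (U+2400 + code) so it stays visible instead of acting on the terminal.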
def rx(self, text):
return text.translate(self.REPLACEMENT_MAP)
echo = rx
class NoControls(NoTerminal):
"""Remove all control codes, incl. CR+LF"""
REPLACEMENT_MAP = dict((x, 0x2400 + x) for x in range(32))
REPLACEMENT_MAP.update(
{
0x20: 0x2423, # visual space
0x7F: 0x2421, # DEL
0x9B: 0x2425, # CSI
})
class Printable(Transform):
"""Show decimal code for all non-ASCII characters and replace most control codes"""
def rx(self, text):
r = []
for c in text:
if ' ' <= c < '\x7f' or c in '\r\n\b\t':
r.append(c)
elif c < ' ':
r.append(unichr(0x2400 + ord(c)))
else:
r.extend(unichr(0x2080 + ord(d) - 48) for d in '{:d}'.format(ord(c)))
r.append(' ')
return ''.join(r)
echo = rx
class Colorize(Transform):
"""Apply different colors for received and echo"""
def __init__(self):
# XXX make it configurable, use colorama?
self.input_color = '\x1b[37m'
self.echo_color = '\x1b[31m'
def rx(self, text):
return self.input_color + text
def echo(self, text):
return self.echo_color + text
class DebugIO(Transform):
"""Print what is sent and received"""
def rx(self, text):
sys.stderr.write(' [RX:{}] '.format(repr(text)))
sys.stderr.flush()
return text
def tx(self, text):
sys.stderr.write(' [TX:{}] '.format(repr(text)))
sys.stderr.flush()
return text
# other ideas:
# - add date/time for each newline
# - insert newline after: a) timeout b) packet end character
EOL_TRANSFORMATIONS = {
'crlf': CRLF,
'cr': CR,
'lf': LF,
}
TRANSFORMATIONS = {
'direct': Transform, # no transformation
'default': NoTerminal,
'nocontrol': NoControls,
'printable': Printable,
'colorize': Colorize,
'debug': DebugIO,
}
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def ask_for_port():
"""\
Show a list of ports and ask the user for a choice. To make selection
easier on systems with long device names, also allow the input of an
index.
"""
sys.stderr.write('\n--- Available ports:\n')
ports = []
for n, (port, desc, hwid) in enumerate(sorted(comports()), 1):
sys.stderr.write('--- {:2}: {:20} {!r}\n'.format(n, port, desc))
ports.append(port)
while True:
port = raw_input('--- Enter port index or full name: ')
try:
index = int(port) - 1
if not 0 <= index < len(ports):
sys.stderr.write('--- Invalid index!\n')
continue
except ValueError:
pass
else:
port = ports[index]
return port
class Miniterm(object):
"""\
Terminal application. Copy data from serial port to console and vice versa.
Handle special keys from the console to show menu etc.
"""
def __init__(self, serial_instance, echo=False, eol='crlf', filters=()):
self.console = Console()
self.serial = serial_instance
self.echo = echo
self.raw = False
self.input_encoding = 'UTF-8'
self.output_encoding = 'UTF-8'
self.eol = eol
self.filters = filters
self.update_transformations()
self.exit_character = 0x1d # GS/CTRL+]
self.menu_character = 0x14 # Menu: CTRL+T
self.alive = None
self._reader_alive = None
self.receiver_thread = None
self.rx_decoder = None
self.tx_decoder = None
def _start_reader(self):
"""Start reader thread"""
self._reader_alive = True
# start serial->console thread
self.receiver_thread = threading.Thread(target=self.reader, name='rx')
self.receiver_thread.daemon = True
self.receiver_thread.start()
def _stop_reader(self):
"""Stop reader thread only, wait for clean exit of thread"""
self._reader_alive = False
if hasattr(self.serial, 'cancel_read'):
self.serial.cancel_read()
self.receiver_thread.join()
def start(self):
"""start worker threads"""
self.alive = True
self._start_reader()
# enter console->serial loop
self.transmitter_thread = threading.Thread(target=self.writer, name='tx')
self.transmitter_thread.daemon = True
self.transmitter_thread.start()
self.console.setup()
def stop(self):
"""set flag to stop worker threads"""
self.alive = False
def join(self, transmit_only=False):
"""wait for worker threads to terminate"""
self.transmitter_thread.join()
if not transmit_only:
if hasattr(self.serial, 'cancel_read'):
self.serial.cancel_read()
self.receiver_thread.join()
def close(self):
self.serial.close()
def update_transformations(self):
"""take list of transformation classes and instantiate them for rx and tx"""
transformations = [EOL_TRANSFORMATIONS[self.eol]] + [TRANSFORMATIONS[f]
for f in self.filters]
self.tx_transformations = [t() for t in transformations]
self.rx_transformations = list(reversed(self.tx_transformations))
def set_rx_encoding(self, encoding, errors='replace'):
"""set encoding for received data"""
self.input_encoding = encoding
self.rx_decoder = codecs.getincrementaldecoder(encoding)(errors)
def set_tx_encoding(self, encoding, errors='replace'):
"""set encoding for transmitted data"""
self.output_encoding = encoding
self.tx_encoder = codecs.getincrementalencoder(encoding)(errors)
def dump_port_settings(self):
"""Write current settings to sys.stderr"""
sys.stderr.write("\n--- Settings: {p.name} {p.baudrate},{p.bytesize},{p.parity},{p.stopbits}\n".format(
p=self.serial))
sys.stderr.write('--- RTS: {:8} DTR: {:8} BREAK: {:8}\n'.format(
('active' if self.serial.rts else 'inactive'),
('active' if self.serial.dtr else 'inactive'),
('active' if self.serial.break_condition else 'inactive')))
try:
sys.stderr.write('--- CTS: {:8} DSR: {:8} RI: {:8} CD: {:8}\n'.format(
('active' if self.serial.cts else 'inactive'),
('active' if self.serial.dsr else 'inactive'),
('active' if self.serial.ri else 'inactive'),
('active' if self.serial.cd else 'inactive')))
except serial.SerialException:
# on RFC 2217 ports, it can happen if no modem state notification was
# yet received. ignore this error.
pass
sys.stderr.write('--- software flow control: {}\n'.format('active' if self.serial.xonxoff else 'inactive'))
sys.stderr.write('--- hardware flow control: {}\n'.format('active' if self.serial.rtscts else 'inactive'))
sys.stderr.write('--- serial input encoding: {}\n'.format(self.input_encoding))
sys.stderr.write('--- serial output encoding: {}\n'.format(self.output_encoding))
sys.stderr.write('--- EOL: {}\n'.format(self.eol.upper()))
sys.stderr.write('--- filters: {}\n'.format(' '.join(self.filters)))
def reader(self):
"""loop and copy serial->console"""
try:
while self.alive and self._reader_alive:
# read all that is there or wait for one byte
data = self.serial.read(self.serial.in_waiting or 1)
if data:
if self.raw:
self.console.write_bytes(data)
else:
text = self.rx_decoder.decode(data)
for transformation in self.rx_transformations:
text = transformation.rx(text)
self.console.write(text)
except serial.SerialException:
self.alive = False
self.console.cancel()
raise # XXX handle instead of re-raise?
def writer(self):
"""\
Loop and copy console->serial until self.exit_character character is
found. When self.menu_character is found, interpret the next key
locally.
"""
menu_active = False
try:
while self.alive:
try:
c = self.console.getkey()
except KeyboardInterrupt:
c = '\x03'
if not self.alive:
break
if menu_active:
self.handle_menu_key(c)
menu_active = False
elif c == self.menu_character:
menu_active = True # next char will be for menu
elif c == self.exit_character:
self.stop() # exit app
break
else:
#~ if self.raw:
text = c
for transformation in self.tx_transformations:
text = transformation.tx(text)
self.serial.write(self.tx_encoder.encode(text))
if self.echo:
echo_text = c
for transformation in self.tx_transformations:
echo_text = transformation.echo(echo_text)
self.console.write(echo_text)
except:
self.alive = False
raise
def handle_menu_key(self, c):
"""Implement a simple menu / settings"""
if c == self.menu_character or c == self.exit_character:
# Menu/exit character again -> send itself
self.serial.write(self.tx_encoder.encode(c))
if self.echo:
self.console.write(c)
elif c == '\x15': # CTRL+U -> upload file
self.upload_file()
elif c in '\x08hH?': # CTRL+H, h, H, ? -> Show help
sys.stderr.write(self.get_help_text())
elif c == '\x12': # CTRL+R -> Toggle RTS
self.serial.rts = not self.serial.rts
sys.stderr.write('--- RTS {} ---\n'.format('active' if self.serial.rts else 'inactive'))
elif c == '\x04': # CTRL+D -> Toggle DTR
self.serial.dtr = not self.serial.dtr
sys.stderr.write('--- DTR {} ---\n'.format('active' if self.serial.dtr else 'inactive'))
elif c == '\x02': # CTRL+B -> toggle BREAK condition
self.serial.break_condition = not self.serial.break_condition
sys.stderr.write('--- BREAK {} ---\n'.format('active' if self.serial.break_condition else 'inactive'))
elif c == '\x05': # CTRL+E -> toggle local echo
self.echo = not self.echo
sys.stderr.write('--- local echo {} ---\n'.format('active' if self.echo else 'inactive'))
elif c == '\x06': # CTRL+F -> edit filters
self.change_filter()
elif c == '\x0c': # CTRL+L -> EOL mode
modes = list(EOL_TRANSFORMATIONS) # keys
eol = modes.index(self.eol) + 1
if eol >= len(modes):
eol = 0
self.eol = modes[eol]
sys.stderr.write('--- EOL: {} ---\n'.format(self.eol.upper()))
self.update_transformations()
elif c == '\x01': # CTRL+A -> set encoding
self.change_encoding()
elif c == '\x09': # CTRL+I -> info
self.dump_port_settings()
#~ elif c == '\x01': # CTRL+A -> cycle escape mode
#~ elif c == '\x0c': # CTRL+L -> cycle linefeed mode
elif c in 'pP': # P -> change port
self.change_port()
elif c in 'sS': # S -> suspend / open port temporarily
self.suspend_port()
elif c in 'bB': # B -> change baudrate
self.change_baudrate()
elif c == '8': # 8 -> change to 8 bits
self.serial.bytesize = serial.EIGHTBITS
self.dump_port_settings()
elif c == '7': # 7 -> change to 8 bits
self.serial.bytesize = serial.SEVENBITS
self.dump_port_settings()
elif c in 'eE': # E -> change to even parity
self.serial.parity = serial.PARITY_EVEN
self.dump_port_settings()
elif c in 'oO': # O -> change to odd parity
self.serial.parity = serial.PARITY_ODD
self.dump_port_settings()
elif c in 'mM': # M -> change to mark parity
self.serial.parity = serial.PARITY_MARK
self.dump_port_settings()
elif c in 'sS': # S -> change to space parity
self.serial.parity = serial.PARITY_SPACE
self.dump_port_settings()
elif c in 'nN': # N -> change to no parity
self.serial.parity = serial.PARITY_NONE
self.dump_port_settings()
elif c == '1': # 1 -> change to 1 stop bits
self.serial.stopbits = serial.STOPBITS_ONE
self.dump_port_settings()
elif c == '2': # 2 -> change to 2 stop bits
self.serial.stopbits = serial.STOPBITS_TWO
self.dump_port_settings()
elif c == '3': # 3 -> change to 1.5 stop bits
self.serial.stopbits = serial.STOPBITS_ONE_POINT_FIVE
self.dump_port_settings()
elif c in 'xX': # X -> change software flow control
self.serial.xonxoff = (c == 'X')
self.dump_port_settings()
elif c in 'rR': # R -> change hardware flow control
self.serial.rtscts = (c == 'R')
self.dump_port_settings()
else:
sys.stderr.write('--- unknown menu character {} --\n'.format(key_description(c)))
def upload_file(self):
"""Ask user for filenname and send its contents"""
sys.stderr.write('\n--- File to upload: ')
sys.stderr.flush()
with self.console:
filename = sys.stdin.readline().rstrip('\r\n')
if filename:
try:
with open(filename, 'rb') as f:
sys.stderr.write('--- Sending file {} ---\n'.format(filename))
while True:
block = f.read(1024)
if not block:
break
self.serial.write(block)
# Wait for output buffer to drain.
self.serial.flush()
sys.stderr.write('.') # Progress indicator.
sys.stderr.write('\n--- File {} sent ---\n'.format(filename))
except IOError as e:
sys.stderr.write('--- ERROR opening file {}: {} ---\n'.format(filename, e))
def change_filter(self):
"""change the i/o transformations"""
sys.stderr.write('\n--- Available Filters:\n')
sys.stderr.write('\n'.join(
'--- {:<10} = {.__doc__}'.format(k, v)
for k, v in sorted(TRANSFORMATIONS.items())))
sys.stderr.write('\n--- Enter new filter name(s) [{}]: '.format(' '.join(self.filters)))
with self.console:
new_filters = sys.stdin.readline().lower().split()
if new_filters:
for f in new_filters:
if f not in TRANSFORMATIONS:
sys.stderr.write('--- unknown filter: {}\n'.format(repr(f)))
break
else:
self.filters = new_filters
self.update_transformations()
sys.stderr.write('--- filters: {}\n'.format(' '.join(self.filters)))
def change_encoding(self):
"""change encoding on the serial port"""
sys.stderr.write('\n--- Enter new encoding name [{}]: '.format(self.input_encoding))
with self.console:
new_encoding = sys.stdin.readline().strip()
if new_encoding:
try:
codecs.lookup(new_encoding)
except LookupError:
sys.stderr.write('--- invalid encoding name: {}\n'.format(new_encoding))
else:
self.set_rx_encoding(new_encoding)
self.set_tx_encoding(new_encoding)
sys.stderr.write('--- serial input encoding: {}\n'.format(self.input_encoding))
sys.stderr.write('--- serial output encoding: {}\n'.format(self.output_encoding))
def change_baudrate(self):
"""change the baudrate"""
sys.stderr.write('\n--- Baudrate: ')
sys.stderr.flush()
with self.console:
backup = self.serial.baudrate
try:
self.serial.baudrate = int(sys.stdin.readline().strip())
except ValueError as e:
sys.stderr.write('--- ERROR setting baudrate: {} ---\n'.format(e))
self.serial.baudrate = backup
else:
self.dump_port_settings()
def change_port(self):
"""Have a conversation with the user to change the serial port"""
with self.console:
try:
port = ask_for_port()
except KeyboardInterrupt:
port = None
if port and port != self.serial.port:
# reader thread needs to be shut down
self._stop_reader()
# save settings
settings = self.serial.getSettingsDict()
try:
new_serial = serial.serial_for_url(port, do_not_open=True)
# restore settings and open
new_serial.applySettingsDict(settings)
new_serial.rts = self.serial.rts
new_serial.dtr = self.serial.dtr
new_serial.open()
new_serial.break_condition = self.serial.break_condition
except Exception as e:
sys.stderr.write('--- ERROR opening new port: {} ---\n'.format(e))
new_serial.close()
else:
self.serial.close()
self.serial = new_serial
sys.stderr.write('--- Port changed to: {} ---\n'.format(self.serial.port))
# and restart the reader thread
self._start_reader()
def suspend_port(self):
"""\
open port temporarily, allow reconnect, exit and port change to get
out of the loop
"""
# reader thread needs to be shut down
self._stop_reader()
self.serial.close()
sys.stderr.write('\n--- Port closed: {} ---\n'.format(self.serial.port))
do_change_port = False
while not self.serial.is_open:
sys.stderr.write('--- Quit: {exit} | p: port change | any other key to reconnect ---\n'.format(
exit=key_description(self.exit_character)))
k = self.console.getkey()
if k == self.exit_character:
self.stop() # exit app
break
elif k in 'pP':
do_change_port = True
break
try:
self.serial.open()
except Exception as e:
sys.stderr.write('--- ERROR opening port: {} ---\n'.format(e))
if do_change_port:
self.change_port()
else:
# and restart the reader thread
self._start_reader()
sys.stderr.write('--- Port opened: {} ---\n'.format(self.serial.port))
def get_help_text(self):
"""return the help text"""
# help text, starts with blank line!
return """
--- pySerial ({version}) - miniterm - help
---
--- {exit:8} Exit program
--- {menu:8} Menu escape key, followed by:
--- Menu keys:
--- {menu:7} Send the menu character itself to remote
--- {exit:7} Send the exit character itself to remote
--- {info:7} Show info
--- {upload:7} Upload file (prompt will be shown)
--- {repr:7} encoding
--- {filter:7} edit filters
--- Toggles:
--- {rts:7} RTS {dtr:7} DTR {brk:7} BREAK
--- {echo:7} echo {eol:7} EOL
---
--- Port settings ({menu} followed by the following):
--- p change port
--- 7 8 set data bits
--- N E O S M change parity (None, Even, Odd, Space, Mark)
--- 1 2 3 set stop bits (1, 2, 1.5)
--- b change baud rate
--- x X disable/enable software flow control
--- r R disable/enable hardware flow control
""".format(version=getattr(serial, 'VERSION', 'unknown version'),
exit=key_description(self.exit_character),
menu=key_description(self.menu_character),
rts=key_description('\x12'),
dtr=key_description('\x04'),
brk=key_description('\x02'),
echo=key_description('\x05'),
info=key_description('\x09'),
upload=key_description('\x15'),
repr=key_description('\x01'),
filter=key_description('\x06'),
eol=key_description('\x0c'))
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# default args can be used to override when calling main() from an other script
# e.g to create a miniterm-my-device.py
def main(default_port=None, default_baudrate=9600, default_rts=None, default_dtr=None):
"""Command line tool, entry point"""
import argparse
parser = argparse.ArgumentParser(
description="Miniterm - A simple terminal program for the serial port.")
parser.add_argument(
"port",
nargs='?',
help="serial port name ('-' to show port list)",
default=default_port)
parser.add_argument(
"baudrate",
nargs='?',
type=int,
help="set baud rate, default: %(default)s",
default=default_baudrate)
group = parser.add_argument_group("port settings")
group.add_argument(
"--parity",
choices=['N', 'E', 'O', 'S', 'M'],
type=lambda c: c.upper(),
help="set parity, one of {N E O S M}, default: N",
default='N')
group.add_argument(
"--rtscts",
action="store_true",
help="enable RTS/CTS flow control (default off)",
default=False)
group.add_argument(
"--xonxoff",
action="store_true",
help="enable software flow control (default off)",
default=False)
group.add_argument(
"--rts",
type=int,
help="set initial RTS line state (possible values: 0, 1)",
default=default_rts)
group.add_argument(
"--dtr",
type=int,
help="set initial DTR line state (possible values: 0, 1)",
default=default_dtr)
group.add_argument(
"--ask",
action="store_true",
help="ask again for port when open fails",
default=False)
group = parser.add_argument_group("data handling")
group.add_argument(
"-e", "--echo",
action="store_true",
help="enable local echo (default off)",
default=False)
group.add_argument(
"--encoding",
dest="serial_port_encoding",
metavar="CODEC",
help="set the encoding for the serial port (e.g. hexlify, Latin1, UTF-8), default: %(default)s",
default='UTF-8')
group.add_argument(
"-f", "--filter",
action="append",
metavar="NAME",
help="add text transformation",
default=[])
group.add_argument(
"--eol",
choices=['CR', 'LF', 'CRLF'],
type=lambda c: c.upper(),
help="end of line mode",
default='CRLF')
group.add_argument(
"--raw",
action="store_true",
help="Do no apply any encodings/transformations",
default=False)
group = parser.add_argument_group("hotkeys")
group.add_argument(
"--exit-char",
type=int,
metavar='NUM',
help="Unicode of special character that is used to exit the application, default: %(default)s",
default=0x1d) # GS/CTRL+]
group.add_argument(
"--menu-char",
type=int,
metavar='NUM',
help="Unicode code of special character that is used to control miniterm (menu), default: %(default)s",
default=0x14) # Menu: CTRL+T
group = parser.add_argument_group("diagnostics")
group.add_argument(
"-q", "--quiet",
action="store_true",
help="suppress non-error messages",
default=False)
group.add_argument(
"--develop",
action="store_true",
help="show Python traceback on error",
default=False)
args = parser.parse_args()
if args.menu_char == args.exit_char:
parser.error('--exit-char can not be the same as --menu-char')
if args.filter:
if 'help' in args.filter:
sys.stderr.write('Available filters:\n')
sys.stderr.write('\n'.join(
'{:<10} = {.__doc__}'.format(k, v)
for k, v in sorted(TRANSFORMATIONS.items())))
sys.stderr.write('\n')
sys.exit(1)
filters = args.filter
else:
filters = ['default']
while True:
# no port given on command line -> ask user now
if args.port is None or args.port == '-':
try:
args.port = ask_for_port()
except KeyboardInterrupt:
sys.stderr.write('\n')
parser.error('user aborted and port is not given')
else:
if not args.port:
parser.error('port is not given')
try:
serial_instance = serial.serial_for_url(
args.port,
args.baudrate,
parity=args.parity,
rtscts=args.rtscts,
xonxoff=args.xonxoff,
do_not_open=True)
if not hasattr(serial_instance, 'cancel_read'):
# enable timeout for alive flag polling if cancel_read is not available
serial_instance.timeout = 1
if args.dtr is not None:
if not args.quiet:
sys.stderr.write('--- forcing DTR {}\n'.format('active' if args.dtr else 'inactive'))
serial_instance.dtr = args.dtr
if args.rts is not None:
if not args.quiet:
sys.stderr.write('--- forcing RTS {}\n'.format('active' if args.rts else 'inactive'))
serial_instance.rts = args.rts
serial_instance.open()
except serial.SerialException as e:
sys.stderr.write('could not open port {}: {}\n'.format(repr(args.port), e))
if args.develop:
raise
if not args.ask:
sys.exit(1)
else:
args.port = '-'
else:
break
miniterm = Miniterm(
serial_instance,
echo=args.echo,
eol=args.eol.lower(),
filters=filters)
miniterm.exit_character = unichr(args.exit_char)
miniterm.menu_character = unichr(args.menu_char)
miniterm.raw = args.raw
miniterm.set_rx_encoding(args.serial_port_encoding)
miniterm.set_tx_encoding(args.serial_port_encoding)
if not args.quiet:
sys.stderr.write('--- Miniterm on {p.name} {p.baudrate},{p.bytesize},{p.parity},{p.stopbits} ---\n'.format(
p=miniterm.serial))
sys.stderr.write('--- Quit: {} | Menu: {} | Help: {} followed by {} ---\n'.format(
key_description(miniterm.exit_character),
key_description(miniterm.menu_character),
key_description(miniterm.menu_character),
key_description('\x08')))
miniterm.start()
try:
miniterm.join(True)
except KeyboardInterrupt:
pass
if not args.quiet:
sys.stderr.write("\n--- exit ---\n")
miniterm.join()
miniterm.close()
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if __name__ == '__main__':
main()
| gpl-3.0 | 9,167,541,231,525,012,000 | 34.972336 | 116 | 0.527415 | false |
shivaenigma/pycoin | tests/bip32_test.py | 1 | 15806 | import unittest
from pycoin.key.BIP32Node import BIP32Node
from pycoin.serialize import h2b
class Bip0032TestCase(unittest.TestCase):
def test_vector_1(self):
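# BIP-0032 test vector 1: seed 000102030405060708090a0b0c0d0e0f, deriving the chain m/0H/1/2H/2/1000000000; a trailing "p" in a path component marks a hardened child.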
master = BIP32Node.from_master_secret(h2b("000102030405060708090a0b0c0d0e0f"))
self.assertEqual(
master.wallet_key(as_private=True),
"xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPG"
"JxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi")
self.assertEqual(master.bitcoin_address(), "15mKKb2eos1hWa6tisdPwwDC1a5J1y9nma")
self.assertEqual(master.wif(), "L52XzL2cMkHxqxBXRyEpnPQZGUs3uKiL3R11XbAdHigRzDozKZeW")
self.assertEqual(
master.wallet_key(),
"xpub661MyMwAqRbcFtXgS5sYJABqqG9YLmC4Q1Rdap9gSE8NqtwybGhePY2gZ29ESFjqJo"
"Cu1Rupje8YtGqsefD265TMg7usUDFdp6W1EGMcet8")
m0p = master.subkey(is_hardened=True)
self.assertEqual(
m0p.wallet_key(),
"xpub68Gmy5EdvgibQVfPdqkBBCHxA5htiqg55crXYuXoQRKfDBFA1WEjWgP6LHhwBZeNK1"
"VTsfTFUHCdrfp1bgwQ9xv5ski8PX9rL2dZXvgGDnw")
self.assertEqual(
m0p.wallet_key(as_private=True),
"xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6K"
"CesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7")
self.assertEqual(master.subkey_for_path("0p").wallet_key(), m0p.wallet_key())
pub_mp0 = master.subkey(is_hardened=True, as_private=False)
self.assertEqual(pub_mp0.wallet_key(), m0p.wallet_key())
self.assertEqual(master.subkey_for_path("0p.pub").wallet_key(), pub_mp0.wallet_key())
m0p1 = m0p.subkey(i=1)
self.assertEqual(
m0p1.wallet_key(),
"xpub6ASuArnXKPbfEwhqN6e3mwBcDTgzisQN1wXN9BJcM47sSikHjJf3UFHKkNAWbWMiGj"
"7Wf5uMash7SyYq527Hqck2AxYysAA7xmALppuCkwQ")
self.assertEqual(
m0p1.wallet_key(as_private=True),
"xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYP"
"xLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs")
self.assertEqual(master.subkey_for_path("0p/1").wallet_key(), m0p1.wallet_key())
pub_m0p1 = m0p.subkey(i=1, as_private=False)
self.assertEqual(pub_m0p1.wallet_key(), m0p1.wallet_key())
self.assertEqual(master.subkey_for_path("0p/1.pub").wallet_key(), pub_m0p1.wallet_key())
m0p1_1_2p = m0p1.subkey(i=2, is_hardened=True)
self.assertEqual(
m0p1_1_2p.wallet_key(),
"xpub6D4BDPcP2GT577Vvch3R8wDkScZWzQzMMUm3PWbmWvVJrZwQY4VUNgqFJPMM3No2dF"
"DFGTsxxpG5uJh7n7epu4trkrX7x7DogT5Uv6fcLW5")
self.assertEqual(
m0p1_1_2p.wallet_key(as_private=True),
"xprv9z4pot5VBttmtdRTWfWQmoH1taj2axGVzFqSb8C9xaxKymcFzXBDptWmT7FwuEzG3r"
"yjH4ktypQSAewRiNMjANTtpgP4mLTj34bhnZX7UiM")
self.assertEqual(master.subkey_for_path("0p/1/2p").wallet_key(), m0p1_1_2p.wallet_key())
pub_m0p1_1_2p = m0p1.subkey(i=2, as_private=False, is_hardened=True)
self.assertEqual(pub_m0p1_1_2p.wallet_key(), m0p1_1_2p.wallet_key())
self.assertEqual(master.subkey_for_path("0p/1/2p.pub").wallet_key(), pub_m0p1_1_2p.wallet_key())
m0p1_1_2p_2 = m0p1_1_2p.subkey(i=2)
self.assertEqual(
m0p1_1_2p_2.wallet_key(),
"xpub6FHa3pjLCk84BayeJxFW2SP4XRrFd1JYnxeLeU8EqN3vDfZmbqBqaGJAyiLjTAwm6Z"
"LRQUMv1ZACTj37sR62cfN7fe5JnJ7dh8zL4fiyLHV")
self.assertEqual(
m0p1_1_2p_2.wallet_key(as_private=True),
"xprvA2JDeKCSNNZky6uBCviVfJSKyQ1mDYahRjijr5idH2WwLsEd4Hsb2Tyh8RfQMuPh7f"
"7RtyzTtdrbdqqsunu5Mm3wDvUAKRHSC34sJ7in334")
self.assertEqual(master.subkey_for_path("0p/1/2p/2").wallet_key(), m0p1_1_2p_2.wallet_key())
pub_m0p1_1_2p_2 = m0p1_1_2p.subkey(i=2, as_private=False)
self.assertEqual(pub_m0p1_1_2p_2.wallet_key(), m0p1_1_2p_2.wallet_key())
self.assertEqual(master.subkey_for_path("0p/1/2p/2.pub").wallet_key(), pub_m0p1_1_2p_2.wallet_key())
m0p1_1_2p_2_1000000000 = m0p1_1_2p_2.subkey(i=1000000000)
self.assertEqual(
m0p1_1_2p_2_1000000000.wallet_key(),
"xpub6H1LXWLaKsWFhvm6RVpEL9P4KfRZSW7abD2ttkWP3SSQvnyA8FSVqNTEcYFgJS2UaF"
"cxupHiYkro49S8yGasTvXEYBVPamhGW6cFJodrTHy")
self.assertEqual(
m0p1_1_2p_2_1000000000.wallet_key(as_private=True),
"xprvA41z7zogVVwxVSgdKUHDy1SKmdb533PjDz7J6N6mV6uS3ze1ai8FHa8kmHScGpWmj4"
"WggLyQjgPie1rFSruoUihUZREPSL39UNdE3BBDu76")
self.assertEqual(master.subkey_for_path("0p/1/2p/2/1000000000").wallet_key(),
m0p1_1_2p_2_1000000000.wallet_key())
pub_m0p1_1_2p_2_1000000000 = m0p1_1_2p_2.subkey(i=1000000000, as_private=False)
self.assertEqual(pub_m0p1_1_2p_2_1000000000.wallet_key(), m0p1_1_2p_2_1000000000.wallet_key())
self.assertEqual(master.subkey_for_path("0p/1/2p/2/1000000000.pub").wallet_key(),
pub_m0p1_1_2p_2_1000000000.wallet_key())
def test_vector_2(self):
master = BIP32Node.from_master_secret(h2b(
"fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c99"
"9693908d8a8784817e7b7875726f6c696663605d5a5754514e4b484542"))
self.assertEqual(
master.wallet_key(as_private=True),
"xprv9s21ZrQH143K31xYSDQpPDxsXRTUcvj2iNHm5NUtrGiGG5e2DtALGdso3pGz6ssrdK"
"4PFmM8NSpSBHNqPqm55Qn3LqFtT2emdEXVYsCzC2U")
self.assertEqual(
master.wallet_key(),
"xpub661MyMwAqRbcFW31YEwpkMuc5THy2PSt5bDMsktWQcFF8syAmRUapSCGu8ED9W6oDM"
"Sgv6Zz8idoc4a6mr8BDzTJY47LJhkJ8UB7WEGuduB")
m0 = master.subkey()
self.assertEqual(
m0.wallet_key(),
"xpub69H7F5d8KSRgmmdJg2KhpAK8SR3DjMwAdkxj3ZuxV27CprR9LgpeyGmXUbC6wb7ERf"
"vrnKZjXoUmmDznezpbZb7ap6r1D3tgFxHmwMkQTPH")
self.assertEqual(
m0.wallet_key(as_private=True),
"xprv9vHkqa6EV4sPZHYqZznhT2NPtPCjKuDKGY38FBWLvgaDx45zo9WQRUT3dKYnjwih2y"
"JD9mkrocEZXo1ex8G81dwSM1fwqWpWkeS3v86pgKt")
pub_m0 = master.subkey(as_private=False)
self.assertEqual(pub_m0.wallet_key(), m0.wallet_key())
m0_2147483647p = m0.subkey(i=2147483647, is_hardened=True)
self.assertEqual(
m0_2147483647p.wallet_key(),
"xpub6ASAVgeehLbnwdqV6UKMHVzgqAG8Gr6riv3Fxxpj8ksbH9ebxaEyBLZ85ySDhKiLDB"
"rQSARLq1uNRts8RuJiHjaDMBU4Zn9h8LZNnBC5y4a")
self.assertEqual(
m0_2147483647p.wallet_key(as_private=True),
"xprv9wSp6B7kry3Vj9m1zSnLvN3xH8RdsPP1Mh7fAaR7aRLcQMKTR2vidYEeEg2mUCTAwC"
"d6vnxVrcjfy2kRgVsFawNzmjuHc2YmYRmagcEPdU9")
pub_m0_2147483647p = m0.subkey(i=2147483647, is_hardened=True, as_private=False)
self.assertEqual(pub_m0_2147483647p.wallet_key(), m0_2147483647p.wallet_key())
m0_2147483647p_1 = m0_2147483647p.subkey(i=1)
self.assertEqual(
m0_2147483647p_1.wallet_key(),
"xpub6DF8uhdarytz3FWdA8TvFSvvAh8dP3283MY7p2V4SeE2wyWmG5mg5EwVvmdMVCQcoN"
"JxGoWaU9DCWh89LojfZ537wTfunKau47EL2dhHKon")
self.assertEqual(
m0_2147483647p_1.wallet_key(as_private=True),
"xprv9zFnWC6h2cLgpmSA46vutJzBcfJ8yaJGg8cX1e5StJh45BBciYTRXSd25UEPVuesF9"
"yog62tGAQtHjXajPPdbRCHuWS6T8XA2ECKADdw4Ef")
pub_m0_2147483647p_1 = m0_2147483647p.subkey(i=1, as_private=False)
self.assertEqual(pub_m0_2147483647p_1.wallet_key(), m0_2147483647p_1.wallet_key())
pub_m0_2147483647p_1 = pub_m0_2147483647p.subkey(i=1, as_private=False)
self.assertEqual(pub_m0_2147483647p_1.wallet_key(), m0_2147483647p_1.wallet_key())
m0_2147483647p_1_2147483646p = m0_2147483647p_1.subkey(i=2147483646, is_hardened=True)
self.assertEqual(
m0_2147483647p_1_2147483646p.wallet_key(),
"xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4ko"
"xb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL")
self.assertEqual(
m0_2147483647p_1_2147483646p.wallet_key(as_private=True),
"xprvA1RpRA33e1JQ7ifknakTFpgNXPmW2YvmhqLQYMmrj4xJXXWYpDPS3xz7iAxn8L39nj"
"GVyuoseXzU6rcxFLJ8HFsTjSyQbLYnMpCqE2VbFWc")
pub_m0_2147483647p_1_2147483646p = m0_2147483647p_1.subkey(i=2147483646, as_private=False, is_hardened=True)
self.assertEqual(pub_m0_2147483647p_1_2147483646p.wallet_key(), m0_2147483647p_1_2147483646p.wallet_key())
m0_2147483647p_1_2147483646p_2 = m0_2147483647p_1_2147483646p.subkey(i=2)
self.assertEqual(m0_2147483647p_1_2147483646p_2.wif(), "L3WAYNAZPxx1fr7KCz7GN9nD5qMBnNiqEJNJMU1z9MMaannAt4aK")
self.assertEqual(
m0_2147483647p_1_2147483646p_2.wallet_key(),
"xpub6FnCn6nSzZAw5Tw7cgR9bi15UV96gLZhjDstkXXxvCLsUXBGXPdSnLFbdpq8p9HmGs"
"ApME5hQTZ3emM2rnY5agb9rXpVGyy3bdW6EEgAtqt")
self.assertEqual(
m0_2147483647p_1_2147483646p_2.wallet_key(as_private=True),
"xprvA2nrNbFZABcdryreWet9Ea4LvTJcGsqrMzxHx98MMrotbir7yrKCEXw7nadnHM8Dq3"
"8EGfSh6dqA9QWTyefMLEcBYJUuekgW4BYPJcr9E7j")
pub_m0_2147483647p_1_2147483646p_2 = m0_2147483647p_1_2147483646p.subkey(i=2, as_private=False)
self.assertEqual(pub_m0_2147483647p_1_2147483646p_2.wallet_key(), m0_2147483647p_1_2147483646p_2.wallet_key())
pub_m0_2147483647p_1_2147483646p_2 = pub_m0_2147483647p_1_2147483646p.subkey(i=2, as_private=False)
self.assertEqual(pub_m0_2147483647p_1_2147483646p_2.wallet_key(), m0_2147483647p_1_2147483646p_2.wallet_key())
self.assertEqual(master.subkey_for_path("0/2147483647p/1/2147483646p/2").wallet_key(),
m0_2147483647p_1_2147483646p_2.wallet_key())
self.assertEqual(master.subkey_for_path("0/2147483647p/1/2147483646p/2.pub").wallet_key(),
pub_m0_2147483647p_1_2147483646p_2.wallet_key())
def test_testnet(self):
# WARNING: these values have not been verified independently. TODO: do so
master = BIP32Node.from_master_secret(h2b("000102030405060708090a0b0c0d0e0f"), netcode='XTN')
self.assertEqual(
master.wallet_key(as_private=True),
"tprv8ZgxMBicQKsPeDgjzdC36fs6bMjGApWDNLR9erAXMs5skhMv36j9MV5ecvfavji5kh"
"qjWaWSFhN3YcCUUdiKH6isR4Pwy3U5y5egddBr16m")
self.assertEqual(master.bitcoin_address(), "mkHGce7dctSxHgaWSSbmmrRWsZfzz7MxMk")
self.assertEqual(master.wif(), "cVPXTF2TnozE1PenpP3x9huctiATZmp27T9Ue1d8nqLSExoPwfN5")
def test_streams(self):
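        # Round-trip keys through their wallet-key text serialization and verify that
        # subkeys, addresses and WIFs of the re-parsed keys match the originals.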
m0 = BIP32Node.from_master_secret("foo bar baz".encode("utf8"))
pm0 = m0.public_copy()
self.assertEqual(m0.wallet_key(), pm0.wallet_key())
m1 = m0.subkey()
pm1 = pm0.subkey()
for i in range(4):
m = m1.subkey(i=i)
pm = pm1.subkey(i=i)
self.assertEqual(m.wallet_key(), pm.wallet_key())
self.assertEqual(m.bitcoin_address(), pm.bitcoin_address())
m2 = BIP32Node.from_wallet_key(m.wallet_key(as_private=True))
m3 = m2.public_copy()
self.assertEqual(m.wallet_key(as_private=True), m2.wallet_key(as_private=True))
self.assertEqual(m.wallet_key(), m3.wallet_key())
print(m.wallet_key(as_private=True))
for j in range(2):
k = m.subkey(i=j)
k2 = BIP32Node.from_wallet_key(k.wallet_key(as_private=True))
k3 = BIP32Node.from_wallet_key(k.wallet_key())
k4 = k.public_copy()
self.assertEqual(k.wallet_key(as_private=True), k2.wallet_key(as_private=True))
self.assertEqual(k.wallet_key(), k2.wallet_key())
self.assertEqual(k.wallet_key(), k3.wallet_key())
self.assertEqual(k.wallet_key(), k4.wallet_key())
print(" %s %s" % (k.bitcoin_address(), k.wif()))
def test_public_subkey(self):
my_prv = BIP32Node.from_master_secret(b"foo")
uag = my_prv.subkey(i=0, is_hardened=True, as_private=True)
self.assertEqual(None, uag.subkey(i=0, as_private=False).secret_exponent())
with self.assertRaises(ValueError) as cm:
my_prv.subkey(i=-1)
err = cm.exception
self.assertEqual(err.args, ("i can't be negative", ))
for p in ('-1', '0/-1', '0H/-1'):
with self.assertRaises(ValueError) as cm:
my_prv.subkey_for_path(p)
err = cm.exception
self.assertEqual(err.args, ("i can't be negative", ))
self.assertRaises(ValueError, list, my_prv.subkeys('-1'))
self.assertRaises(ValueError, list, my_prv.subkeys('-1-0'))
def test_repr(self):
from pycoin.key import Key
netcode = 'XTN'
key = Key(secret_exponent=273, netcode=netcode)
wallet = BIP32Node.from_master_secret(bytes(key.wif().encode('ascii')), netcode)
address = wallet.address()
pub_k = wallet.from_text(address)
self.assertEqual(repr(pub_k), '<myb5gZNXePNf2E2ksrjnHRFCwyuvt7oEay>')
wif = wallet.wif()
priv_k = wallet.from_text(wif)
self.assertEqual(repr(priv_k),
'private_for <03ad094b1dc9fdce5d3648ca359b4e210a89d049532fdd39d9ccdd8ca393ac82f4>')
def test_p2wpkh_in_p2sh(self):
from pycoin.key import Key
node = Key.from_text('ypub6XDth9u8DzXV1tcpDtoDKMf6kVMaVMn1juVWEesTshcX4zUVvfNgjPJLXrD9N7AdTLnbHFL64KmBn3SNaTe69iZYbYCqLCCNPZKbLz9niQ4')
self.assertEqual(node.subkey(0).subkey(0).address(),
'35ohQTdNykjkF1Mn9nAVEFjupyAtsPAK1W')
self.assertEqual(node.subkey(1).subkey(0).address(),
'3KaBTcviBLEJajTEMstsA2GWjYoPzPK7Y7')
def test_p2wpkh_native(self):
from pycoin.key import Key
node = Key.from_text('zpub6nsHdRuY92FsMKdbn9BfjBCG6X8pyhCibNP6uDvpnw2cyrVhecvHRMa3Ne8kdJZxjxgwnpbHLkcR4bfnhHy6auHPJyDTQ3kianeuVLdkCYQ')
self.assertEqual(node.subkey(0).subkey(0).address(),
'bc1q3g5tmkmlvxryhh843v4dz026avatc0zzr6h3af')
self.assertEqual(node.subkey(1).subkey(0).address(),
'bc1qdy94n2q5qcp0kg7v9yzwe6wvfkhnvyzje7nx2p')
def test_testnet(self):
from pycoin.key import Key
node = Key.from_text('tpubDEenJGgVMucDfF8qb3MdhJhiGVZ8P2DUzw8NBtNc4uWRPGqUNNmxqUR8M3c1KwN3yE3CFm8nMLQmZH47Q65RQmsSiLenXhxD42DSU1CWyiz')
self.assertEqual(node.subkey(1).subkey(2).address(),
'mvq2eqwKs9LQdFNH6BKEhw11udKjqDjo5J')
self.assertEqual(node.subkey(3).subkey(4).address(),
'mpAgWFoJM5HYCz4byuvaHBfP8taCFPVPR7')
def test_p2wpkh_in_p2sh_testnet(self):
from pycoin.key import Key
node = Key.from_text('upub5FS3DrUQtjhPtXF9ckWB6wZVpW42bVU696FJKGohDu4KGTEkkxsAQNdkqU7ihyMKPCtPauBQ8EGN4zJWasC3TrTTdFhR9XyATvjeM5AHgrR')
self.assertEqual(node.subkey(1).subkey(2).address(),
'2MuAxAoui1rKSr9Ugp2w5G5PbYjdFkuEo1x')
self.assertEqual(node.subkey(3).subkey(4).address(),
'2NE2981iCZBoYgBynpSDWuqF8RWMuBVxdoL')
def test_p2wpkh_native_testnet(self):
from pycoin.key import Key
node = Key.from_text('vpub5aHCMN33HjfXNzN9F3FkUVwr43DMj6FgjVbeAKgUWf9iS56RTGw5MaNdLi7jy49eZmJmP7eQYcA3m6CjWc3UduXfZr4ggipy6wfofB6xtaV')
self.assertEqual(node.subkey(1).subkey(2).address(),
'tb1qweczj5ypy6x92e98f5leq8x3frrqdhp6yearkh')
self.assertEqual(node.subkey(3).subkey(4).address(),
'tb1qpkwg0m8xtxg95gczglc4x04ccq52ns3ez74kq2')
if __name__ == '__main__':
unittest.main()
| mit | -2,865,108,965,114,670,600 | 52.579661 | 143 | 0.675819 | false |
sivertkh/gtrackcore | gtrackcore/test/track_operations/operations/FilterTest.py | 1 | 10998 | import unittest
import numpy as np
from gtrackcore.metadata import GenomeInfo
from gtrackcore.track.core.GenomeRegion import GenomeRegion
from gtrackcore.track_operations.operations.Filter import Filter
from gtrackcore.test.track_operations.TestUtils import \
createSimpleTestTrackContent
class FilterTest(unittest.TestCase):
def setUp(self):
self.chr1 = (GenomeRegion('hg19', 'chr1', 0,
GenomeInfo.GENOMES['hg19']['size']['chr1']))
self.chr1Small = (GenomeRegion('hg19', 'chr1', 0, 10))
self.chromosomes = (GenomeRegion('hg19', c, 0, l)
for c, l in
GenomeInfo.GENOMES['hg19']['size'].iteritems())
def _runTest(self, starts=None, ends=None, values=None, strands=None,
ids=None, edges=None, weights=None, expStarts=None,
expEnds=None, expValues=None, expStrands=None, expIds=None,
expEdges=None, expWeights=None, customChrLength=None,
removeStrands=False, removeValues=False, removeLinks=False,
removeWeights=False, removeExtras=False,
debug=False, expTrackFormatType=None):
track = createSimpleTestTrackContent(startList=starts, endList=ends,
valList=values,
strandList=strands,
idList=ids, edgeList=edges,
weightsList=weights,
customChrLength=customChrLength)
f = Filter(track, removeStrands=removeStrands,
removeValues=removeValues, removeLinks=removeLinks,
removeWeights=removeWeights, removeExtras=removeExtras,
debug=debug)
result = f.calculate()
self.assertTrue(result is not None)
resFound = False
for (k, v) in result.getTrackViews().iteritems():
if cmp(k, self.chr1) == 0 or cmp(k, self.chr1Small) == 0:
# All test tracks are in chr1
resFound = True
newStarts = v.startsAsNumpyArray()
newEnds = v.endsAsNumpyArray()
newValues = v.valsAsNumpyArray()
newStrands = v.strandsAsNumpyArray()
newIds = v.idsAsNumpyArray()
newEdges = v.edgesAsNumpyArray()
newWeights = v.weightsAsNumpyArray()
#newExtras = v.extrasAsNumpyArray()
if debug:
print("newStarts: {}".format(newStarts))
print("expStarts: {}".format(expStarts))
print("newEnds: {}".format(newEnds))
print("expEnds: {}".format(expEnds))
print("newStrands: {}".format(newStrands))
print("expStrands: {}".format(expStrands))
print("newIds: {}".format(newIds))
print("expIds: {}".format(expIds))
print("newEdges: {}".format(newEdges))
print("expEdges: {}".format(expEdges))
if expTrackFormatType is not None:
# Check that the track is of the expected type.
print(expTrackFormatType)
print(v.trackFormat.getFormatName())
self.assertTrue(v.trackFormat.getFormatName() ==
expTrackFormatType)
if expEnds is None:
# Assuming a point type track. Creating the expected ends.
expEnds = np.array(expStarts) + 1
if expStarts is not None:
self.assertTrue(newStarts is not None)
self.assertTrue(np.array_equal(newStarts, expStarts))
else:
self.assertTrue(newStarts is None)
if expEnds is not None:
self.assertTrue(newEnds is not None)
self.assertTrue(np.array_equal(newEnds, expEnds))
else:
self.assertTrue(newEnds is None)
if expValues is not None:
self.assertTrue(newValues is not None)
self.assertTrue(np.array_equal(newValues, expValues))
else:
self.assertTrue(newValues is None)
if expStrands is not None:
self.assertTrue(newStrands is not None)
self.assertTrue(np.array_equal(newStrands, expStrands))
else:
self.assertTrue(newStrands is None)
if expIds is not None:
self.assertTrue(newIds is not None)
self.assertTrue(np.array_equal(newIds, expIds))
else:
self.assertTrue(newIds is None)
if expEdges is not None:
self.assertTrue(newEdges is not None)
self.assertTrue(np.array_equal(newEdges, expEdges))
else:
self.assertTrue(newEdges is None)
if expWeights is not None:
self.assertTrue(newWeights is not None)
self.assertTrue(np.array_equal(newWeights, expWeights))
else:
self.assertTrue(newWeights is None)
#if expExtras is not None:
# self.assertTrue(newExtras is not None)
# self.assertTrue(np.array_equal(newExtras, expExtras))
#else:
# self.assertTrue(newExtras is None)
else:
# Tests if all tracks no in chr1 have a size of 0.
self.assertEqual(v.size, 0)
self.assertTrue(resFound)
def testValuedPointsToPoints(self):
"""
Test removing values from valued points
:return:
"""
self._runTest(starts=[1,2], values=[3,4], removeValues=True,
expStarts=[1,2], expTrackFormatType="Points")
def testLinkedPointsToPoints(self):
"""
Removing all links
:return:
"""
self._runTest(starts=[1,2], ids=['1','2'], edges=['2','1'],
removeLinks=True, expStarts=[1,2],
expTrackFormatType="Points")
def testLinkedValuedPointsToValuedPoints(self):
"""
        Removing links, keeping the values
:return:
"""
self._runTest(starts=[1,2], ids=['1','2'], edges=['2','1'],
values=[3,4], removeLinks=True,
expStarts=[1,2], expValues=[3,4],
expTrackFormatType="Valued points")
def testLinkedValuedPointsToPoints(self):
"""
Removing both links and values
:return:
"""
self._runTest(starts=[1,2], ids=['1','2'], edges=['2','1'],
values=[3,4], removeLinks=True, removeValues=True,
expStarts=[1,2], expTrackFormatType="Points")
def testRemovingWeightsFromLinkedPoints(self):
"""
        Removing the weights, keeping the links
:return:
"""
self._runTest(starts=[1,2], ids=['1','2'], edges=['2','1'],
weights=[[4],[5]], removeWeights=True, expStarts=[1,2],
expIds=['1','2'], expEdges=['2','1'],
expTrackFormatType="Linked points")
def testRemovingStrandsFromPoints(self):
"""
Remove strands
:return:
"""
self._runTest(starts=[1,2], strands=['+','.'], removeStrands=True,
expStarts=[1,2], expTrackFormatType="Points")
def testRemoveValuesMissing(self):
"""
Try to remove values from a track without values. Expect to get the
same track in return
:return:
"""
self._runTest(starts=[1,2], removeValues=True,
expStarts=[1,2], expTrackFormatType="Points")
def testRemoveStrandsMissing(self):
"""
Try to remove strands from a track without strands. Expect to get the
same track in return
:return:
"""
        self._runTest(starts=[1,2], removeStrands=True,
expStarts=[1,2], expTrackFormatType="Points")
def testRemoveLinksMissing(self):
"""
Try to remove links from a track without links. Expect to get the
same track in return
:return:
"""
self._runTest(starts=[1,2], removeLinks=True,
expStarts=[1,2], expTrackFormatType="Points")
def testRemoveWeightsMissing(self):
"""
Try to remove weights from a track without weights. Expect to get the
same track in return
:return:
"""
self._runTest(starts=[1,2], removeWeights=True,
expStarts=[1,2], expTrackFormatType="Points")
# **** Segments ****
def testValuedSegmentsToSegments(self):
"""
Test removing values from valued segments
:return:
"""
self._runTest(starts=[1,10], ends=[5,15], values=[3,4],
removeValues=True, expStarts=[1,10], expEnds=[5,15],
expTrackFormatType="Segments")
def testLinkedSegmentsToSegments(self):
"""
Removing links.
:return:
"""
self._runTest(starts=[1,10], ends=[5,15], ids=['1','2'],
edges=['2','1'], removeLinks=True, expStarts=[1,10],
expEnds=[5,15], expTrackFormatType="Segments")
def testLinkedValuedSegmentsToValuedSegments(self):
"""
        Removing links, keeping the values
:return:
"""
self._runTest(starts=[1,10], ends=[5,15], ids=['1','2'],
edges=['2','1'], values=[3,4], removeLinks=True,
expStarts=[1,10], expEnds=[5,15], expValues=[3,4],
expTrackFormatType="Valued segments")
def testLinkedValuedSegmentsToLinkedSegments(self):
"""
        Removing the values, keeping the links
:return:
"""
self._runTest(starts=[1,10], ends=[5,15], ids=['1','2'],
edges=['2','1'], values=[3,4], removeValues=True,
expStarts=[1,10], expEnds=[5,15], expIds=['1','2'],
expEdges=['2','1'],
expTrackFormatType="Linked segments")
def testLinkedValuedSegmentToSegments(self):
"""
Removing both links and values
:return:
"""
self._runTest(starts=[1,10], ends=[5,15], ids=['1','2'],
edges=['2','1'], values=[3,4], removeLinks=True,
removeValues=True, expStarts=[1,10], expEnds=[5,15],
expTrackFormatType="Segments")
if __name__ == "__main__":
unittest.main()
| gpl-3.0 | -4,644,463,832,202,392,000 | 38.419355 | 78 | 0.521549 | false |
openmotics/gateway | src/gateway/group_action_controller.py | 1 | 2954 | # Copyright (C) 2020 OpenMotics BV
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
GroupAction BLL
"""
from __future__ import absolute_import
import logging
from ioc import Injectable, Inject, INJECTED, Singleton
from gateway.base_controller import BaseController, SyncStructure
from gateway.dto import GroupActionDTO
from gateway.models import GroupAction
if False: # MYPY
from typing import List, Tuple
logger = logging.getLogger(__name__)
@Injectable.named('group_action_controller')
@Singleton
class GroupActionController(BaseController):
SYNC_STRUCTURES = [SyncStructure(GroupAction, 'group_action')]
@Inject
def __init__(self, master_controller=INJECTED):
super(GroupActionController, self).__init__(master_controller)
def do_basic_action(self, action_type, action_number): # type: (int, int) -> None
self._master_controller.do_basic_action(action_type, action_number)
def do_group_action(self, group_action_id): # type: (int) -> None
self._master_controller.do_group_action(group_action_id)
def load_group_action(self, group_action_id): # type: (int) -> GroupActionDTO
group_action = GroupAction.get(number=group_action_id) # type: GroupAction # TODO: Use exists
group_action_dto = self._master_controller.load_group_action(group_action_id=group_action.number)
return group_action_dto
def load_group_actions(self): # type: () -> List[GroupActionDTO]
group_action_dtos = []
for group_action in list(GroupAction.select()): # TODO: Only fetch the numbers
group_action_dto = self._master_controller.load_group_action(group_action_id=group_action.number)
group_action_dtos.append(group_action_dto)
return group_action_dtos
def save_group_actions(self, group_actions): # type: (List[GroupActionDTO]) -> None
group_actions_to_save = []
for group_action_dto in group_actions:
group_action = GroupAction.get_or_none(number=group_action_dto.id) # type: GroupAction
if group_action is None:
logger.info('Ignored saving non-existing GroupAction {0}'.format(group_action_dto.id))
continue
group_actions_to_save.append(group_action_dto)
self._master_controller.save_group_actions(group_actions_to_save)
| agpl-3.0 | 3,239,037,115,919,670,000 | 43.089552 | 109 | 0.708869 | false |
SymbiFlow/edalize | tests/test_vunit/run.py | 1 | 1483 | # Auto generated by Edalize
def load_module_from_file(name, python_file):
import importlib.util
spec = importlib.util.spec_from_file_location(name, python_file)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
def load_runner_hooks(python_file = r''):
if len(python_file) > 0:
return load_module_from_file('vunit_runner_hooks', python_file)
else:
return __import__('edalize.vunit_hooks', fromlist=['vunit_hooks'])
runner = load_runner_hooks().VUnitRunner()
# Override this hook to allow custom creation configuration of the VUnit instance:
vu = runner.create()
lib = vu.add_library("vunit_test_runner_lib")
lib.add_source_files("sv_file.sv")
lib.add_source_files("vlog_file.v")
lib.add_source_files("vlog05_file.v")
lib.add_source_files("vhdl_file.vhd")
lib.add_source_files("vhdl2008_file", vhdl_standard="2008")
lib.add_source_files("another_sv_file.sv")
# Override this hook to customize the library, e.g. compile-flags etc.
# This allows full access to vunit.ui.Library interface:
runner.handle_library("vunit_test_runner_lib", lib)
lib = vu.add_library("libx")
lib.add_source_files("vhdl_lfile")
# Override this hook to customize the library, e.g. compile-flags etc.
# This allows full access to vunit.ui.Library interface:
runner.handle_library("libx", lib)
# override this hook to perform final customization and parametrization of VUnit, custom invokation, etc.
runner.main(vu)
| bsd-2-clause | -3,198,943,249,158,944,300 | 35.170732 | 105 | 0.732974 | false |
endere/Data-structures-2nd-half | src/hash.py | 1 | 2101 | """Hash tables."""
import bst
class HashTable(object):
"""Class object for our Hash Table."""
def __init__(self, bucket_number, function='fnv'):
"""Init for our hash.
Accepts a string to determine which hash the table uses.
"""
self.bucket_list = []
self.function = function
self.bucket_number = bucket_number
for i in range(bucket_number):
self.bucket_list.append(bst.BinarySearchTree())
def _hash(self, key):
"""Use fnv hash if function is fnv, or uses additive hash if function is add."""
if self.function == 'fnv':
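            # FNV-1 (32-bit): start from the offset basis 2166136261 and, for every byte,
            # multiply by the FNV prime 16777619 before XOR-ing the byte in.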
h = 2166136261
for i in range(len(key)):
h = (h * 16777619) ^ ord(key[i])
return h
elif self.function == 'add':
h = 0
for i in range(len(key)):
h += ord(key[i])
return h
def set(self, key, value):
"""Place an item in the hash table."""
number = self._hash(key)
stored_key = number if self.function == 'fnv' else key
if self.get(key) is None:
self.bucket_list[number % self.bucket_number].insert(stored_key, value)
def get(self, key):
"""Use a key to retrieve a stored value from the table."""
if type(key) != str:
raise TypeError("This is not the string you're looking for!")
number = self._hash(key)
stored_key = number if self.function == 'fnv' else key
try:
return self.bucket_list[number % self.bucket_number].search(stored_key).stored_value
except AttributeError:
return None
if __name__ == '__main__':
test_table = HashTable(1021)
with open('/usr/share/dict/words') as dictionary:
data = dictionary.read()
data = data.split('\n')
for i in range(len(data)):
print(len(data) - i)
test_table.set(data[i], data[i])
# print(type(test_table.dict_bst))
for i in test_table.bucket_list:
print("key: {} , len: {}".format(test_table.bucket_list.index(i), i.size()))
| mit | 8,588,302,588,909,000,000 | 34.016667 | 96 | 0.557354 | false |
chiemseesurfer/thumbor-mogilefs-loader | loaders/mogilefs_loader_http_fallback.py | 1 | 1218 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Max Oberberger ([email protected])
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from . import http_loader
from . import mogilefs_loader
from tornado.concurrent import return_future
@return_future
def load(context, path, callback):
def callback_wrapper(result):
if result.successful:
callback(result)
else:
# If mogilefs_loader failed try http_loader
http_loader.load(context, path, callback)
# First attempt to load with mogilefs_loader
mogilefs_loader.load(context, path, callback_wrapper)
| gpl-3.0 | -3,743,907,813,002,403,000 | 35.909091 | 71 | 0.724959 | false |
Banbury/cartwheel-3d | Python/Data/Characters/Bip/Controllers/Jumping.py | 1 | 21457 | from App.Proxys import *
data = SimBiController(
name = 'Jumping',
controlParamsList = [
ControlParams( joint = 'root', kp = 3000.0, kd = 300.0, tauMax = 10000.0, scale = ( 1.0, 0.2, 1.0 ) ),
ControlParams( joint = 'pelvis_torso', kp = 1000.0, kd = 100.0, tauMax = 10000.0, scale = ( 1.0, 0.2, 1.0 ) ),
ControlParams( joint = 'torso_head', kp = 200.0, kd = 20.0, tauMax = 10000.0, scale = ( 1.0, 0.2, 1.0 ) ),
ControlParams( joint = 'lShoulder', kp = 100.0, kd = 10.0, tauMax = 10000.0, scale = ( 0.5, 1.0, 1.0 ) ),
ControlParams( joint = 'rShoulder', kp = 100.0, kd = 10.0, tauMax = 10000.0, scale = ( 0.3, 1.0, 1.0 ) ),
ControlParams( joint = 'lElbow', kp = 5.0, kd = 1.0, tauMax = 10000.0, scale = ( 0.2, 1.0, 1.0 ) ),
ControlParams( joint = 'rElbow', kp = 5.0, kd = 1.0, tauMax = 10000.0, scale = ( 0.2, 1.0, 1.0 ) ),
ControlParams( joint = 'lHip', kp = 300.0, kd = 30.0, tauMax = 10000.0, scale = ( 1.0, 0.66, 1.0 ) ),
ControlParams( joint = 'rHip', kp = 300.0, kd = 30.0, tauMax = 10000.0, scale = ( 1.0, 0.66, 1.0 ) ),
ControlParams( joint = 'lKnee', kp = 300.0, kd = 30.0, tauMax = 10000.0, scale = ( 1.0, 0.2, 1.0 ) ),
ControlParams( joint = 'rKnee', kp = 300.0, kd = 30.0, tauMax = 10000.0, scale = ( 1.0, 0.2, 1.0 ) ),
ControlParams( joint = 'lAnkle', kp = 100.0, kd = 10.0, tauMax = 10000.0, scale = ( 1.0, 0.2, 1.0 ) ),
ControlParams( joint = 'rAnkle', kp = 100.0, kd = 10.0, tauMax = 10000.0, scale = ( 1.0, 0.2, 1.0 ) ),
ControlParams( joint = 'lToeJoint', kp = 50.0, kd = 5.0, tauMax = 10000.0, scale = ( 1.0, 1.0, 1.0 ) ),
ControlParams( joint = 'rToeJoint', kp = 50.0, kd = 5.0, tauMax = 10000.0, scale = ( 1.0, 1.0, 1.0 ) ) ],
states = [
SimBiConState(
name = 'State0',
nextStateIndex = 1,
transitionOn = 'TIME_UP',
duration = 2.0,
externalForces = [
ExternalForce(
body = 'pelvis',
forceX = [ ( 0.0, 0.0 ) ],
forceY = [
( 0.220257139643, -0.297893823029 ),
( 0.269866998824, 4009.37843155 ),
( 0.323865606442, -0.297893823029 ) ],
forceZ = [
( 0.22457639404, 0.230547728033 ),
( 0.312787202786, 1969.12899193 ),
( 0.340594719742, 0.281229318864 ) ],
torqueX = [
( 0.336683417085, 0.026145829397 ),
( 0.388500473607, -54.4526060901 ),
( 0.448698407471, -3.77495360492 ),
( 0.677040782713, -1.51348004883 ),
( 0.878199541855, -0.759655530132 ),
( 1.03586451524, -30.912636278 ) ],
torqueY = [
( 0.361809045226, -0.0251256281407 ),
( 0.429548422996, -8.20440959163 ),
( 0.474930850007, -0.0223053848099 ) ],
torqueZ = [
( 0.336683417085, 0.0753768844221 ),
( 0.416682339801, -13.6502244593 ),
( 0.462311557789, 0.0251256281407 ) ] ) ],
trajectories = [
Trajectory(
joint = 'root',
strength = [ ],
components = [
TrajectoryComponent( rotationAxis = ( 1.0, 0.0, 0.0 ), baseTrajectory = [ ( 0.488294, 0.113631 ) ] ),
TrajectoryComponent(
rotationAxis = ( 0.0, 0.0, 1.0 ),
reverseOnStance = 'RIGHT',
baseTrajectory = [ ( 0.0, 0.0 ), ( 0.25, 0.0 ), ( 0.5, 0.0 ), ( 0.75, 0.0 ), ( 1.0, 0.0 ) ] ) ] ),
Trajectory(
joint = 'SWING_Hip',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [
( 0.197367186158, -0.311997836151 ),
( 0.29648241206, -2.0351758794 ),
( 0.557788944724, -1.03015075377 ),
( 0.663316582915, -0.929648241206 ),
( 1.06507609055, 0.082112161635 ) ] ),
TrajectoryComponent(
rotationAxis = ( 0.0, 0.0, 1.0 ),
reverseOnStance = 'LEFT',
baseTrajectory = [ ( 0.0, -0.06 ), ( 0.5, -0.06 ), ( 1.0, -0.06 ) ] ) ] ),
Trajectory(
joint = 'STANCE_Hip',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [
( 0.195969899497, -1.08040201005 ),
( 0.195979899497, 0.0251256281407 ),
( 0.43216080402, 0.0753768844221 ),
( 0.984924623116, -1.08040201005 ) ] ),
TrajectoryComponent( rotationAxis = ( 0.0, 0.0, 1.0 ), reverseOnStance = 'LEFT', baseTrajectory = [ ( 0.0, 0.0 ) ] ) ] ),
Trajectory(
joint = 'SWING_Knee',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [ ( 0.336683417085, 1.4824120603 ), ( 0.552763819095, 0.175879396985 ) ] ) ] ),
Trajectory(
joint = 'STANCE_Knee',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [
( 0.16105827842, 0.873209081588 ),
( 0.224457449363, 0.0138604616125 ),
( 0.43216080402, 0.0753768844221 ),
( 0.723618090452, 1.93467336683 ),
( 0.989949748744, 0.879396984925 ) ] ) ] ),
Trajectory(
joint = 'SWING_Ankle',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [
( 0.020067, 0.809573 ),
( 0.197324, -0.510418 ),
( 0.27135678392, -0.326633165829 ),
( 0.658291457286, 0.376884422111 ),
( 0.994974874372, 0.527638190955 ) ] ) ] ),
Trajectory(
joint = 'STANCE_Ankle',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [ ( 0.211055276382, -0.326633165829 ), ( 0.261306532663, 0.577889447236 ) ] ) ] ),
Trajectory(
joint = 'STANCE_Shoulder',
strength = [ ( 0.0, 1.0 ) ],
components = [
TrajectoryComponent(
rotationAxis = ( 0.0, 0.0, 1.0 ),
reverseOnStance = 'LEFT',
baseTrajectory = [ ( 0.327759, 1.733668 ) ] ),
TrajectoryComponent(
rotationAxis = ( 0.0, 1.0, 0.0 ),
reverseOnStance = 'RIGHT',
baseTrajectory = [ ( 0.0301, -0.005792 ), ( 0.41806, -0.277104 ), ( 0.973244, -0.017375 ) ] ),
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [ ( 0.006689, -0.025126 ), ( 0.505017, -0.138526 ), ( 0.996656, -0.025126 ) ] ) ] ),
Trajectory(
joint = 'SWING_Shoulder',
strength = [ ( 0.0, 1.0 ) ],
components = [
TrajectoryComponent(
rotationAxis = ( 0.0, 0.0, 1.0 ),
reverseOnStance = 'RIGHT',
baseTrajectory = [ ( 0.110368, 1.884422 ) ] ),
TrajectoryComponent(
rotationAxis = ( 0.0, 1.0, 0.0 ),
reverseOnStance = 'RIGHT',
baseTrajectory = [ ( 0.023411, 0.003989 ), ( 0.471572, 0.243329 ) ] ),
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [ ( 0.013378, 0.005066 ), ( 0.448161, 0.321392 ), ( 0.993311, 0.025126 ) ] ) ] ),
Trajectory(
joint = 'STANCE_Elbow',
strength = [ ( 0.307692, 2.135678 ) ],
components = [
TrajectoryComponent(
rotationAxis = ( 0.0, 1.0, 0.0 ),
reverseOnStance = 'LEFT',
baseTrajectory = [ ( 0.307692, 2.573897 ) ] ) ] ),
Trajectory(
joint = 'SWING_Elbow',
strength = [ ( 0.364548, 2.98995 ) ],
components = [
TrajectoryComponent(
rotationAxis = ( 0.0, 1.0, 0.0 ),
reverseOnStance = 'LEFT',
baseTrajectory = [ ( 0.013378, -2.665055 ) ] ) ] ),
Trajectory(
joint = 'pelvis_torso',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 0.0, 1.0, 0.0 ),
reverseOnStance = 'RIGHT',
baseTrajectory = [ ( 0.010033, 0.076817 ), ( 0.150502, -0.29923 ), ( 0.993311, -0.248525 ) ] ),
TrajectoryComponent( rotationAxis = ( 1.0, 0.0, 0.0 ), baseTrajectory = [ ( 0.598662, 0.138959 ) ] ),
TrajectoryComponent(
rotationAxis = ( 0.0, 0.0, 1.0 ),
reverseOnStance = 'RIGHT',
baseTrajectory = [ ( 0.685619, 0.026046 ) ] ) ] ),
Trajectory(
joint = 'torso_head',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 0.0, 1.0, 0.0 ),
reverseOnStance = 'LEFT',
baseTrajectory = [ ( 0.010033, 0.076817 ), ( 0.150502, -0.29923 ), ( 0.993311, -0.248525 ) ] ),
TrajectoryComponent( rotationAxis = ( 1.0, 0.0, 0.0 ), baseTrajectory = [ ] ) ] ),
Trajectory(
joint = 'STANCE_ToeJoint',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [ ( 0.692308, 0.025126 ), ( 0.856187, -1.532663 ) ] ) ] ),
Trajectory(
joint = 'SWING_ToeJoint',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [ ( 0.38796, 0.014868 ), ( 0.826087, -0.431185 ) ] ) ] )
]
),
SimBiConState(
name = 'State1',
nextStateIndex = 1,
transitionOn = 'TIME_UP',
duration = 0.4,
externalForces = [
ExternalForce(
body = 'pelvis',
forceX = [ ( 0.0, 0.0 ) ],
forceY = [ ( 0.0, 0.0 ) ],
forceZ = [ ( 0.0, 0.0 ) ],
torqueX = [ ],
torqueY = [ ],
torqueZ = [ ] ) ],
trajectories = [
Trajectory(
joint = 'root',
strength = [ ],
components = [
TrajectoryComponent( rotationAxis = ( 1.0, 0.0, 0.0 ), baseTrajectory = [ ( 0.488294, 0.113631 ) ] ),
TrajectoryComponent(
rotationAxis = ( 0.0, 0.0, 1.0 ),
reverseOnStance = 'RIGHT',
baseTrajectory = [ ( 0.0, 0.0 ), ( 0.25, 0.0 ), ( 0.5, 0.0 ), ( 0.75, 0.0 ), ( 1.0, 0.0 ) ] ) ] ),
Trajectory(
joint = 'SWING_Hip',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
feedback = LinearBalanceFeedback( axis = ( 0.0, 0.0, 1.0 ), cd = -0.55, cv = -0.3 ),
baseTrajectory = [ ( 0.541806, -0.438308 ), ( 0.692308, -0.362199 ), ( 0.859532, -0.160317 ), ( 0.996656, 0.200194 ) ] ),
TrajectoryComponent(
rotationAxis = ( 0.0, 0.0, 1.0 ),
reverseOnStance = 'LEFT',
feedback = LinearBalanceFeedback( axis = ( 1.0, 0.0, 0.0 ), cd = 0.55, cv = 0.3 ),
baseTrajectory = [ ( 0.0, -0.06 ), ( 0.5, -0.06 ), ( 1.0, -0.06 ) ] ) ] ),
Trajectory(
joint = 'SWING_Knee',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [ ( 0.528428, 1.658482 ), ( 0.80602, 1.006429 ), ( 0.983278, 0.354748 ) ] ) ] ),
Trajectory(
joint = 'STANCE_Knee',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [
( 0.147157, 0.130628 ),
( 0.394649, 0.318731 ),
( 0.61204, 0.29114 ),
( 0.832776, 0.236208 ),
( 0.989967, 0.576787 ) ] ) ] ),
Trajectory(
joint = 'SWING_Ankle',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [
( 0.020067, 0.809573 ),
( 0.197324, -0.510418 ),
( 0.488294, -0.518456 ),
( 0.75, -0.5 ),
( 0.751, -0.15 ),
( 1.0, -0.15 ) ] ) ] ),
Trajectory(
joint = 'STANCE_Ankle',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [ ( 0.354515, -0.354772 ), ( 0.625418, 0.764028 ), ( 0.749164, 1.163781 ) ] ) ] ),
Trajectory(
joint = 'STANCE_Shoulder',
strength = [ ( 0.0, 1.0 ) ],
components = [
TrajectoryComponent(
rotationAxis = ( 0.0, 0.0, 1.0 ),
reverseOnStance = 'LEFT',
baseTrajectory = [ ( 0.327759, 1.733668 ) ] ),
TrajectoryComponent(
rotationAxis = ( 0.0, 1.0, 0.0 ),
reverseOnStance = 'RIGHT',
baseTrajectory = [ ( 0.0301, -0.005792 ), ( 0.41806, -0.277104 ), ( 0.973244, -0.017375 ) ] ),
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [ ( 0.006689, -0.025126 ), ( 0.505017, -0.138526 ), ( 0.996656, -0.025126 ) ] ) ] ),
Trajectory(
joint = 'SWING_Shoulder',
strength = [ ( 0.0, 1.0 ) ],
components = [
TrajectoryComponent(
rotationAxis = ( 0.0, 0.0, 1.0 ),
reverseOnStance = 'RIGHT',
baseTrajectory = [ ( 0.110368, 1.884422 ) ] ),
TrajectoryComponent(
rotationAxis = ( 0.0, 1.0, 0.0 ),
reverseOnStance = 'RIGHT',
baseTrajectory = [ ( 0.023411, 0.003989 ), ( 0.471572, 0.243329 ) ] ),
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [ ( 0.013378, 0.005066 ), ( 0.448161, 0.321392 ), ( 0.993311, 0.025126 ) ] ) ] ),
Trajectory(
joint = 'STANCE_Elbow',
strength = [ ( 0.307692, 2.135678 ) ],
components = [
TrajectoryComponent(
rotationAxis = ( 0.0, 1.0, 0.0 ),
reverseOnStance = 'LEFT',
baseTrajectory = [ ( 0.307692, 2.573897 ) ] ) ] ),
Trajectory(
joint = 'SWING_Elbow',
strength = [ ( 0.364548, 2.98995 ) ],
components = [
TrajectoryComponent(
rotationAxis = ( 0.0, 1.0, 0.0 ),
reverseOnStance = 'LEFT',
baseTrajectory = [ ( 0.013378, -2.665055 ) ] ) ] ),
Trajectory(
joint = 'pelvis_torso',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 0.0, 1.0, 0.0 ),
reverseOnStance = 'RIGHT',
baseTrajectory = [ ( 0.010033, 0.076817 ), ( 0.150502, -0.29923 ), ( 0.993311, -0.248525 ) ] ),
TrajectoryComponent( rotationAxis = ( 1.0, 0.0, 0.0 ), baseTrajectory = [ ( 0.598662, 0.138959 ) ] ),
TrajectoryComponent(
rotationAxis = ( 0.0, 0.0, 1.0 ),
reverseOnStance = 'RIGHT',
baseTrajectory = [ ( 0.685619, 0.026046 ) ] ) ] ),
Trajectory(
joint = 'torso_head',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 0.0, 1.0, 0.0 ),
reverseOnStance = 'LEFT',
baseTrajectory = [ ( 0.010033, 0.076817 ), ( 0.150502, -0.29923 ), ( 0.993311, -0.248525 ) ] ),
TrajectoryComponent( rotationAxis = ( 1.0, 0.0, 0.0 ), baseTrajectory = [ ] ) ] ),
Trajectory(
joint = 'STANCE_ToeJoint',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [ ( 0.692308, 0.025126 ), ( 0.856187, -1.532663 ) ] ) ] ),
Trajectory(
joint = 'SWING_ToeJoint',
strength = [ ],
components = [
TrajectoryComponent(
rotationAxis = ( 1.0, 0.0, 0.0 ),
baseTrajectory = [ ( 0.38796, 0.014868 ), ( 0.826087, -0.431185 ) ] ) ] )
]
)
]
) | apache-2.0 | 2,999,820,496,715,441,700 | 48.971496 | 149 | 0.360628 | false |
juancarlospaco/microraptor | setup.py | 1 | 4696 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
#
# To generate DEB package from Python Package:
# sudo pip3 install stdeb
# python3 setup.py --verbose --command-packages=stdeb.command bdist_deb
#
#
# To generate RPM package from Python Package:
# sudo apt-get install rpm
# python3 setup.py bdist_rpm --verbose --fix-python --binary-only
#
#
# To generate an MS Windows EXE from the Python Package (from MS Windows only):
# python3 setup.py bdist_wininst --verbose
#
#
# To generate PKGBUILD ArchLinux from Python Package (from PyPI only):
# sudo pip3 install git+https://github.com/bluepeppers/pip2arch.git
# pip2arch.py PackageNameHere
#
#
# To upload to PyPI, execute:
# sudo pip install --upgrade pip setuptools wheel virtualenv
# python3 setup.py bdist_egg bdist_wheel --universal sdist --formats=zip upload --sign
"""Setup.py for Python, as Generic as possible."""
import os
import re
import sys
from setuptools import setup, Command
from zipapp import create_archive
##############################################################################
# EDIT HERE
MODULE_PATH = os.path.join(os.path.dirname(__file__), "microraptor.py")
DESCRIPTION = """Microraptor builds cool presentations using Angler, Impress
and Markdown. Presentations using a simple MarkDown file.
Convert a GitHub README.md to Presentations with one command."""
##############################################################################
# Dont touch below
try:
with open(str(MODULE_PATH), "r", encoding="utf-8-sig") as source_code_file:
SOURCE = source_code_file.read()
except:
with open(str(MODULE_PATH), "r") as source_code_file:
SOURCE = source_code_file.read()
def find_this(search, source=SOURCE):
"""Take a string and a filename path string and return the found value."""
print("Searching for {what}.".format(what=search))
if not search or not source:
print("Not found on source: {what}.".format(what=search))
return ""
return str(re.compile(r".*__{what}__ = '(.*?)'".format(
what=search), re.S).match(source).group(1)).strip().replace("'", "")
class ZipApp(Command):
description, user_options = "Creates a zipapp.", []
def initialize_options(self): pass # Dont needed, but required.
def finalize_options(self): pass # Dont needed, but required.
def run(self):
return create_archive("microraptor.py", "microraptor.pyz",
"/usr/bin/env python3")
print("Starting build of setuptools.setup().")
##############################################################################
# EDIT HERE
setup(
name="microraptor",
version=find_this("version"),
description="Presentation builder using Markdown and ImpressJS.",
long_description=DESCRIPTION,
url=find_this("url"),
license=find_this("license"),
author=find_this("author"),
author_email=find_this("email"),
maintainer=find_this("author"),
maintainer_email=find_this("email"),
include_package_data=True,
zip_safe=True,
install_requires=['anglerfish', 'mistune', 'pygments'],
setup_requires=['anglerfish', 'mistune', 'pygments'],
tests_require=['anglerfish', 'mistune', 'pygments'],
requires=['anglerfish', 'mistune', 'pygments'],
scripts=["microraptor.py"],
cmdclass={"zipapp": ZipApp},
keywords=['ImpressJS', 'presentation', 'HTML5', 'Markdown', 'impress',
'CSS', 'HTML', 'Web', 'GFM', 'KISS', 'Builder', 'HTML'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Intended Audience :: Other Audience',
'Natural Language :: English',
'License :: OSI Approved :: GNU General Public License (GPL)',
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Operating System :: OS Independent',
'Operating System :: POSIX :: Linux',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development',
],
)
print("Finished build of setuptools.setup().")
| gpl-3.0 | -6,046,680,466,054,644,000 | 28.721519 | 93 | 0.622445 | false |
COCS4950G7/COSC4950 | Source/GUI/frameGUI_V1.4.py | 1 | 4896 | __author__ = 'Jon'
import Tkconstants
import tkFileDialog
from Tkinter import *
import Tkinter as Tk
########################################################################
## adding a hashing button, used to test basic functionality of text entry.
## Test commit for pycharm rebuild
class MyApp(object):
""""""
#----------------------------------------------------------------------
def __init__(self, parent):
"""Constructor"""
self.root = parent
self.root.title("Main frame")
self.frame = Tk.Frame(parent)
self.frame.pack()
title1 = Label(root, text="Single User Mode")
title1.grid(row=1, column=0)
btn_dictionary = Tk.Button(self.frame, text="Dictionary", command=self.open_dictionary)
btn_dictionary.grid(row=2, column=0)
btn_brute_force = Tk.Button(self.frame , text="Brute Force", command=self.open_brute_force)
btn_brute_force.grid(row=2, column=1)
btn_node = Tk.Button(self.frame, text="Node", command=self.open_node)
btn_node.grid(row=4, column=0)
btn_server = Tk.Button(self.frame, text="Server", command=self.open_server)
btn_server.grid(row=4, column=2)
#----------------------------------------------------------------------
def hide(self):
""""""
self.root.withdraw()
#----------------------------------------------------------------------
def openFrame(self):
""""""
self.hide()
otherFrame = Tk.Toplevel()
otherFrame.geometry("400x300")
otherFrame.title("otherFrame")
handler = lambda: self.onCloseOtherFrame(otherFrame)
btn = Tk.Button(otherFrame, text="Close", command=handler)
btn.pack()
#----------------------------------------------------------------------
def open_node(self):
""""""
self.hide()
node_frame = Tk.Toplevel()
node_frame.geometry("400x300")
node_frame.title("Node")
handler = lambda: self.onCloseOtherFrame(node_frame)
btn = Tk.Button(node_frame, text="Back", command=handler)
btn.pack()
#----------------------------------------------------------------------
def open_server(self):
""""""
self.hide()
server_frame = Tk.Toplevel()
server_frame.geometry("400x300")
server_frame.title("Server")
handler = lambda: self.onCloseOtherFrame(server_frame)
btn = Tk.Button(server_frame, text="Back", command=handler)
btn.pack()
#----------------------------------------------------------------------
def open_sum(self):
""""""
self.hide()
sum_frame = Tk.Toplevel()
sum_frame.geometry("400x300")
sum_frame.title("Single User Mode")
handler = lambda: self.onCloseOtherFrame(sum_frame)
btn = Tk.Button(sum_frame, text="Back", command=handler)
btn.pack()
#----------------------------------------------------------------------
def open_brute_force(self):
""""""
self.hide()
brute_force_frame = Tk.Toplevel()
brute_force_frame.geometry("400x300")
brute_force_frame.title("Brute Force")
handler = lambda: self.onCloseOtherFrame(brute_force_frame)
btn = Tk.Button(brute_force_frame, text="Back", command=handler)
btn.pack()
#----------------------------------------------------------------------
def open_dictionary(self):
""""""
self.hide()
dictionary_frame = Tk.Toplevel()
dictionary_frame.geometry("400x300")
dictionary_frame.title("Dictionary")
handler = lambda: self.onCloseOtherFrame(dictionary_frame)
btn = Tk.Button(dictionary_frame, text="Back", command=handler)
btn.pack()
#----------------------------------------------------------------------
def onCloseOtherFrame(self, otherFrame):
""""""
otherFrame.destroy()
self.show()
#----------------------------------------------------------------------
def show(self):
""""""
self.root.update()
self.root.deiconify()
#----------------------------------------------------------------------
def askopenfile(self):
"""Returns an opened file in read mode."""
self.file_opt = options = {}
options['defaultextension'] = '.txt'
options['filetypes'] = [('all files', '.*'), ('text files', '.txt')]
options['initialdir'] = 'C:\\'
options['initialfile'] = 'myfile.txt'
options['parent'] = root
options['title'] = 'This is a title'
return tkFileDialog.askopenfile(mode='r', **self.file_opt)
#----------------------------------------------------------------------
if __name__ == "__main__":
root = Tk.Tk()
root.geometry("800x600")
app = MyApp(root)
root.mainloop() | gpl-3.0 | -3,765,599,085,140,937,000 | 34.744526 | 99 | 0.469771 | false |
quattor/aquilon | lib/aquilon/worker/templates/personality.py | 1 | 10046 | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008-2016,2018 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from collections import defaultdict
from operator import attrgetter
from sqlalchemy.orm import joinedload, subqueryload
from aquilon.aqdb.model import (
ArchetypeParamDef,
ParameterizedPersonality,
PersonalityParameter,
PersonalityStage,
)
from aquilon.aqdb.model.feature import host_features
from aquilon.worker.locks import NoLockKey, PlenaryKey
from aquilon.worker.templates import (
Plenary,
PlenaryCollection,
PlenaryParameterized,
PlenaryResource,
StructurePlenary,
)
from aquilon.worker.templates.entitlementutils import flatten_entitlements
from aquilon.worker.templates.panutils import (
pan_append,
pan_assign,
pan_include,
pan_include_if_exists,
pan_variable,
StructureTemplate,
)
LOGGER = logging.getLogger(__name__)
def get_parameters_by_feature(dbstage, dbfeature):
param_def_holder = dbfeature.param_def_holder
assert param_def_holder
param = dbstage.parameters.get(param_def_holder, None)
ret = {}
for param_def in param_def_holder.param_definitions:
if param:
value = param.get_path(param_def.path, compel=False)
else:
value = None
if value is None:
value = param_def.parsed_default
if value is not None:
ret[param_def.path] = value
return ret
def staged_path(prefix, dbstage, suffix):
if dbstage.name == "current":
return "%s/%s/%s" % (prefix, dbstage.personality.name, suffix)
else:
return "%s/%s+%s/%s" % (prefix, dbstage.personality.name,
dbstage.name, suffix)
class PlenaryParameterizedPersonality(PlenaryParameterized):
prefix = "personality"
@classmethod
def template_name(cls, dbobj):
return "{}/{}/{}/{}/config".format(
cls.prefix,
dbobj.name,
dbobj.location.location_type,
dbobj.location.name)
def body(self, lines):
flatten_entitlements(lines, self.dbobj, prefix='/')
for resholder in self.dbobj.resholders:
if resholder.location != self.dbobj.location:
continue
lines.append("")
for resource in sorted(resholder.resources,
key=attrgetter('resource_type', 'name')):
res_path = PlenaryResource.template_name(resource)
pan_append(
lines,
'/system/resources/{}'.format(resource.resource_type),
StructureTemplate(res_path))
Plenary.handlers[ParameterizedPersonality] = PlenaryParameterizedPersonality
class PlenaryPersonality(PlenaryCollection):
def __init__(self, dbstage, logger=LOGGER, allow_incomplete=True):
super(PlenaryPersonality, self).__init__(logger=logger,
allow_incomplete=allow_incomplete)
self.append(PlenaryPersonalityBase.get_plenary(dbstage,
allow_incomplete=allow_incomplete))
for defholder, dbparam in dbstage.parameters.items():
if not isinstance(defholder, ArchetypeParamDef):
continue
self.append(PlenaryPersonalityParameter.get_plenary(dbparam,
allow_incomplete=allow_incomplete))
@classmethod
def query_options(cls, prefix="", load_personality=True):
options = []
if load_personality:
options.append(joinedload(prefix + 'personality'))
return options + [subqueryload(prefix + 'parameters'),
subqueryload(prefix + 'features'),
subqueryload(prefix + 'grns'),
joinedload(prefix + 'features.feature'),
joinedload(prefix + 'features.feature.param_def_holder'),
subqueryload(prefix + 'features.feature.param_def_holder.param_definitions'),
joinedload(prefix + 'features.model')]
Plenary.handlers[PersonalityStage] = PlenaryPersonality
class PlenaryPersonalityBase(Plenary):
prefix = "personality"
@classmethod
def template_name(cls, dbstage):
return staged_path(cls.prefix, dbstage, "config")
@classmethod
def loadpath(cls, dbstage):
return dbstage.personality.archetype.name
def body(self, lines):
dbpers = self.dbobj.personality
if self.dbobj.name == "current":
pan_variable(lines, "PERSONALITY", dbpers.name)
else:
pan_variable(lines, "PERSONALITY", "%s+%s" % (dbpers.name,
self.dbobj.name))
pan_assign(lines, "/system/personality/name", dbpers.name)
if dbpers.staged:
pan_assign(lines, "/system/personality/stage", self.dbobj.name)
eon_id_map = defaultdict(set)
for grn_rec in self.dbobj.grns:
eon_id_map[grn_rec.target].add(grn_rec.grn.eon_id)
for target in sorted(eon_id_map):
for eon_id in sorted(eon_id_map[target]):
pan_append(lines, "/system/eon_id_maps/%s" % target, eon_id)
pan_assign(lines, "/system/personality/owner_eon_id",
dbpers.owner_eon_id)
user_list = sorted(dbusr.name for dbusr in dbpers.root_users)
if user_list:
pan_assign(lines, "/system/root_users", user_list)
ng_list = sorted(ng.name for ng in dbpers.root_netgroups)
if ng_list:
pan_assign(lines, "/system/root_netgroups", ng_list)
pre, post = host_features(self.dbobj)
for dbfeature in sorted(frozenset()
.union(pre)
.union(post)
.intersection(self.dbobj.param_features),
key=attrgetter('name')):
base_path = "/system/" + dbfeature.cfg_path
params = get_parameters_by_feature(self.dbobj, dbfeature)
for key in sorted(params.keys()):
pan_assign(lines, base_path + "/" + key, params[key])
for dbfeature in sorted(pre, key=attrgetter('name')):
pan_include(lines, dbfeature.cfg_path + "/config")
pan_append(lines, "/metadata/features", dbfeature.cfg_path + "/config")
pan_include_if_exists(lines, "personality/config")
if dbpers.host_environment.name != 'legacy':
pan_assign(lines, "/system/personality/host_environment",
dbpers.host_environment, True)
if dbpers.config_override:
pan_include(lines, "features/personality/config_override/config")
for dbfeature in sorted(post, key=attrgetter('name')):
pan_include(lines, dbfeature.cfg_path + "/config")
pan_append(lines, "/metadata/features", dbfeature.cfg_path + "/config")
def get_key(self, exclusive=True):
if self.is_deleted():
return NoLockKey(logger=self.logger)
else:
return PlenaryKey(personality=self.dbobj, logger=self.logger,
exclusive=exclusive)
class PlenaryPersonalityParameter(StructurePlenary):
prefix = "personality"
@classmethod
def template_name(cls, dbparam):
return staged_path(cls.prefix, dbparam.personality_stage,
dbparam.param_def_holder.template)
@classmethod
def loadpath(cls, dbparam):
return dbparam.personality_stage.personality.archetype.name
def __init__(self, *args, **kwargs):
super(PlenaryPersonalityParameter, self).__init__(*args, **kwargs)
self.debug_name = "%s/%s" % (self.dbobj.personality_stage.qualified_name,
self.dbobj.param_def_holder.template)
def body(self, lines):
dbparam = self.dbobj
param_def_holder = dbparam.param_def_holder
for param_def in sorted(param_def_holder.param_definitions,
key=attrgetter('path')):
value = dbparam.get_path(param_def.path, compel=False)
if value is None:
value = param_def.parsed_default
if value is None:
continue
# Do a single-level expansion of JSON parameters. This should be
# more efficient to compile according to the Pan documentation, and
# it also avoids trying to assign a value to an empty path if a
# single parameter definition covers the whole template
if isinstance(value, dict):
for k in sorted(value):
v = value[k]
if param_def.path:
pan_assign(lines, param_def.path + "/" + k, v)
else:
pan_assign(lines, k, v)
else:
pan_assign(lines, param_def.path, value)
def get_key(self, exclusive=True):
if self.is_deleted():
return NoLockKey(logger=self.logger)
else:
return PlenaryKey(personality=self.dbobj.personality_stage,
logger=self.logger, exclusive=exclusive)
Plenary.handlers[PersonalityParameter] = PlenaryPersonalityParameter
| apache-2.0 | -806,622,832,720,203,800 | 35.398551 | 103 | 0.601334 | false |
imdaveho/boilerplates | django-boilerplate/project_name/settings/production.py | 1 | 1544 | import sys
from {{ project_name }}.settings.default import *
# Custom Project Structure:
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
APPS_DIR = os.path.join(BASE_DIR, 'apps')
sys.path.insert(0, BASE_DIR)
sys.path.insert(1, APPS_DIR)
sys.path.insert(2, os.path.join(BASE_DIR, 'libs'))
# Production Configs (overwriting base.py):
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ROOT_PATH = os.path.dirname(__file__)
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
PREPEND_WWW = False
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SESSION_STORAGE = "django.contrib.sessions.backends.signed_cookies"
MESSAGE_STORAGE = "django.contrib.messages.storage.cookie.CookieStorage"
# Template Stuff (overwriting base.py):
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
os.path.join(ROOT_PATH, 'templates'),
)
TEMPLATE_LOADERS = (
("django.template.loaders.cached.loader", (
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
)),
)
TEMPLATE_CONTEXT_PROCESSORS = [
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.tz",
# "django.core.context_processors.request",
"django.contrib.messages.context_processors.messages",
]
# Miscellaneous:
TEST_RUNNER = "django.test.runner.DiscoverRunner"
| gpl-2.0 | 2,579,424,371,143,748,600 | 28.132075 | 72 | 0.715674 | false |
jdavidrcamacho/Tests_GP | 02 - Programs being tested/RV_function.py | 1 | 3615 | # -*- coding: utf-8 -*-
"""
Created on Fri Feb 3 11:36:58 2017
@author: camacho
"""
import numpy as np
import matplotlib.pyplot as pl
pl.close("all")
##### RV FUNCTION 1 - circular orbit
def RV_circular(P=365,K=0.1,T=0,gamma=0,time=100,space=20):
#parameters
#P = period in days
#K = semi-amplitude of the signal
#T = velocity at zero phase
#gamma = average velocity of the star
#time = time of the simulation
#space => I want an observation every time/space days
t=np.linspace(0,time,space)
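    # Circular orbit: the RV curve is a pure sinusoid with period P, semi-amplitude K and offset gamma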
RV=[K*np.sin(2*np.pi*x/P - T) + gamma for x in t]
RV=[x for x in RV] #m/s
return [t,RV]
##### RV FUNCTION 2 - keplerian orbit
def RV_kepler(P=365,e=0,K=0.1,T=0,gamma=0,w=np.pi,time=100,space=1000):
#parameters
#P = period in days
#e = eccentricity
#K = RV amplitude
#gamma = constant system RV
#T = zero phase
#w = longitude of the periastron
#time = time of the simulation
#space => I want an observation every time/space days
t=np.linspace(0,time,space)
#mean anomaly
Mean_anom=[2*np.pi*(x1-T)/P for x1 in t]
#eccentric anomaly -> E0=M + e*sin(M) + 0.5*(e**2)*sin(2*M)
E0=[x + e*np.sin(x) + 0.5*(e**2)*np.sin(2*x) for x in Mean_anom]
#mean anomaly -> M0=E0 - e*sin(E0)
M0=[x - e*np.sin(x) for x in E0]
i=0
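    # Newton-style fixed-point iteration solving Kepler's equation M = E - e*sin(E) for E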
while i<100:
#[x + y for x, y in zip(first, second)]
calc_aux=[x2-y for x2,y in zip(Mean_anom,M0)]
E1=[x3 + y/(1-e*np.cos(x3)) for x3,y in zip(E0,calc_aux)]
        M1=[x4 - e*np.sin(x4) for x4 in E1]
i+=1
E0=E1
M0=M1
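    # True anomaly from the eccentric anomaly via the tangent half-angle relation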
nu=[2*np.arctan(np.sqrt((1+e)/(1-e))*np.tan(x5/2)) for x5 in E0]
RV=[ gamma + K*(e*np.cos(w)+np.cos(w+x6)) for x6 in nu]
RV=[x for x in RV] #m/s
return t,RV
#Examples
#a=RV_circular()
#pl.figure('RV_circular with P=365')
#pl.plot(a[0],a[1],':',)
#pl.title('planet of 365 days orbit')
#pl.xlabel('time')
#pl.ylabel('RV (Km/s)')
#b=RV_circular(P=100)
#pl.figure('RV_circular with P=100')
#pl.title('planet of 100 days orbit')
#pl.plot(b[0],b[1],':',)
#pl.xlabel('time')
#pl.ylabel('RV (Km/s)')
#c=RV_kepler(P=100,e=0,w=np.pi,time=100)
#pl.figure()
#pl.plot(c[0],c[1],':',)
#pl.title('P=100, e=0, w=pi, time=100')
#pl.xlabel('time')
#pl.ylabel('RV (Km/s)')
#d1=RV_kepler(P=100,e=0, w=0,time=500)
#pl.figure()
#pl.title('P=100, e=0, w=pi, time=25')
#pl.plot(d[0],d[1],'-',)
#pl.xlabel('time')
#pl.ylabel('RV (Km/s)')
#d2=RV_kepler(P=100,e=0, w=np.pi,time=500)
#pl.figure()
#pl.title('P=100, e=0, w=pi, time=25')
#pl.plot(d[0],d[1],'-',)
#pl.xlabel('time')
#pl.ylabel('RV (Km/s)')
#d3=RV_kepler(P=100,e=0.5, w=np.pi,time=500)
#pl.figure()
#pl.title('P=100, e=0, w=pi, time=25')
#pl.plot(d[0],d[1],'-',)
#pl.xlabel('time')
#pl.ylabel('RV (Km/s)')
#d4=RV_kepler(P=100,e=0.5, w=np.pi/2,time=500)
#pl.figure()
#pl.title('P=100, e=0, w=pi, time=25')
#pl.plot(d[0],d[1],'-',)
#pl.xlabel('time')
#pl.ylabel('RV (Km/s)')
d1=RV_kepler(P=100,e=0, w=0,time=500)
d2=RV_kepler(P=100,e=0.5, w=0,time=500)
d3=RV_kepler(P=100,e=0.5, w=np.pi,time=500)
d4=RV_kepler(P=100,e=0.5, w=np.pi/2,time=500)
# Four axes, returned as a 2-d array
f, axarr = pl.subplots(2, 2)
axarr[0, 0].plot(d1[0],d1[1])
axarr[0, 0].set_title('e=0 and w=0')
axarr[0, 1].plot(d2[0],d2[1])
axarr[0, 1].set_title('e=0.5, w=0')
axarr[1, 0].plot(d3[0],d3[1])
axarr[1, 0].set_title('e=0.5, w=pi')
axarr[1, 1].plot(d4[0],d4[1])
axarr[1, 1].set_title('e=0.5, w=pi/2')
#pl.setp(pl.xticks(fontsize = 18) for a in axarr[0,:])#pl.yticks(fontsize=18))
pl.setp([a.get_xticklabels() for a in axarr[0, :]], visible=False) | mit | 1,223,838,283,404,268,000 | 25.202899 | 78 | 0.582849 | false |
privb0x23/bliss-initramfs | pkg/hooks/Firmware.py | 1 | 1964 | # Copyright 2012-2017 Jonathan Vasquez <[email protected]>
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from pkg.hooks.Hook import Hook
class Firmware(Hook):
# Copy firmware?
_use = 0
# If enabled, all the firmware in /lib/firmware will be copied into the initramfs.
# If you know exactly what firmware files you want, definitely leave this at 0 so
# to reduce the initramfs size.
_copy_all = 0
# A list of firmware files to include in the initramfs
_files = [
# Add your firmware files below
#"iwlwifi-6000g2a-6.ucode",
#"/yamaha/yss225_registers.bin",
]
# Gets the flag_all_firmware value
@classmethod
def IsCopyAllEnabled(cls):
return cls._copy_all
| bsd-2-clause | 149,393,277,640,515,650 | 42.644444 | 86 | 0.746945 | false |
IamJeffG/geopandas | geopandas/plotting.py | 1 | 13216 | from __future__ import print_function
import warnings
import numpy as np
from six import next
from six.moves import xrange
from shapely.geometry import Polygon
def plot_polygon(ax, poly, facecolor='red', edgecolor='black', alpha=0.5, linewidth=1.0, **kwargs):
""" Plot a single Polygon geometry """
from descartes.patch import PolygonPatch
a = np.asarray(poly.exterior)
if poly.has_z:
poly = Polygon(zip(*poly.exterior.xy))
# without Descartes, we could make a Patch of exterior
ax.add_patch(PolygonPatch(poly, facecolor=facecolor, linewidth=0, alpha=alpha)) # linewidth=0 because boundaries are drawn separately
ax.plot(a[:, 0], a[:, 1], color=edgecolor, linewidth=linewidth, **kwargs)
for p in poly.interiors:
x, y = zip(*p.coords)
ax.plot(x, y, color=edgecolor, linewidth=linewidth)
def plot_multipolygon(ax, geom, facecolor='red', edgecolor='black', alpha=0.5, linewidth=1.0, **kwargs):
""" Can safely call with either Polygon or Multipolygon geometry
"""
if geom.type == 'Polygon':
plot_polygon(ax, geom, facecolor=facecolor, edgecolor=edgecolor, alpha=alpha, linewidth=linewidth, **kwargs)
elif geom.type == 'MultiPolygon':
for poly in geom.geoms:
plot_polygon(ax, poly, facecolor=facecolor, edgecolor=edgecolor, alpha=alpha, linewidth=linewidth, **kwargs)
def plot_linestring(ax, geom, color='black', linewidth=1.0, **kwargs):
""" Plot a single LineString geometry """
a = np.array(geom)
ax.plot(a[:, 0], a[:, 1], color=color, linewidth=linewidth, **kwargs)
def plot_multilinestring(ax, geom, color='red', linewidth=1.0, **kwargs):
""" Can safely call with either LineString or MultiLineString geometry
"""
if geom.type == 'LineString':
plot_linestring(ax, geom, color=color, linewidth=linewidth, **kwargs)
elif geom.type == 'MultiLineString':
for line in geom.geoms:
plot_linestring(ax, line, color=color, linewidth=linewidth, **kwargs)
def plot_point(ax, pt, marker='o', markersize=2, color='black', **kwargs):
""" Plot a single Point geometry """
ax.plot(pt.x, pt.y, marker=marker, markersize=markersize, color=color, **kwargs)
def gencolor(N, colormap='Set1'):
"""
Color generator intended to work with one of the ColorBrewer
qualitative color scales.
Suggested values of colormap are the following:
Accent, Dark2, Paired, Pastel1, Pastel2, Set1, Set2, Set3
(although any matplotlib colormap will work).
"""
from matplotlib import cm
# don't use more than 9 discrete colors
n_colors = min(N, 9)
cmap = cm.get_cmap(colormap, n_colors)
colors = cmap(range(n_colors))
for i in xrange(N):
yield colors[i % n_colors]
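def _demo_gencolor(n=3, colormap='Set1'):
    """ Usage sketch (illustrative helper only, not part of the geopandas API):
    draw `n` qualitative RGBA colors from the generator above.
    """
    return list(gencolor(n, colormap=colormap))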
def plot_series(s, cmap='Set1', color=None, ax=None, linewidth=1.0,
figsize=None, **color_kwds):
""" Plot a GeoSeries
Generate a plot of a GeoSeries geometry with matplotlib.
Parameters
----------
Series
The GeoSeries to be plotted. Currently Polygon,
MultiPolygon, LineString, MultiLineString and Point
geometries can be plotted.
cmap : str (default 'Set1')
The name of a colormap recognized by matplotlib. Any
colormap will work, but categorical colormaps are
generally recommended. Examples of useful discrete
colormaps include:
Accent, Dark2, Paired, Pastel1, Pastel2, Set1, Set2, Set3
color : str (default None)
If specified, all objects will be colored uniformly.
ax : matplotlib.pyplot.Artist (default None)
axes on which to draw the plot
linewidth : float (default 1.0)
Line width for geometries.
figsize : pair of floats (default None)
Size of the resulting matplotlib.figure.Figure. If the argument
ax is given explicitly, figsize is ignored.
**color_kwds : dict
Color options to be passed on to the actual plot function
Returns
-------
matplotlib axes instance
"""
if 'colormap' in color_kwds:
warnings.warn("'colormap' is deprecated, please use 'cmap' instead "
"(for consistency with matplotlib)", FutureWarning)
cmap = color_kwds.pop('colormap')
if 'axes' in color_kwds:
warnings.warn("'axes' is deprecated, please use 'ax' instead "
"(for consistency with pandas)", FutureWarning)
ax = color_kwds.pop('axes')
import matplotlib.pyplot as plt
if ax is None:
fig, ax = plt.subplots(figsize=figsize)
ax.set_aspect('equal')
color_generator = gencolor(len(s), colormap=cmap)
for geom in s:
if color is None:
col = next(color_generator)
else:
col = color
if geom.type == 'Polygon' or geom.type == 'MultiPolygon':
if 'facecolor' in color_kwds:
plot_multipolygon(ax, geom, linewidth=linewidth, **color_kwds)
else:
plot_multipolygon(ax, geom, facecolor=col, linewidth=linewidth, **color_kwds)
elif geom.type == 'LineString' or geom.type == 'MultiLineString':
plot_multilinestring(ax, geom, color=col, linewidth=linewidth, **color_kwds)
elif geom.type == 'Point':
plot_point(ax, geom, color=col, **color_kwds)
plt.draw()
return ax
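# Typical call (sketch, assuming `gs` is an existing GeoSeries):
#     ax = plot_series(gs, cmap='Set2', linewidth=0.5)
# which draws every geometry on a new equal-aspect axes and returns that axes.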
def plot_dataframe(s, column=None, cmap=None, color=None, linewidth=1.0,
categorical=False, legend=False, ax=None,
scheme=None, k=5, vmin=None, vmax=None, figsize=None,
**color_kwds):
""" Plot a GeoDataFrame
Generate a plot of a GeoDataFrame with matplotlib. If a
column is specified, the plot coloring will be based on values
in that column. Otherwise, a categorical plot of the
geometries in the `geometry` column will be generated.
Parameters
----------
GeoDataFrame
The GeoDataFrame to be plotted. Currently Polygon,
MultiPolygon, LineString, MultiLineString and Point
geometries can be plotted.
column : str (default None)
The name of the column to be plotted.
categorical : bool (default False)
If False, cmap will reflect numerical values of the
column being plotted. For non-numerical columns (or if
column=None), this will be set to True.
cmap : str (default 'Set1')
The name of a colormap recognized by matplotlib.
color : str (default None)
If specified, all objects will be colored uniformly.
linewidth : float (default 1.0)
Line width for geometries.
legend : bool (default False)
Plot a legend (Experimental; currently for categorical
plots only)
ax : matplotlib.pyplot.Artist (default None)
axes on which to draw the plot
scheme : pysal.esda.mapclassify.Map_Classifier
Choropleth classification schemes (requires PySAL)
k : int (default 5)
Number of classes (ignored if scheme is None)
vmin : None or float (default None)
Minimum value of cmap. If None, the minimum data value
in the column to be plotted is used.
vmax : None or float (default None)
Maximum value of cmap. If None, the maximum data value
in the column to be plotted is used.
figsize
Size of the resulting matplotlib.figure.Figure. If the argument
        ax is given explicitly, figsize is ignored.
**color_kwds : dict
Color options to be passed on to the actual plot function
Returns
-------
matplotlib axes instance
"""
if 'colormap' in color_kwds:
warnings.warn("'colormap' is deprecated, please use 'cmap' instead "
"(for consistency with matplotlib)", FutureWarning)
cmap = color_kwds.pop('colormap')
if 'axes' in color_kwds:
warnings.warn("'axes' is deprecated, please use 'ax' instead "
"(for consistency with pandas)", FutureWarning)
ax = color_kwds.pop('axes')
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from matplotlib.colors import Normalize
from matplotlib import cm
if column is None:
return plot_series(s.geometry, cmap=cmap, color=color,
ax=ax, linewidth=linewidth, figsize=figsize,
**color_kwds)
else:
if s[column].dtype is np.dtype('O'):
categorical = True
if categorical:
if cmap is None:
cmap = 'Set1'
categories = list(set(s[column].values))
categories.sort()
valuemap = dict([(k, v) for (v, k) in enumerate(categories)])
values = [valuemap[k] for k in s[column]]
else:
values = s[column]
if scheme is not None:
binning = __pysal_choro(values, scheme, k=k)
values = binning.yb
# set categorical to True for creating the legend
categorical = True
binedges = [binning.yb.min()] + binning.bins.tolist()
categories = ['{0:.2f} - {1:.2f}'.format(binedges[i], binedges[i+1])
for i in range(len(binedges)-1)]
cmap = norm_cmap(values, cmap, Normalize, cm, vmin=vmin, vmax=vmax)
if ax is None:
fig, ax = plt.subplots(figsize=figsize)
ax.set_aspect('equal')
for geom, value in zip(s.geometry, values):
if color is None:
col = cmap.to_rgba(value)
else:
col = color
if geom.type == 'Polygon' or geom.type == 'MultiPolygon':
plot_multipolygon(ax, geom, facecolor=col, linewidth=linewidth, **color_kwds)
elif geom.type == 'LineString' or geom.type == 'MultiLineString':
plot_multilinestring(ax, geom, color=col, linewidth=linewidth, **color_kwds)
elif geom.type == 'Point':
plot_point(ax, geom, color=col, **color_kwds)
if legend:
if categorical:
patches = []
for value, cat in enumerate(categories):
patches.append(Line2D([0], [0], linestyle="none",
marker="o", alpha=color_kwds.get('alpha', 0.5),
markersize=10, markerfacecolor=cmap.to_rgba(value)))
ax.legend(patches, categories, numpoints=1, loc='best')
else:
# TODO: show a colorbar
raise NotImplementedError
plt.draw()
return ax
def __pysal_choro(values, scheme, k=5):
""" Wrapper for choropleth schemes from PySAL for use with plot_dataframe
Parameters
----------
values
Series to be plotted
scheme
        pysal.esda.mapclassify classification scheme
['Equal_interval'|'Quantiles'|'Fisher_Jenks']
k
number of classes (2 <= k <=9)
Returns
-------
binning
Binning objects that holds the Series with values replaced with
class identifier and the bins.
"""
try:
from pysal.esda.mapclassify import Quantiles, Equal_Interval, Fisher_Jenks
schemes = {}
schemes['equal_interval'] = Equal_Interval
schemes['quantiles'] = Quantiles
schemes['fisher_jenks'] = Fisher_Jenks
s0 = scheme
scheme = scheme.lower()
if scheme not in schemes:
scheme = 'quantiles'
warnings.warn('Unrecognized scheme "{0}". Using "Quantiles" '
'instead'.format(s0), UserWarning, stacklevel=3)
if k < 2 or k > 9:
warnings.warn('Invalid k: {0} (2 <= k <= 9), setting k=5 '
'(default)'.format(k), UserWarning, stacklevel=3)
k = 5
binning = schemes[scheme](values, k)
return binning
except ImportError:
raise ImportError("PySAL is required to use the 'scheme' keyword")
def norm_cmap(values, cmap, normalize, cm, vmin=None, vmax=None):
""" Normalize and set colormap
Parameters
----------
values
Series or array to be normalized
cmap
matplotlib Colormap
normalize
matplotlib.colors.Normalize
cm
matplotlib.cm
vmin
Minimum value of colormap. If None, uses min(values).
vmax
Maximum value of colormap. If None, uses max(values).
Returns
-------
n_cmap
mapping of normalized values to colormap (cmap)
"""
mn = min(values) if vmin is None else vmin
mx = max(values) if vmax is None else vmax
norm = normalize(vmin=mn, vmax=mx)
n_cmap = cm.ScalarMappable(norm=norm, cmap=cmap)
return n_cmap
| bsd-3-clause | -1,790,349,565,041,138,400 | 34.431635 | 138 | 0.590875 | false |
spillai/crisp | crisp/rotations.py | 1 | 9499 | # -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
"""
Rotation handling module
"""
__author__ = "Hannes Ovrén"
__copyright__ = "Copyright 2013, Hannes Ovrén"
__license__ = "GPL"
__email__ = "[email protected]"
import numpy as np
from numpy.testing import assert_almost_equal
from . import ransac
#------------------------------------------------------------------------------
def procrustes(X, Y, remove_mean=False):
"""Orthogonal procrustes problem solver
The procrustes problem finds the best rotation R, and translation t
where
X = R*Y + t
The number of points in X and Y must be at least 2.
For the minimal case of two points, a third point is temporarily created
and used for the estimation.
Parameters
-----------------
X : (3, N) ndarray
First set of points
Y : (3, N) ndarray
Second set of points
remove_mean : bool
If true, the mean is removed from X and Y before solving the
procrustes problem. Can yield better results in some applications.
Returns
-----------------
R : (3,3) ndarray
Rotation component
t : (3,) ndarray
Translation component (None if remove_mean is False)
"""
assert X.shape == Y.shape
    assert X.shape[1] > 1
    # Minimal case, create third point using cross product
    if X.shape[1] == 2:
        X3 = np.cross(X[:,0], X[:,1], axis=0).reshape(3, 1)
        X = np.hstack((X, X3 / np.linalg.norm(X3)))
        Y3 = np.cross(Y[:,0], Y[:,1], axis=0).reshape(3, 1)
        Y = np.hstack((Y, Y3 / np.linalg.norm(Y3)))
D, N = X.shape[:2]
if remove_mean:
mx = np.mean(X, axis=1).reshape(D, 1)
my = np.mean(Y, axis=1).reshape(D, 1)
Xhat = X - mx
Yhat = Y - my
else:
Xhat = X
Yhat = Y
(U, S, V) = np.linalg.svd((Xhat).dot(Yhat.T))
Dtmp = np.eye(Xhat.shape[0])
Dtmp[-1,-1] = np.linalg.det(U.dot(V))
R_est = U.dot(Dtmp).dot(V)
# Now X=R_est*(Y-my)+mx=R_est*Y+t_est
if remove_mean:
t_est= mx - R_est.dot(my)
else:
t_est = None
return (R_est, t_est)
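def _procrustes_self_test(n_points=5, seed=0):
    """Usage sketch (illustrative helper, not part of the original API): rotate a
    random point set and check that procrustes() recovers the rotation exactly.
    """
    rng = np.random.RandomState(seed)
    v = rng.randn(3)
    v /= np.linalg.norm(v)
    R = axis_angle_to_rotation_matrix(v, 0.5)  # defined further down in this module
    Y = rng.randn(3, n_points)
    X = R.dot(Y)
    R_est, _ = procrustes(X, Y, remove_mean=False)
    return np.allclose(R, R_est)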
#--------------------------------------------------------------------------
def rotation_matrix_to_axis_angle(R):
"""Convert a 3D rotation matrix to a 3D axis angle representation
Parameters
---------------
R : (3,3) array
Rotation matrix
Returns
----------------
v : (3,) array
(Unit-) rotation angle
theta : float
Angle of rotations, in radians
Note
--------------
This uses the algorithm as described in Multiple View Geometry, p. 584
"""
assert R.shape == (3,3)
assert_almost_equal(np.linalg.det(R), 1.0, err_msg="Not a rotation matrix: determinant was not 1")
S, V = np.linalg.eig(R)
k = np.argmin(np.abs(S - 1.))
s = S[k]
assert_almost_equal(s, 1.0, err_msg="Not a rotation matrix: No eigen value s=1")
v = np.real(V[:, k]) # Result is generally complex
vhat = np.array([R[2,1] - R[1,2], R[0,2] - R[2,0], R[1,0] - R[0,1]])
sintheta = 0.5 * np.dot(v, vhat)
costheta = 0.5 * (np.trace(R) - 1)
theta = np.arctan2(sintheta, costheta)
return (v, theta)
#--------------------------------------------------------------------------
def axis_angle_to_rotation_matrix(v, theta):
"""Convert rotation from axis-angle to rotation matrix
Parameters
---------------
v : (3,) ndarray
Rotation axis (normalized)
theta : float
Rotation angle (radians)
Returns
----------------
R : (3,3) ndarray
Rotation matrix
"""
if np.abs(theta) < np.spacing(1):
return np.eye(3)
else:
v = v.reshape(3,1)
np.testing.assert_almost_equal(np.linalg.norm(v), 1.)
        vx = np.array([[0, -v[2,0], v[1,0]],
                       [v[2,0], 0, -v[0,0]],
                       [-v[1,0], v[0,0], 0]])
vvt = np.dot(v, v.T)
R = np.eye(3)*np.cos(theta) + (1 - np.cos(theta))*vvt + vx * np.sin(theta)
return R
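def _axis_angle_round_trip_demo(theta=0.3):
    """Usage sketch (illustrative helper): axis-angle -> matrix -> axis-angle for a
    rotation about the z axis; the recovered pair describes the same rotation.
    """
    v = np.array([0., 0., 1.])
    R = axis_angle_to_rotation_matrix(v, theta)
    return rotation_matrix_to_axis_angle(R)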
#--------------------------------------------------------------------------
def quat_to_rotation_matrix(q):
"""Convert unit quaternion to rotation matrix
Parameters
-------------
q : (4,) ndarray
Unit quaternion, scalar as first element
Returns
----------------
R : (3,3) ndarray
Rotation matrix
"""
q = q.flatten()
assert q.size == 4
assert_almost_equal(np.linalg.norm(q), 1.0, err_msg="Not a unit quaternion!")
qq = q ** 2
R = np.array([[qq[0] + qq[1] - qq[2] - qq[3], 2*q[1]*q[2] -
2*q[0]*q[3], 2*q[1]*q[3] + 2*q[0]*q[2]],
[2*q[1]*q[2] + 2*q[0]*q[3], qq[0] - qq[1] + qq[2] -
qq[3], 2*q[2]*q[3] - 2*q[0]*q[1]],
[2*q[1]*q[3] - 2*q[0]*q[2], 2*q[2]*q[3] + 2*q[0]*q[1],
qq[0] - qq[1] - qq[2] + qq[3]]])
return R
#--------------------------------------------------------------------------
def integrate_gyro_quaternion(gyro_ts, gyro_data):
"""Integrate angular velocities to rotations
Parameters
---------------
gyro_ts : ndarray
Timestamps
    gyro_data : (N, 3) ndarray
Angular velocity measurements
Returns
---------------
rotations : (4, N) ndarray
Rotation sequence as unit quaternions (first element scalar)
"""
#NB: Quaternion q = [a, n1, n2, n3], scalar first
q_list = np.zeros((gyro_ts.shape[0], 4)) # Nx4 quaternion list
q_list[0,:] = np.array([1, 0, 0, 0]) # Initial rotation (no rotation)
# Iterate over all (except first)
for i in range(1, gyro_ts.size):
w = gyro_data[i]
dt = gyro_ts[i] - gyro_ts[i - 1]
qprev = q_list[i - 1]
A = np.array([[0, -w[0], -w[1], -w[2]],
[w[0], 0, w[2], -w[1]],
[w[1], -w[2], 0, w[0]],
[w[2], w[1], -w[0], 0]])
qnew = (np.eye(4) + (dt/2.0) * A).dot(qprev)
qnorm = np.sqrt(np.sum(qnew ** 2))
qnew /= qnorm
q_list[i] = qnew
return q_list
#--------------------------------------------------------------------------
def slerp(q1, q2, u):
"""SLERP: Spherical linear interpolation between two unit quaternions.
Parameters
------------
q1 : (4, ) ndarray
Unit quaternion (first element scalar)
q2 : (4, ) ndarray
Unit quaternion (first element scalar)
u : float
Interpolation factor in range [0,1] where 0 is first quaternion
and 1 is second quaternion.
Returns
-----------
q : (4,) ndarray
The interpolated unit quaternion
"""
q1 = q1.flatten()
q2 = q2.flatten()
assert q1.shape == q2.shape
assert q1.size == 4
    costheta = np.sum(q1 * q2) # cosine of the angle between the unit quaternions
theta = np.arccos(costheta)
f1 = np.sin((1.0 - u)*theta) / np.sin(theta)
f2 = np.sin(u*theta) / np.sin(theta)
# Shortest path is wanted, so conjugate if necessary
if costheta < 0:
f1 = -f1
q = f1*q1 + f2*q2
q = q / np.sqrt(np.sum(q**2)) # Normalize
else:
q = f1*q1 + f2*q2
q = q / np.sqrt(np.sum(q**2)) # Normalize
return q
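def _slerp_demo(u=0.5):
    """Usage sketch (illustrative helper): interpolate between the identity quaternion
    and a 90-degree rotation about z; u=0.5 yields the 45-degree rotation.
    """
    q1 = np.array([1., 0., 0., 0.])
    q2 = np.array([np.cos(np.pi / 4), 0., 0., np.sin(np.pi / 4)])
    return slerp(q1, q2, u)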
#--------------------------------------------------------------------------
def estimate_rotation_procrustes_ransac(x, y, camera, threshold, inlier_ratio=0.75, do_translation=False):
"""Calculate rotation between two sets of image coordinates using ransac.
Inlier criteria is the reprojection error of y into image 1.
Parameters
-------------------------
x : array 2xN image coordinates in image 1
y : array 2xN image coordinates in image 2
camera : Camera model
threshold : float pixel distance threshold to accept as inlier
do_translation : bool Try to estimate the translation as well
Returns
------------------------
R : array 3x3 The rotation that best fulfills X = RY
t : array 3x1 translation if do_translation is False
residual : array pixel distances ||x - xhat|| where xhat ~ KRY (and lens distorsion)
inliers : array Indices of the points (in X and Y) that are RANSAC inliers
"""
assert x.shape == y.shape
assert x.shape[0] == 2
X = camera.unproject(x)
Y = camera.unproject(y)
data = np.vstack((X, Y, x))
assert data.shape[0] == 8
model_func = lambda data: procrustes(data[:3], data[3:6], remove_mean=do_translation)
def eval_func(model, data):
Y = data[3:6].reshape(3,-1)
x = data[6:].reshape(2,-1)
R, t = model
Xhat = np.dot(R, Y) if t is None else np.dot(R, Y) + t
xhat = camera.project(Xhat)
dist = np.sqrt(np.sum((x-xhat)**2, axis=0))
return dist
inlier_selection_prob = 0.99999
model_points = 2
ransac_iterations = int(np.log(1 - inlier_selection_prob) / np.log(1-inlier_ratio**model_points))
model_est, ransac_consensus_idx = ransac.RANSAC(model_func, eval_func, data, model_points, ransac_iterations, threshold, recalculate=True)
if model_est is not None:
(R, t) = model_est
dist = eval_func((R, t), data)
else:
dist = None
R, t = None, None
ransac_consensus_idx = []
return R, t, dist, ransac_consensus_idx
| gpl-3.0 | -1,031,934,485,086,081,700 | 29.149206 | 146 | 0.502896 | false |
prim/ocempgui | doc/examples/table.py | 1 | 2042 | # Table examples.
from ocempgui.widgets import Renderer, Table, Label, Button
from ocempgui.widgets.Constants import *
def create_table_view ():
# Crate and display a Table.
table = Table (9, 2)
table.spacing = 5
table.topleft = 5, 5
label = Label ("Nonaligned wide Label")
table.add_child (0, 0, label)
table.add_child (0, 1, Button ("Simple Button"))
label = Label ("Top align")
table.add_child (1, 0, label)
table.set_align (1, 0, ALIGN_TOP)
table.add_child (1, 1, Button ("Simple Button"))
label = Label ("Bottom align")
table.add_child (2, 0, label)
table.set_align (2, 0, ALIGN_BOTTOM)
table.add_child (2, 1, Button ("Simple Button"))
label = Label ("Left align")
table.add_child (3, 0, label)
table.set_align (3, 0, ALIGN_LEFT)
table.add_child (3, 1, Button ("Simple Button"))
label = Label ("Right align")
table.add_child (4, 0, label)
table.set_align (4, 0, ALIGN_RIGHT)
table.add_child (4, 1, Button ("Simple Button"))
label = Label ("Topleft align")
table.add_child (5, 0, label)
table.set_align (5, 0, ALIGN_TOP | ALIGN_LEFT)
table.add_child (5, 1, Button ("Simple Button"))
label = Label ("Topright align")
table.add_child (6, 0, label)
table.set_align (6, 0, ALIGN_TOP | ALIGN_RIGHT)
table.add_child (6, 1, Button ("Simple Button"))
label = Label ("Bottomleft align")
table.add_child (7, 0, label)
table.set_align (7, 0, ALIGN_BOTTOM |ALIGN_LEFT)
table.add_child (7, 1, Button ("Simple Button"))
label = Label ("Bottomright align")
table.add_child (8, 0, label)
table.set_align (8, 0, ALIGN_BOTTOM |ALIGN_RIGHT)
table.add_child (8, 1, Button ("Simple Button"))
return table
if __name__ == "__main__":
# Initialize the drawing window.
re = Renderer ()
re.create_screen (250, 350)
re.title = "Table examples"
re.color = (234, 228, 223)
re.add_widget (create_table_view ())
# Start the main rendering loop.
re.start ()
| bsd-2-clause | 528,070,260,686,461,950 | 30.415385 | 59 | 0.616552 | false |
MERegistro/meregistro | meregistro/apps/reportes/views/normativa_jurisdiccional.py | 1 | 1076 | # -*- coding: UTF-8 -*-
from django.http import HttpResponse, HttpResponseRedirect
from datetime import datetime, date
from apps.seguridad.decorators import login_required, credential_required
from apps.seguridad.models import Usuario, Perfil
from apps.titulos.models.NormativaJurisdiccional import NormativaJurisdiccional
import csv
from apps.reportes.models import Reporte
@login_required
@credential_required('tit_nor_jur_consulta')
def normativas_jurisdiccionales(request, q):
filename = 'normativas_jurisdiccionales_' + str(date.today()) + '.xls'
reporte = Reporte(headers=['NUMERO/AÑO', 'TIPO', 'JURISDICCION', 'OTORGADA POR', 'OBSERVACIONES', 'ESTADO'], filename=filename)
for nj in q:
if nj.estado is None:
estado_nombre = ''
else:
estado_nombre = nj.estado.nombre.encode('utf8')
reporte.rows.append([nj.numero_anio.encode('utf8'), unicode(nj.tipo_normativa_jurisdiccional), unicode(nj.jurisdiccion), unicode(nj.otorgada_por), nj.observaciones.encode('utf8'), estado_nombre])
return reporte.as_csv()
| bsd-3-clause | -264,908,265,114,107,870 | 45.73913 | 203 | 0.733023 | false |
mozbhearsum/balrog | auslib/test/web/api/test_releases.py | 1 | 2720 | from auslib.test.web.api.base import CommonTestBase
class TestPublicReleasesAPI(CommonTestBase):
def test_get_releases(self):
ret = self.public_client.get("/api/v1/releases")
got = ret.get_json()
self.assertEqual(len(got["releases"]), 3)
self.assertIsInstance(got["releases"][0], dict)
releases = [(release["name"], release["product"]) for release in got["releases"]]
self.assertIn(("Fennec.55.0a1", "Fennec"), releases)
self.assertIn(("Firefox.55.0a1", "Firefox"), releases)
self.assertIn(("q", "q"), releases)
def test_get_releases_names(self):
ret = self.public_client.get("/api/v1/releases?names_only=1")
got = ret.get_json()
self.assertEqual(len(got["names"]), 3)
self.assertIn("Fennec.55.0a1", got["names"])
self.assertIn("Firefox.55.0a1", got["names"])
self.assertIn("q", got["names"])
def test_get_releases_by_product(self):
ret = self.public_client.get("/api/v1/releases?product=Fennec")
got = ret.get_json()
self.assertEqual(len(got["releases"]), 1)
self.assertEqual(got["releases"][0]["name"], "Fennec.55.0a1")
def test_get_releases_by_name_prefix(self):
ret = self.public_client.get("/api/v1/releases?name_prefix=F")
got = ret.get_json()
self.assertEqual(len(got["releases"]), 2)
releases = [(release["name"], release["product"]) for release in got["releases"]]
self.assertIn(("Firefox.55.0a1", "Firefox"), releases)
self.assertIn(("Fennec.55.0a1", "Fennec"), releases)
def test_get_release(self):
release = "Firefox.55.0a1"
ret = self.public_client.get("/api/v1/releases/{}".format(release))
        self.assertEqual(ret.status_code, 200)
got = ret.get_json()
self.assertNotIn("X-CSRF-Token", ret.headers)
self.assertEqual(got["name"], release)
self.assertEqual(got["schema_version"], 1)
self.assertIn("p", got["platforms"])
platform = got["platforms"]["p"]
self.assertIn("l", platform["locales"])
def test_get_release_locale(self):
ret = self.public_client.get("/api/v1/releases/Firefox.55.0a1/builds/p/l")
self.assertEqual(ret.status_code, 200)
self.assertEqual(ret.headers["X-Data-Version"], "1")
got = ret.get_json()
self.assertEqual(got["buildID"], "5")
def test_get_release_locale_not_found(self):
ret = self.public_client.get("/api/v1/releases/Firefox.55.0a1/builds/404/l")
self.assertEqual(ret.status_code, 404)
ret = self.public_client.get("/api/v1/releases/Firefox.55.0a1/builds/p/404")
self.assertEqual(ret.status_code, 404)
| mpl-2.0 | -7,702,099,460,482,028,000 | 44.333333 | 89 | 0.621691 | false |
steffgrez/aio-jsonrpc-2.0 | aio_jsonrpc_20/response.py | 1 | 1779 |
class ResponseMaker(object):
    __slots__ = ['error_verbose']
def __init__(self, error_verbose=True):
self.error_verbose = error_verbose
def get_response(self, result, request_id):
return {
"jsonrpc": "2.0",
"result": result,
"id": request_id
}
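    # For instance (sketch): ResponseMaker().get_response(42, request_id=1) returns
    # {"jsonrpc": "2.0", "result": 42, "id": 1}, ready to be serialized to JSON.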
def get_error(self, code, message, data=None, request_id=None):
result = {
"jsonrpc": "2.0",
"error": {
"code": code,
"message": message,
},
"id": request_id
}
if self.error_verbose and data:
result["error"]['data'] = data
return result
def get_parse_error(self, data=None, request_id=None):
return self.get_error(
-32700, 'Parse error', data=data, request_id=request_id
)
def get_invalid_request(self, data=None, request_id=None):
return self.get_error(
-32600, 'Invalid Request', data=data, request_id=request_id
)
def get_method_not_found(self, data=None, request_id=None):
return self.get_error(
-32601, 'Method not found', data=data, request_id=request_id
)
def get_invalid_params(self, data=None, request_id=None):
return self.get_error(
-32602, 'Invalid params', data=data, request_id=request_id
)
def get_internal_error(self, data=None, request_id=None):
return self.get_error(
-32603, 'Internal error', data=data, request_id=request_id
)
def get_server_error(self, code, data, request_id=None):
return self.get_error(
code=code,
message='Server error',
data=data,
request_id=request_id
)
| mit | 4,117,111,114,315,011,600 | 27.693548 | 72 | 0.540753 | false |
markovmodel/molPX | molpx/_linkutils.py | 1 | 18471 | import numpy as _np
from matplotlib.widgets import AxesWidget as _AxesWidget
from matplotlib.colors import is_color_like as _is_color_like
from matplotlib.axes import Axes as _mplAxes
from matplotlib.figure import Figure as _mplFigure
from IPython.display import display as _ipydisplay
from pyemma.util.types import is_int as _is_int
from scipy.spatial import cKDTree as _cKDTree
from ._bmutils import get_ascending_coord_idx
from mdtraj import Trajectory as _mdTrajectory
from nglview import NGLWidget as _NGLwdg
from ipywidgets import HBox as _HBox, VBox as _VBox
def pts_per_axis_unit(mplax, pt_per_inch=72):
r"""
Return how many pt per axis unit of a given maptplotlib axis a figure has
Parameters
----------
mplax : :obj:`matplotlib.axes._subplots.AxesSubplot`
pt_per_inch : how many points are in an inch (this number should not change)
Returns
--------
pt_per_xunit, pt_per_yunit
"""
# matplotlib voodoo
# Get bounding box
bbox = mplax.get_window_extent().transformed(mplax.get_figure().dpi_scale_trans.inverted())
span_inch = _np.array([bbox.width, bbox.height], ndmin=2).T
span_units = [mplax.get_xlim(), mplax.get_ylim()]
span_units = _np.diff(span_units, axis=1)
inch_per_unit = span_inch / span_units
return inch_per_unit * pt_per_inch
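def _pts_per_axis_unit_demo():
    """Usage sketch (illustrative helper, not part of molPX): how many points one
    x- and one y-unit span for a freshly created matplotlib axis."""
    from matplotlib import pyplot as _plt
    _fig, _ax = _plt.subplots()
    return pts_per_axis_unit(_ax)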
def update2Dlines(iline, x, y):
"""
provide a common interface to update objects on the plot to a new position (x,y) depending
on whether they are hlines, vlines, dots etc
Parameters
----------
iline: :obj:`matplotlib.lines.Line2D` object
x : float with new position
y : float with new position
"""
# TODO FIND OUT A CLEANER WAY TO DO THIS (dict or class)
if not hasattr(iline,'whatisthis'):
        raise AttributeError("This method will only work if iline has the attribute 'whatisthis'")
else:
# TODO find cleaner way of distinguishing these 2Dlines
if iline.whatisthis in ['dot']:
iline.set_xdata((x))
iline.set_ydata((y))
elif iline.whatisthis in ['lineh']:
iline.set_ydata((y,y))
elif iline.whatisthis in ['linev']:
iline.set_xdata((x,x))
else:
# TODO: FIND OUT WNY EXCEPTIONS ARE NOT BEING RAISED
raise TypeError("what is this type of 2Dline?")
class ClickOnAxisListener(object):
def __init__(self, ngl_wdg, crosshairs, showclick_objs, ax, pos,
list_mpl_objects_to_update):
self.ngl_wdg = ngl_wdg
self.crosshairs = crosshairs
self.showclick_objs = showclick_objs
self.ax = ax
self.pos = pos
self.list_mpl_objects_to_update = list_mpl_objects_to_update
self.list_of_dots = [None]*self.pos.shape[0]
self.fig_size = self.ax.figure.get_size_inches()
self.kdtree = None
def build_tree(self):
# Use ax.transData to compute distance in pixels
# regardelss of the axes units (http://matplotlib.org/users/transforms_tutorial.html)
# Corresponds to the visual distance between clicked point and target point
self.kdtree = _cKDTree(self.ax.transData.transform(self.pos))
@property
def figure_changed_size(self):
return not _np.allclose(self.fig_size, self.ax.figure.get_size_inches())
def __call__(self, event):
# Wait for the first click or a a figsize change
# to build the kdtree
if self.figure_changed_size or self.kdtree is None:
self.build_tree()
self.fig_size = self.ax.figure.get_size_inches()
# Was the click inside the bounding box?
if self.ax.get_window_extent().contains(event.x, event.y):
if self.crosshairs:
for iline in self.showclick_objs:
update2Dlines(iline, event.xdata, event.ydata)
_, index = self.kdtree.query(x=[event.x, event.y], k=1)
for idot in self.list_mpl_objects_to_update:
update2Dlines(idot, self.pos[index, 0], self.pos[index, 1])
self.ngl_wdg.isClick = True
if hasattr(self.ngl_wdg, '_GeomsInWid'):
# We're in a sticky situation
if event.button == 1:
# Pressed left
self.ngl_wdg._GeomsInWid[index].show()
if self.list_of_dots[index] is None:
# Plot and store the dot in case there wasn't
self.list_of_dots[index] = self.ax.plot(self.pos[index, 0], self.pos[index, 1], 'o',
c=self.ngl_wdg._GeomsInWid[index].color_dot, ms=7)[0]
elif event.button in [2, 3]:
# Pressed right or middle
self.ngl_wdg._GeomsInWid[index].hide()
# Delete dot if the geom is not visible anymore
if not self.ngl_wdg._GeomsInWid[index].is_visible() and self.list_of_dots[index] is not None:
self.list_of_dots[index].remove()
self.list_of_dots[index] = None
else:
# We're not sticky, just go to the frame
self.ngl_wdg.frame = index
class MolPXBox(object):
r"""
Class created to be the parent class of MolPXHBox and MolPXVBox, which inherit from
MolPXBox and the ipywidget classes HBox and VBox (*args and **kwargs are for these)
The sole purpose of this class is to avoid monkey-patching elsewhere in the code,
this class creates them as empty lists on instantiation.
It also implements two methods:
* self.display (=IPython.display(self)
* append_if_existing
"""
def __init__(self, *args, **kwargs):
self.linked_axes = []
self.linked_mdgeoms = []
self.linked_ngl_wdgs = []
self.linked_data_arrays = []
self.linked_ax_wdgs = []
self.linked_figs = []
def display(self):
_ipydisplay(self)
def append_if_existing(self, args0, startswith_arg="linked_"):
r"""
args0 is the tuple containing all widgets to be included in the MolPXBox
this tuple can contain itself other MolPXWidget
so we iterate through them and appending linked stuff
"""
for iarg in args0:
for attrname in dir(iarg):
if attrname.startswith(startswith_arg) and len(iarg.__dict__[attrname]) != 0:
self.__dict__[attrname] += iarg.__dict__[attrname]
def auto_append_these_mpx_attrs(iobj, *attrs):
r""" The attribute s name is automatically derived
from the attribute s type via a type:name dictionary
*attrs : any number of unnamed objects of the types in type2attrname.
If the object type is a list, it will be flattened prior to attempting
"""
attrs_flat_list = []
for sublist in attrs:
if isinstance(sublist, list):
for item in sublist:
attrs_flat_list.append(item)
else:
attrs_flat_list.append(sublist)
# Go through the arguments and assign them an attrname according to their types
for iattr in attrs_flat_list:
for attrname, itype in type2attrname.items():
if isinstance(iattr, itype):
iobj.__dict__[attrname].append(iattr)
break
class MolPXHBox(_HBox, MolPXBox):
def __init__(self, *args, **kwargs):
super(MolPXHBox, self).__init__(*args, **kwargs)
self.append_if_existing(args[0])
class MolPXVBox(_VBox, MolPXBox):
def __init__(self, *args, **kwargs):
super(MolPXVBox, self).__init__(*args, **kwargs)
self.append_if_existing(args[0])
type2attrname = {"linked_axes": _mplAxes,
"linked_mdgeoms": _mdTrajectory,
"linked_ngl_wdgs": _NGLwdg,
"linked_data_arrays": _np.ndarray,
"linked_ax_wdgs": _AxesWidget,
"linked_figs": _mplFigure,
}
class ChangeInNGLWidgetListener(object):
def __init__(self, ngl_wdg, list_mpl_objects_to_update, pos):
self.ngl_wdg = ngl_wdg
self.list_mpl_objects_to_update = list_mpl_objects_to_update
self.pos = pos
def __call__(self, change):
self.ngl_wdg.isClick = False
_idx = change["new"]
try:
for idot in self.list_mpl_objects_to_update:
update2Dlines(idot, self.pos[_idx, 0], self.pos[_idx, 1])
#print("caught index error with index %s (new=%s, old=%s)" % (_idx, change["new"], change["old"]))
except IndexError as e:
for idot in self.list_mpl_objects_to_update:
update2Dlines(idot, self.pos[0, 0], self.pos[0, 1])
print("caught index error with index %s (new=%s, old=%s)" % (_idx, change["new"], change["old"]))
#print("set xy = (%s, %s)" % (x[_idx], y[_idx]))
class GeometryInNGLWidget(object):
r"""
    returns an object that is aware of where its geometries are located in the NGLWidget and of their representation status
The object exposes two methods, show and hide, to automagically know what to do
"""
def __init__(self, geom, ngl_wdg, list_of_repr_dicts=None,
color_molecule_hex='Element', n_small=10):
self.lives_at_components = []
self.geom = geom
self.ngl_wdg = ngl_wdg
self.have_repr = []
sticky_rep = 'cartoon'
if self.geom[0].top.n_residues < n_small:
sticky_rep = 'ball+stick'
if list_of_repr_dicts is None:
list_of_repr_dicts = [{'repr_type': sticky_rep, 'selection': 'all'}]
self.list_of_repr_dicts = list_of_repr_dicts
self.color_molecule_hex = color_molecule_hex
self.color_dot = color_molecule_hex
if isinstance(self.color_molecule_hex, str) and color_molecule_hex == 'Element':
self.color_dot = 'red'
def show(self):
# Show can mean either
# - add a whole new component (case 1)
# - add the representation again to a representation-less component (case 2)
# CASE 1
if self.is_empty() or self.all_reps_are_on():
if len(self.have_repr) == self.geom.n_frames:
print("arrived at the end")
component = None
else:
idx = len(self.have_repr)
self.ngl_wdg.add_trajectory(self.geom[idx])
self.lives_at_components.append(len(self.ngl_wdg._ngl_component_ids) - 1)
self.ngl_wdg.clear_representations(component=self.lives_at_components[-1])
self.have_repr.append(True)
component = self.lives_at_components[-1]
# CASE 2
elif self.any_rep_is_off(): # Some are living in the widget already but have no rep
idx = _np.argwhere(~_np.array(self.have_repr))[0].squeeze()
component = self.lives_at_components[idx]
self.have_repr[idx] = True
else:
raise Exception("This situation should not arise. This is a bug")
if component is not None:
for irepr in self.list_of_repr_dicts:
self.ngl_wdg.add_representation(irepr['repr_type'],
selection=irepr['selection'],
component=component,
color=self.color_molecule_hex)
def hide(self):
if self.is_empty() or self.all_reps_are_off():
print("nothing to hide")
pass
elif self.any_rep_is_on(): # There's represented components already in the widget
idx = _np.argwhere(self.have_repr)[-1].squeeze()
self.ngl_wdg.clear_representations(component=self.lives_at_components[idx])
self.have_repr[idx] = False
else:
raise Exception("This situation should not arise. This is a bug")
# Quickhand methods for knowing what's up
def is_empty(self):
if len(self.have_repr) == 0:
return True
else:
return False
def all_reps_are_off(self):
if len(self.have_repr) == 0:
return True
else:
return _np.all(~_np.array(self.have_repr))
def all_reps_are_on(self):
if len(self.have_repr) == 0:
return False
else:
return _np.all(self.have_repr)
def any_rep_is_off(self):
return _np.any(~_np.array(self.have_repr))
def any_rep_is_on(self):
return _np.any(self.have_repr)
def is_visible(self):
if self.is_empty() or self.all_reps_are_off():
return False
else:
return True
def link_ax_w_pos_2_nglwidget(ax, pos, ngl_wdg,
crosshairs=True,
dot_color='red',
band_width=None,
radius=False,
directionality=None,
exclude_coord=None,
):
r"""
Initial idea for this function comes from @arose, the rest is @gph82
Parameters
----------
ax : matplotlib axis object to be linked
pos : ndarray of shape (N,2) with the positions of the geoms in the ngl_wdg
crosshairs : Boolean or str
        If True, a crosshair will show where the mouse-click occurred. If 'h' or 'v', only the horizontal or
vertical line of the crosshair will be shown, respectively. If False, no crosshair will appear
dot_color : Anything that yields matplotlib.colors.is_color_like(dot_color)==True
Default is 'red'. dot_color='None' yields no dot
band_width : None or iterable of len = 2
If band_width is not None, the method tries to figure out on its own if
there is an ascending coordinate and will include a moving band on :obj:ax
of this width (in units of the axis along which the band is plotted)
If the method cannot find an ascending coordinate, an exception is thrown
directionality : str or None, default is None
If not None, directionality can be either 'a2w' or 'w2a', meaning that connectivity
between axis and widget will be only established as
* 'a2w' : action in axis triggers action in widget, but not the other way around
* 'w2a' : action in widget triggers action in axis, but not the other way around
exclude_coord : None or int , default is None
The excluded coordinate will not be considered when computing the nearest-point-to-click.
Typical use case is for visualize.traj to only compute distances horizontally along the time axis
Returns
-------
axes_widget : :obj:`matplotlib.Axes.Axeswidget` that has been linked to the NGLWidget
"""
assert directionality in [None, 'a2w', 'w2a'], "The directionality parameter has to be in [None, 'a2w', 'w2a'] " \
"not %s"%directionality
assert crosshairs in [True, False, 'h', 'v'], "The crosshairs parameter has to be in [True, False, 'h','v'], " \
"not %s" % crosshairs
ipos = _np.copy(pos)
if _is_int(exclude_coord):
ipos[:,exclude_coord] = 0
# Are we in a sticky situation?
if hasattr(ngl_wdg, '_GeomsInWid'):
sticky = True
else:
assert ngl_wdg.trajectory_0.n_frames == pos.shape[0], \
("Mismatching frame numbers %u vs %u" % (ngl_wdg.trajectory_0.n_frames, pos.shape[0]))
sticky = False
# Basic interactive objects
showclick_objs = []
if crosshairs in [True, 'h']:
lineh = ax.axhline(ax.get_ybound()[0], c="black", ls='--')
setattr(lineh, 'whatisthis', 'lineh')
showclick_objs.append(lineh)
if crosshairs in [True, 'v']:
linev = ax.axvline(ax.get_xbound()[0], c="black", ls='--')
setattr(linev, 'whatisthis', 'linev')
showclick_objs.append(linev)
if _is_color_like(dot_color):
pass
else:
raise TypeError('dot_color should be a matplotlib color')
dot = ax.plot(pos[0,0],pos[0,1], 'o', c=dot_color, ms=7, zorder=100)[0]
setattr(dot,'whatisthis','dot')
list_mpl_objects_to_update = [dot]
# Other objects, related to smoothing options
if band_width is not None:
if radius:
band_width_in_pts = int(_np.round(pts_per_axis_unit(ax).mean() * _np.mean(band_width)))
rad = ax.plot(pos[0, 0], pos[0, 1], 'o',
ms=_np.round(band_width_in_pts),
c='green', alpha=.25, markeredgecolor='None')[0]
setattr(rad, 'whatisthis', 'dot')
if not sticky:
list_mpl_objects_to_update.append(rad)
else:
# print("Band_width(x,y) is %s" % (band_width))
coord_idx = get_ascending_coord_idx(pos)
if _np.ndim(coord_idx)>0 and len(coord_idx)==0:
raise ValueError("Must have an ascending coordinate for band_width usage")
band_width_in_pts = int(_np.round(pts_per_axis_unit(ax)[coord_idx] * band_width[coord_idx]))
# print("Band_width in %s is %s pts"%('xy'[coord_idx], band_width_in_pts))
band_call = [ax.axvline, ax.axhline][coord_idx]
band_init = [ax.get_xbound, ax.get_ybound][coord_idx]
band_type = ['linev', 'lineh'][coord_idx]
band = band_call(band_init()[0],
lw=band_width_in_pts,
c="green", ls='-',
alpha=.25)
setattr(band, 'whatisthis', band_type)
list_mpl_objects_to_update.append(band)
ngl_wdg.isClick = False
CLA_listener = ClickOnAxisListener(ngl_wdg, crosshairs, showclick_objs, ax, pos,
list_mpl_objects_to_update)
NGL_listener = ChangeInNGLWidgetListener(ngl_wdg, list_mpl_objects_to_update, pos)
# Connect axes to widget
axes_widget = _AxesWidget(ax)
if directionality in [None, 'a2w']:
axes_widget.connect_event('button_release_event', CLA_listener)
# Connect widget to axes
if directionality in [None, 'w2a']:
ngl_wdg.observe(NGL_listener, "frame", "change")
ngl_wdg.center()
return axes_widget
| lgpl-3.0 | -6,859,057,174,613,301,000 | 38.133475 | 118 | 0.584592 | false |
aykut/django-oscar | oscar/apps/shipping/tests.py | 1 | 4176 | from decimal import Decimal as D
from django.utils import unittest
from django.test.client import Client
from oscar.apps.shipping.methods import FreeShipping, FixedPriceShipping
from oscar.apps.shipping.models import OrderAndItemLevelChargeMethod
from oscar.apps.basket.models import Basket
from oscar.test.helpers import create_product
from oscar.test.decorators import dataProvider
class FreeShippingTest(unittest.TestCase):
def test_shipping_is_free(self):
method = FreeShipping()
basket = Basket()
method.set_basket(basket)
self.assertEquals(D('0.00'), method.basket_charge_incl_tax())
self.assertEquals(D('0.00'), method.basket_charge_excl_tax())
class FixedPriceShippingTest(unittest.TestCase):
def test_fixed_price_shipping_charges_for_empty_basket(self):
method = FixedPriceShipping(D('10.00'), D('10.00'))
basket = Basket()
method.set_basket(basket)
self.assertEquals(D('10.00'), method.basket_charge_incl_tax())
self.assertEquals(D('10.00'), method.basket_charge_excl_tax())
def test_fixed_price_shipping_assumes_no_tax(self):
method = FixedPriceShipping(D('10.00'))
basket = Basket()
method.set_basket(basket)
self.assertEquals(D('10.00'), method.basket_charge_excl_tax())
shipping_values = lambda: [('1.00',),
('5.00',),
('10.00',),
('12.00',)]
@dataProvider(shipping_values)
def test_different_values(self, value):
method = FixedPriceShipping(D(value))
basket = Basket()
method.set_basket(basket)
self.assertEquals(D(value), method.basket_charge_excl_tax())
class OrderAndItemLevelChargeMethodTest(unittest.TestCase):
def setUp(self):
self.method = OrderAndItemLevelChargeMethod(price_per_order=D('5.00'), price_per_item=D('1.00'))
self.basket = Basket.objects.create()
self.method.set_basket(self.basket)
def test_order_level_charge_for_empty_basket(self):
self.assertEquals(D('5.00'), self.method.basket_charge_incl_tax())
def test_single_item_basket(self):
p = create_product()
self.basket.add_product(p)
self.assertEquals(D('5.00') + D('1.00'), self.method.basket_charge_incl_tax())
def test_multi_item_basket(self):
p = create_product()
self.basket.add_product(p, 7)
self.assertEquals(D('5.00') + 7*D('1.00'), self.method.basket_charge_incl_tax())
class ZeroFreeShippingThresholdTest(unittest.TestCase):
def setUp(self):
self.method = OrderAndItemLevelChargeMethod(price_per_order=D('10.00'), free_shipping_threshold=D('0.00'))
self.basket = Basket.objects.create()
self.method.set_basket(self.basket)
def test_free_shipping_with_empty_basket(self):
self.assertEquals(D('0.00'), self.method.basket_charge_incl_tax())
def test_free_shipping_with_nonempty_basket(self):
p = create_product(D('5.00'))
self.basket.add_product(p)
self.assertEquals(D('0.00'), self.method.basket_charge_incl_tax())
class NonZeroFreeShippingThresholdTest(unittest.TestCase):
def setUp(self):
self.method = OrderAndItemLevelChargeMethod(price_per_order=D('10.00'), free_shipping_threshold=D('20.00'))
self.basket = Basket.objects.create()
self.method.set_basket(self.basket)
def test_basket_below_threshold(self):
p = create_product(D('5.00'))
self.basket.add_product(p)
self.assertEquals(D('10.00'), self.method.basket_charge_incl_tax())
def test_basket_on_threshold(self):
p = create_product(D('5.00'))
self.basket.add_product(p, 4)
self.assertEquals(D('0.00'), self.method.basket_charge_incl_tax())
def test_basket_above_threshold(self):
p = create_product(D('5.00'))
self.basket.add_product(p, 8)
self.assertEquals(D('0.00'), self.method.basket_charge_incl_tax())
| bsd-3-clause | -1,676,387,105,145,000,200 | 38.028037 | 115 | 0.630747 | false |
brianhouse/wavefarm | granu/braid/pattern.py | 1 | 2625 | """ Pattern is just a list (of whatever) that can be specified in compacted form
... with the addition of the Markov expansion of tuples on calling resolve
"""
import random
class Pattern(list):
def __init__(self, value=[0]):
list.__init__(self, value)
self.resolve()
def resolve(self):
""" Choose a path through the Markov chain """
return self._unroll(self._subresolve(self))
def _subresolve(self, pattern):
""" Resolve a subbranch of the pattern """
steps = []
for step in pattern:
if type(step) == tuple:
step = self._pick(step)
if type(step) == tuple or type(step) == list: ## so Im limiting it to one layer of nesting?
step = self._subresolve(step)
elif type(step) == list:
step = self._subresolve(step)
steps.append(step)
return steps
def _pick(self, step):
""" Choose between options for a given step """
assert len(step) == 2 or len(step) == 3
if len(step) == 2:
if type(step[1]) == float: # (1, 0.5) is a 50% chance of playing a 1, otherwise 0
step = step[0], [0, 0], step[1] ## it's a 0, 0 because 0 patterns dont progress, and this could be the root level: is this a bug?
else:
step = step[0], step[1], 0.5 # (1, 2) is a 50% chance of playing a 1 vs a 2
step = step[0] if step[2] > random.random() else step[1] # (1, 2, 0.5) is full form ## expand this to accommodate any number of options
return step
def _unroll(self, pattern, divs=None, r=None):
""" Unroll a compacted form to a pattern with lcm steps """
if divs is None:
divs = self._get_divs(pattern)
r = []
elif r is None:
r = []
for step in pattern:
if type(step) == list:
self._unroll(step, (divs // len(pattern)), r)
else:
r.append(step)
for i in range((divs // len(pattern)) - 1):
r.append(0)
return r
def _get_divs(self, pattern):
""" Find lcm for a subpattern """
subs = [(self._get_divs(step) if type(step) == list else 1) * len(pattern) for step in pattern]
divs = subs[0]
for step in subs[1:]:
divs = lcm(divs, step)
return divs
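def demo_pattern():
    """ Usage sketch (illustrative only): a compacted pattern mixing a Markov tuple
        and a nested list. It unrolls to 6 steps, e.g. [1, 0, 2, 0, 4, 5]: the tuple
        picks 2 or 3 with equal odds, the nested list squeezes two steps into one
        slot of the parent pattern.
    """
    return Pattern([1, (2, 3), [4, 5]]).resolve()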
def lcm(a, b):
gcd, tmp = a, b
while tmp != 0:
gcd, tmp = tmp, gcd % tmp
return a * b // gcd
| gpl-3.0 | -2,916,064,081,948,905,500 | 34 | 155 | 0.509333 | false |
cggh/scikit-allel | allel/test/test_api.py | 1 | 8225 | # -*- coding: utf-8 -*-
def test_public_api():
# The idea of this test is to ensure that all functions we expect to be in the
# public API under the correct namespace are indeed there.
import allel
# allel.model.ndarray
assert callable(allel.GenotypeVector)
assert callable(allel.GenotypeArray)
assert callable(allel.HaplotypeArray)
assert callable(allel.AlleleCountsArray)
assert callable(allel.GenotypeAlleleCountsVector)
assert callable(allel.GenotypeAlleleCountsArray)
assert callable(allel.SortedIndex)
assert callable(allel.UniqueIndex)
assert callable(allel.SortedMultiIndex)
assert callable(allel.VariantTable)
assert callable(allel.FeatureTable)
# allel.model.dask
assert callable(allel.GenotypeDaskVector)
assert callable(allel.GenotypeDaskArray)
assert callable(allel.HaplotypeDaskArray)
assert callable(allel.AlleleCountsDaskArray)
assert callable(allel.GenotypeAlleleCountsDaskVector)
assert callable(allel.GenotypeAlleleCountsDaskArray)
# allel.model.chunked
assert callable(allel.GenotypeChunkedArray)
assert callable(allel.HaplotypeChunkedArray)
assert callable(allel.AlleleCountsChunkedArray)
assert callable(allel.GenotypeAlleleCountsChunkedArray)
assert callable(allel.VariantChunkedTable)
assert callable(allel.AlleleCountsChunkedTable)
# allel.model.util
assert callable(allel.create_allele_mapping)
assert callable(allel.locate_fixed_differences)
assert callable(allel.locate_private_alleles)
assert callable(allel.sample_to_haplotype_selection)
# allel.io.fasta
assert callable(allel.write_fasta)
# allel.io.gff
assert callable(allel.iter_gff3)
assert callable(allel.gff3_to_recarray)
assert callable(allel.gff3_to_dataframe)
assert callable(allel.gff3_parse_attributes)
# allel.io.vcf_read
assert callable(allel.read_vcf)
assert callable(allel.vcf_to_npz)
assert callable(allel.vcf_to_hdf5)
assert callable(allel.vcf_to_zarr)
assert callable(allel.iter_vcf_chunks)
assert callable(allel.read_vcf_headers)
assert callable(allel.vcf_to_dataframe)
assert callable(allel.vcf_to_csv)
assert callable(allel.vcf_to_recarray)
# allel.io.vcf_write
assert callable(allel.write_vcf)
assert callable(allel.write_vcf_header)
assert callable(allel.write_vcf_data)
# allel.stats.admixture
assert callable(allel.patterson_f2)
assert callable(allel.patterson_f3)
assert callable(allel.patterson_d)
assert callable(allel.moving_patterson_f3)
assert callable(allel.moving_patterson_d)
assert callable(allel.average_patterson_f3)
assert callable(allel.average_patterson_d)
# backwards compatibility
assert callable(allel.blockwise_patterson_f3)
assert callable(allel.blockwise_patterson_d)
# allel.stats.decomposition
assert callable(allel.pca)
assert callable(allel.randomized_pca)
# allel.stats.distance
assert callable(allel.pairwise_distance)
assert callable(allel.pairwise_dxy)
assert callable(allel.pcoa)
assert callable(allel.condensed_coords)
assert callable(allel.condensed_coords_within)
assert callable(allel.condensed_coords_between)
assert callable(allel.plot_pairwise_distance)
# allel.stats.diversity
assert callable(allel.mean_pairwise_difference)
assert callable(allel.mean_pairwise_difference_between)
assert callable(allel.sequence_diversity)
assert callable(allel.sequence_divergence)
assert callable(allel.windowed_diversity)
assert callable(allel.windowed_divergence)
assert callable(allel.windowed_df)
assert callable(allel.watterson_theta)
assert callable(allel.windowed_watterson_theta)
assert callable(allel.tajima_d)
assert callable(allel.windowed_tajima_d)
assert callable(allel.moving_tajima_d)
# allel.stats.fst
assert callable(allel.weir_cockerham_fst)
assert callable(allel.hudson_fst)
assert callable(allel.patterson_fst)
assert callable(allel.windowed_weir_cockerham_fst)
assert callable(allel.windowed_hudson_fst)
assert callable(allel.windowed_patterson_fst)
assert callable(allel.moving_weir_cockerham_fst)
assert callable(allel.moving_hudson_fst)
assert callable(allel.moving_patterson_fst)
assert callable(allel.average_weir_cockerham_fst)
assert callable(allel.average_hudson_fst)
assert callable(allel.average_patterson_fst)
# backwards compatibility
assert callable(allel.blockwise_weir_cockerham_fst)
assert callable(allel.blockwise_hudson_fst)
assert callable(allel.blockwise_patterson_fst)
# allel.stats.hw
assert callable(allel.heterozygosity_observed)
assert callable(allel.heterozygosity_expected)
assert callable(allel.inbreeding_coefficient)
# allel.stats.ld
assert callable(allel.rogers_huff_r)
assert callable(allel.rogers_huff_r_between)
assert callable(allel.locate_unlinked)
assert callable(allel.windowed_r_squared)
assert callable(allel.plot_pairwise_ld)
# allel.stats.mendel
assert callable(allel.mendel_errors)
assert callable(allel.paint_transmission)
assert callable(allel.phase_progeny_by_transmission)
assert callable(allel.phase_parents_by_transmission)
assert callable(allel.phase_by_transmission)
# allel.stats.misc
assert callable(allel.plot_variant_locator)
assert callable(allel.tabulate_state_transitions)
assert callable(allel.tabulate_state_blocks)
# allel.stats.preprocessing
assert callable(allel.get_scaler)
assert callable(allel.StandardScaler)
assert callable(allel.CenterScaler)
assert callable(allel.PattersonScaler)
# allel.stats.roh
assert callable(allel.roh_mhmm)
assert callable(allel.roh_poissonhmm)
# allel.stats.roh
assert callable(allel.ehh_decay)
assert callable(allel.voight_painting)
assert callable(allel.plot_voight_painting)
assert callable(allel.fig_voight_painting)
assert callable(allel.ihs)
assert callable(allel.xpehh)
assert callable(allel.nsl)
assert callable(allel.xpnsl)
assert callable(allel.haplotype_diversity)
assert callable(allel.moving_haplotype_diversity)
assert callable(allel.garud_h)
assert callable(allel.moving_garud_h)
assert callable(allel.plot_haplotype_frequencies)
assert callable(allel.moving_delta_tajima_d)
assert callable(allel.standardize)
assert callable(allel.standardize_by_allele_count)
assert callable(allel.pbs)
# allel.stats.sf
assert callable(allel.sfs)
assert callable(allel.sfs_folded)
assert callable(allel.sfs_scaled)
assert callable(allel.sfs_folded_scaled)
assert callable(allel.scale_sfs)
assert callable(allel.scale_sfs_folded)
assert callable(allel.joint_sfs)
assert callable(allel.joint_sfs_folded)
assert callable(allel.joint_sfs_scaled)
assert callable(allel.joint_sfs_folded_scaled)
assert callable(allel.scale_joint_sfs)
assert callable(allel.scale_joint_sfs_folded)
assert callable(allel.fold_sfs)
assert callable(allel.plot_sfs)
assert callable(allel.plot_sfs_folded)
assert callable(allel.plot_sfs_scaled)
assert callable(allel.plot_sfs_folded_scaled)
assert callable(allel.plot_joint_sfs)
assert callable(allel.plot_joint_sfs_folded)
assert callable(allel.plot_joint_sfs_scaled)
assert callable(allel.plot_joint_sfs_folded_scaled)
# allel.stats.window
assert callable(allel.moving_statistic)
assert callable(allel.moving_mean)
assert callable(allel.moving_std)
assert callable(allel.moving_midpoint)
assert callable(allel.index_windows)
assert callable(allel.position_windows)
assert callable(allel.window_locations)
assert callable(allel.windowed_count)
assert callable(allel.windowed_statistic)
assert callable(allel.per_base)
assert callable(allel.equally_accessible_windows)
# allel.util
assert callable(allel.hdf5_cache)
# N.B., check this is not clobbered, see
# https://github.com/cggh/scikit-allel/issues/163
import allel.util
assert callable(allel.util.hdf5_cache)
| mit | -1,089,891,051,480,196,000 | 36.557078 | 82 | 0.752705 | false |
jimstorch/tokp | tokp_lib/xml_store.py | 1 | 4808 | #------------------------------------------------------------------------------
# File: xml_store.py
# Purpose: Store and Retrieve data from XML files
# Author: Jim Storch
# License: GPLv3 see LICENSE.TXT
#------------------------------------------------------------------------------
import datetime
import re
import glob
from xml.etree import cElementTree as et
from tokp_lib.parse_combat import Raid
#--[ Datetime to String ]------------------------------------------------------
# Seems like a lot of juggling but strftime() and strptime() do not support
# microseconds.
def dt_to_str(dt):
"""Given a datetime object,
returns a string in the format 'YYYY-MM-DD HH:MM:SS:MMMMMM'."""
return '%d-%.2d-%.2d %.2d:%.2d:%.2d.%.6d' % (
dt.year, dt.month, dt.day,
dt.hour, dt.minute, dt.second, dt.microsecond )
#--[ String to Datetime ]------------------------------------------------------
## Regex for str_to_dt()
rawstr = r"^(?P<year>\d{2,})-(?P<month>\d\d)-(?P<day>\d\d)\s(?P<hour>\d\d)" + \
":(?P<minute>\d\d):(?P<second>\d\d)\.(?P<micro>\d*)$"
compile_obj = re.compile(rawstr)
def str_to_dt(string_in):
"""Given a string in the format 'YYYY-MM-DD HH:MM:SS:MMMMMM,'
returns a datetime object."""
match_obj = compile_obj.search(string_in)
if match_obj:
year = int(match_obj.group('year'))
month = int(match_obj.group('month'))
day = int(match_obj.group('day'))
hour = int(match_obj.group('hour'))
minute = int(match_obj.group('minute'))
second = int(match_obj.group('second'))
micro = int(match_obj.group('micro'))
else:
raise ValueError('Could not parse datetime string')
return datetime.datetime(year, month, day, hour, minute, second, micro)
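# Illustrative sketch (not part of the original module): a quick round trip
# between dt_to_str() and str_to_dt(). The sample timestamp is hypothetical.
def _example_datetime_roundtrip():
    """Show that str_to_dt() inverts dt_to_str() for a sample datetime."""
    sample = datetime.datetime(2009, 5, 17, 21, 30, 15, 123456)
    text = dt_to_str(sample)            # '2009-05-17 21:30:15.123456'
    assert str_to_dt(text) == sample    # parsing restores the original value
    return text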
#--[ Indent ]------------------------------------------------------------------
# From http://effbot.org/zone/element-lib.htm (plus Paul Du Bois's comment)
def indent(elem, level=0):
"""Make an ElementTree all nice and pretty with indents and line breaks."""
i = "\n" + level * " "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
for child in elem:
indent(child, level+1)
if not child.tail or not child.tail.strip():
child.tail = i
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
#--[ Write Raid XML ]----------------------------------------------------------
def write_raid_xml(raid):
""" Given a Raid object, serializes it to an XML file.
Returns the filename used."""
fname = raid.start_time.strftime("%Y%m%d.%H%M.") + raid.zone + '.xml'
dstr = raid.start_time.strftime("%m/%d/%Y")
xml = et.Element('raid',date = dstr)
## Zone
zone = et.SubElement(xml,'zone')
zone.text = raid.zone
## Start Time
start_time = et.SubElement(xml,'start_time')
start_time.text = dt_to_str(raid.start_time)
## End Time
end_time = et.SubElement(xml,'end_time')
end_time.text = dt_to_str(raid.end_time)
## Members
members = et.SubElement(xml,'members')
raid.raid_members.sort()
for member in raid.raid_members:
name = et.SubElement(members,'name')
name.text = member
## Make pretty and write to a file
indent(xml)
    f = open('data/raids/' + fname,'w')
f.write('<?xml version="1.0"?>\n')
tree = et.ElementTree(xml)
tree.write(f, 'utf-8')
#print et.tostring(xml)
return fname
#--[ Read Raid XML ]-----------------------------------------------------------
def read_raid_xml(fname):
"""Given an XML file name, un-serializes it to a Raid object.
Returns the Raid object."""
tree = et.parse(open('data/raids/' + fname,'rU'))
zone = tree.findtext('zone')
start_time_str = tree.findtext('start_time')
start_time = str_to_dt(start_time_str)
end_time_str = tree.findtext('end_time')
end_time = str_to_dt(end_time_str)
raid = Raid(zone,start_time)
raid.end_time = end_time
for elem in tree.getiterator('name'):
raid.add_member(elem.text)
return raid
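# Illustrative sketch (not part of the original module): writing a Raid to XML
# and reading it back. The zone name and member names below are hypothetical,
# and data/raids/ is assumed to exist.
def _example_raid_roundtrip():
    """Serialize a sample Raid and un-serialize it again."""
    raid = Raid('Karazhan', datetime.datetime(2009, 5, 17, 20, 0, 0))
    raid.end_time = datetime.datetime(2009, 5, 17, 23, 30, 0)
    for member in ('Alice', 'Bob'):
        raid.add_member(member)
    fname = write_raid_xml(raid)        # writes data/raids/<fname>
    return read_raid_xml(fname)         # returns an equivalent Raid object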
#--[ Raid Files ]--------------------------------------------------------------
## Regex for raid_files()
fname_str = r'.*[/\\](?P<fname>.+)\.xml'
fname_obj = re.compile(fname_str)
def raid_files():
"""Returns a chronologically sorted list of raid XML file names."""
file_list = []
xfiles = glob.glob('data/raids/*.xml')
for xfile in xfiles:
match_obj = fname_obj.search(xfile)
file_list.append(match_obj.group('fname'))
file_list.sort()
return file_list
| gpl-3.0 | -795,323,932,101,939,700 | 31.053333 | 81 | 0.532238 | false |
qianqians/Screw | 3rdparty/protobuf/python/google/protobuf/internal/message_factory_test.py | 1 | 8361 | #! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.message_factory."""
__author__ = '[email protected] (Matt Toia)'
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import descriptor_pb2
from google.protobuf.internal import factory_test1_pb2
from google.protobuf.internal import factory_test2_pb2
from google.protobuf import descriptor_database
from google.protobuf import descriptor_pool
from google.protobuf import message_factory
class MessageFactoryTest(unittest.TestCase):
def setUp(self):
self.factory_test1_fd = descriptor_pb2.FileDescriptorProto.FromString(
factory_test1_pb2.DESCRIPTOR.serialized_pb)
self.factory_test2_fd = descriptor_pb2.FileDescriptorProto.FromString(
factory_test2_pb2.DESCRIPTOR.serialized_pb)
def _ExerciseDynamicClass(self, cls):
msg = cls()
msg.mandatory = 42
msg.nested_factory_2_enum = 0
msg.nested_factory_2_message.value = 'nested message value'
msg.factory_1_message.factory_1_enum = 1
msg.factory_1_message.nested_factory_1_enum = 0
msg.factory_1_message.nested_factory_1_message.value = (
'nested message value')
msg.factory_1_message.scalar_value = 22
msg.factory_1_message.list_value.extend([u'one', u'two', u'three'])
msg.factory_1_message.list_value.append(u'four')
msg.factory_1_enum = 1
msg.nested_factory_1_enum = 0
msg.nested_factory_1_message.value = 'nested message value'
msg.circular_message.mandatory = 1
msg.circular_message.circular_message.mandatory = 2
msg.circular_message.scalar_value = 'one deep'
msg.scalar_value = 'zero deep'
msg.list_value.extend([u'four', u'three', u'two'])
msg.list_value.append(u'one')
msg.grouped.add()
msg.grouped[0].part_1 = 'hello'
msg.grouped[0].part_2 = 'world'
msg.grouped.add(part_1='testing', part_2='123')
msg.loop.loop.mandatory = 2
msg.loop.loop.loop.loop.mandatory = 4
serialized = msg.SerializeToString()
converted = factory_test2_pb2.Factory2Message.FromString(serialized)
reserialized = converted.SerializeToString()
self.assertEqual(serialized, reserialized)
result = cls.FromString(reserialized)
self.assertEqual(msg, result)
def testGetPrototype(self):
db = descriptor_database.DescriptorDatabase()
pool = descriptor_pool.DescriptorPool(db)
db.Add(self.factory_test1_fd)
db.Add(self.factory_test2_fd)
factory = message_factory.MessageFactory()
cls = factory.GetPrototype(pool.FindMessageTypeByName(
'google.protobuf.python.internal.Factory2Message'))
self.assertFalse(cls is factory_test2_pb2.Factory2Message)
self._ExerciseDynamicClass(cls)
cls2 = factory.GetPrototype(pool.FindMessageTypeByName(
'google.protobuf.python.internal.Factory2Message'))
self.assertTrue(cls is cls2)
def testGetMessages(self):
# performed twice because multiple calls with the same input must be allowed
for _ in range(2):
messages = message_factory.GetMessages([self.factory_test1_fd,
self.factory_test2_fd])
self.assertTrue(
set(['google.protobuf.python.internal.Factory2Message',
'google.protobuf.python.internal.Factory1Message'],
).issubset(set(messages.keys())))
self._ExerciseDynamicClass(
messages['google.protobuf.python.internal.Factory2Message'])
factory_msg1 = messages['google.protobuf.python.internal.Factory1Message']
self.assertTrue(set(
['google.protobuf.python.internal.Factory2Message.one_more_field',
'google.protobuf.python.internal.another_field'],).issubset(set(
ext.full_name
for ext in factory_msg1.DESCRIPTOR.file.pool.FindAllExtensions(
factory_msg1.DESCRIPTOR))))
msg1 = messages['google.protobuf.python.internal.Factory1Message']()
ext1 = msg1.Extensions._FindExtensionByName(
'google.protobuf.python.internal.Factory2Message.one_more_field')
ext2 = msg1.Extensions._FindExtensionByName(
'google.protobuf.python.internal.another_field')
msg1.Extensions[ext1] = 'test1'
msg1.Extensions[ext2] = 'test2'
self.assertEqual('test1', msg1.Extensions[ext1])
self.assertEqual('test2', msg1.Extensions[ext2])
def testDuplicateExtensionNumber(self):
pool = descriptor_pool.DescriptorPool()
factory = message_factory.MessageFactory(pool=pool)
# Add Container message.
f = descriptor_pb2.FileDescriptorProto()
f.name = 'google/protobuf/internal/container.proto'
f.package = 'google.protobuf.python.internal'
msg = f.message_type.add()
msg.name = 'Container'
rng = msg.extension_range.add()
rng.start = 1
rng.end = 10
pool.Add(f)
msgs = factory.GetMessages([f.name])
self.assertIn('google.protobuf.python.internal.Container', msgs)
# Extend container.
f = descriptor_pb2.FileDescriptorProto()
f.name = 'google/protobuf/internal/extension.proto'
f.package = 'google.protobuf.python.internal'
f.dependency.append('google/protobuf/internal/container.proto')
msg = f.message_type.add()
msg.name = 'Extension'
ext = msg.extension.add()
ext.name = 'extension_field'
ext.number = 2
ext.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
ext.type_name = 'Extension'
ext.extendee = 'Container'
pool.Add(f)
msgs = factory.GetMessages([f.name])
self.assertIn('google.protobuf.python.internal.Extension', msgs)
# Add Duplicate extending the same field number.
f = descriptor_pb2.FileDescriptorProto()
f.name = 'google/protobuf/internal/duplicate.proto'
f.package = 'google.protobuf.python.internal'
f.dependency.append('google/protobuf/internal/container.proto')
msg = f.message_type.add()
msg.name = 'Duplicate'
ext = msg.extension.add()
ext.name = 'extension_field'
ext.number = 2
ext.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
ext.type_name = 'Duplicate'
ext.extendee = 'Container'
pool.Add(f)
with self.assertRaises(Exception) as cm:
factory.GetMessages([f.name])
self.assertIn(str(cm.exception),
['Extensions '
'"google.protobuf.python.internal.Duplicate.extension_field" and'
' "google.protobuf.python.internal.Extension.extension_field"'
' both try to extend message type'
' "google.protobuf.python.internal.Container"'
' with field number 2.',
'Double registration of Extensions'])
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | -2,762,322,544,589,745,000 | 41.441624 | 84 | 0.706853 | false |
tamnm/kodi.mp3.zing.vn | dev/service.vnmusic/service.py | 1 | 5800 | import time
import xbmc
import urlparse
import SimpleHTTPServer
import SocketServer
import urllib2
import urllib
import re
import json
import base64
import requests
import httplib
import sys
songInfoApi = 'http://api.mp3.zing.vn/api/mobile/song/getsonginfo?keycode=fafd463e2131914934b73310aa34a23f&requestdata={"id":"_ID_ENCODED_"}'
videoInfoApi ='http://api.mp3.zing.vn/api/mobile/video/getvideoinfo?keycode=fafd463e2131914934b73310aa34a23f&requestdata={"id":"_ID_ENCODED_"}'
zingTvApi ='http://api.tv.zing.vn/2.0/media/info?api_key=d04210a70026ad9323076716781c223f&session_key=91618dfec493ed7dc9d61ac088dff36b&&media_id='
def load(url):
r = requests.get(url)
return r.text
def checkUrl(url):
try:
ret = urllib2.urlopen(url)
code = ret.code
#log('check url:%s - %d'%(url,code))
return ret.code < 400
except Exception, e:
#log('check url:%s - %s'%(url,str(e)))
return False
pass
def getZTVSource(source,quality=3):
result = None
if quality <0 :
return result
ss = []
if 'Video3GP' in source:
ss.append('http://'+source['Video3GP'])
else:
ss.append(None)
if 'Video480' in source:
ss.append('http://'+source['Video480'])
else:
ss.append(None)
if 'Video720' in source:
ss.append('http://'+source['Video720'])
else:
ss.append(None)
if 'Video1080' in source:
ss.append('http://'+source['Video1080'])
else:
ss.append(None)
if ss[quality]!=None:
result = ss[quality]
else:
for i in range(quality,-1,-1):
if ss[i] != None:
result = ss[i]
break
if result == None:
for i in range(quality,len(ss)):
if ss[i] != None:
result = ss[i]
break
if result !=None and checkUrl(result):
return result
else:
return getZTVSource(source,quality-1)
def getZVideoSource(source,quality=4):
log('getVideoSource:'+str(quality))
result = None
if quality <0 :
return result
ss = []
if '240' in source:
ss.append(source['240'])
else:
ss.append(None)
if '360' in source:
ss.append(source['360'])
else:
ss.append(None)
if '480' in source:
ss.append(source['480'])
else:
ss.append(None)
if '720' in source:
ss.append(source['720'])
else:
ss.append(None)
if '1080' in source:
ss.append(source['1080'])
else:
ss.append(None)
#log('Source:%d - %s'%(quality,ss[quality]))
if ss[quality]!=None:
result = ss[quality]
else:
for i in range(quality,-1,-1):
if ss[i] != None:
result = ss[i]
break
if result == None:
for i in range(quality,len(ss)):
if ss[i] != None:
result = ss[i]
break
if result !=None and checkUrl(result):
return result
else:
return getZVideoSource(source,quality-1)
def getZAudioSource(source,audio_quality=2):
log('getAudioSource:'+str(audio_quality))
result = None
if(audio_quality<0):
return result
ss = []
if '128' in source:
ss.append(source['128'])
else:
ss.append(None)
if '320' in source:
ss.append(source['320'])
else:
ss.append(None)
if 'lossless' in source:
ss.append(source['lossless'])
else:
ss.append(None)
if ss[audio_quality]!=None:
result = ss[audio_quality]
else:
        for i in range(audio_quality,-1,-1):
            if ss[i] != None:
                result = ss[i]
                break
    if result == None:
        for i in range(audio_quality,len(ss)):
            if ss[i] != None:
                result = ss[i]
                break
if result != None and checkUrl(result):
return result
else:
return getZAudioSource(source,audio_quality-1)
def getMp3ZingSong(sid,q):
url = songInfoApi.replace('_ID_ENCODED_',sid)
js = json.loads(load(url))
url = getZAudioSource(js['source'],q)
return url
def getZingTVVideo(sid,q):
url = zingTvApi + sid
js = json.loads(load(url))
url = getZTVSource(js['response']['other_url'],q)
return url
def getMp3ZingVideo(sid,q):
url = videoInfoApi.replace('_ID_ENCODED_',sid)
js = json.loads(load(url))
source = getZVideoSource(js['source'],q)
return source
def getTalkTVVideo(sid):
#loadPlayer.manifestUrl = "http://live.csmtalk.vcdn.vn/hls/6b1cc68ba8735185ada742e8713567c4/55f10fd0/elorenhat/index.m3u8";
url = 'http://talktv.vn/'+sid
html = load(url)
lines = html.split('\n')
for line in lines:
line = line.strip()
if 'loadPlayer.manifestUrl' in line:
line = line.replace('loadPlayer.manifestUrl','').replace('"','').replace(';','').replace('=','').strip()
return line
return None
def log(m):
sys.stdout.write(m)
pass
class MyRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def redirect(self,link):
self.send_response(301)
self.send_header('Content-type','text/html')
self.send_header('Location', link)
self.end_headers()
#log('link:'+link)
pass
def do_HEAD(self):
self.do_GET()
def do_GET(self):
q = urlparse.urlparse(self.path)
queries = urlparse.parse_qs(q.query)
self.log_request()
if "mp3ZAudio?" in self.path:
link = getMp3ZingSong(queries['sid'][0],int(queries['q'][0]))
if link == None:
log(queries['sid'][0]+' link not found')
pass
self.redirect(link)
elif "mp3ZVideo?" in self.path:
link = getMp3ZingVideo(queries['sid'][0],int(queries['q'][0]))
self.redirect(link)
elif 'ZingTV?' in self.path:
link = getZingTVVideo(queries['sid'][0],int(queries['q'][0]))
self.redirect(link)
elif 'TalkTV?' in self.path:
link = getTalkTVVideo(queries['sid'][0])
self.redirect(link)
def log_request(self, code='-', size='-'):
sys.stdout.write("%s %s %s" % (self.requestline, str(code), str(size)))
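# Example requests handled by MyRequestHandler (added commentary; the sid
# values below are hypothetical identifiers, not real ones):
#   http://localhost:9998/mp3ZAudio?sid=ZW67O0DC&q=1   -> redirect to an mp3 stream
#   http://localhost:9998/mp3ZVideo?sid=ZW67O0DC&q=2   -> redirect to a video stream
#   http://localhost:9998/ZingTV?sid=12345&q=2         -> redirect to a Zing TV stream
#   http://localhost:9998/TalkTV?sid=some-channel      -> redirect to an HLS manifest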
if __name__ == '__main__':
PORT = 9998
handler = MyRequestHandler
httpd = SocketServer.TCPServer(("", PORT), handler)
sys.stdout.write("serving at port %d" % PORT)
httpd.serve_forever() | gpl-2.0 | -8,087,498,911,041,477,000 | 22.276151 | 146 | 0.642241 | false |
znerol/spreadflow-delta | spreadflow_delta/test/test_symlink.py | 1 | 1301 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
from mock import Mock, patch
from testtools import TestCase
from spreadflow_core.scheduler import Scheduler
from spreadflow_delta.test.matchers import MatchesSendDeltaItemInvocation
from spreadflow_delta.proc import Symlink
class SymlinkTestCase(TestCase):
def test_symlink(self):
"""
Test the symlink processor.
"""
sut = Symlink(key='test_path', destkey='test_linkpath')
insert = {
'inserts': ['a'],
'deletes': [],
'data': {
'a': {
'test_path': '/path/to/some/file.txt',
'test_linkpath': '/path/to/symlink/test.txt'
}
}
}
expected = copy.deepcopy(insert)
matches = MatchesSendDeltaItemInvocation(expected, sut)
send = Mock(spec=Scheduler.send)
with patch('spreadflow_delta.util.symlink_replace') as symlink_mock:
sut(insert, send)
self.assertEquals(send.call_count, 1)
self.assertThat(send.call_args, matches)
symlink_mock.assert_called_once_with('/path/to/some/file.txt', '/path/to/symlink/test.txt')
| mit | -5,483,107,710,838,320,000 | 28.568182 | 99 | 0.607225 | false |
unho/translate | translate/storage/test_yaml.py | 1 | 12717 | # -*- coding: utf-8 -*-
import pytest
import ruamel.yaml
from translate.storage import base, test_monolingual, yaml
class TestYAMLResourceUnit(test_monolingual.TestMonolingualUnit):
UnitClass = yaml.YAMLUnit
def test_getlocations(self):
unit = self.UnitClass("teststring")
unit.setid('some-key')
assert unit.getlocations() == ['some-key']
class TestYAMLResourceStore(test_monolingual.TestMonolingualStore):
StoreClass = yaml.YAMLFile
def test_serialize(self):
store = self.StoreClass()
store.parse('key: value')
assert bytes(store) == b'key: value\n'
def test_empty(self):
store = self.StoreClass()
store.parse('{}')
assert bytes(store) == b'{}\n'
def test_edit(self):
store = self.StoreClass()
store.parse('key: value')
store.units[0].target = 'second'
assert bytes(store) == b'key: second\n'
def test_edit_unicode(self):
store = self.StoreClass()
store.parse('key: value')
store.units[0].target = 'zkouška'
assert bytes(store) == 'key: zkouška\n'.encode('utf-8')
def test_parse_unicode_list(self):
data = '''list:
- zkouška
'''
store = self.StoreClass()
store.parse(data)
assert bytes(store).decode('utf-8') == data
store.units[0].target = 'změna'
assert bytes(store).decode('utf-8') == data.replace('zkouška', 'změna')
def test_ordering(self):
store = self.StoreClass()
store.parse('''
foo: foo
bar: bar
baz: baz
''')
assert len(store.units) == 3
assert store.units[0].source == 'foo'
assert store.units[2].source == 'baz'
def test_initial_comments(self):
data = '''# Hello world.
foo: bar
'''
store = self.StoreClass()
store.parse(data)
assert len(store.units) == 1
assert store.units[0].getid() == 'foo'
assert store.units[0].source == 'bar'
assert bytes(store).decode('ascii') == data
def test_string_key(self):
data = '''"yes": Oficina
'''
store = self.StoreClass()
store.parse(data)
assert len(store.units) == 1
assert store.units[0].getid() == 'yes'
assert store.units[0].source == 'Oficina'
assert bytes(store).decode('ascii') == data
def test_nested(self):
data = '''foo:
bar: bar
baz:
boo: booo
eggs: spam
'''
store = self.StoreClass()
store.parse(data)
assert len(store.units) == 3
assert store.units[0].getid() == 'foo->bar'
assert store.units[0].source == 'bar'
assert store.units[1].getid() == 'foo->baz->boo'
assert store.units[1].source == 'booo'
assert store.units[2].getid() == 'eggs'
assert store.units[2].source == 'spam'
assert bytes(store).decode('ascii') == data
def test_multiline(self):
"""These are used in Discourse and Diaspora* translation."""
data = '''invite: |-
Ola!
Recibiches unha invitación para unirte!
eggs: spam
'''
store = self.StoreClass()
store.parse(data)
assert len(store.units) == 2
assert store.units[0].getid() == 'invite'
assert store.units[0].source == """Ola!
Recibiches unha invitación para unirte!"""
assert store.units[1].getid() == 'eggs'
assert store.units[1].source == 'spam'
assert bytes(store).decode('utf-8') == data
def test_boolean(self):
store = self.StoreClass()
store.parse('''
foo: True
''')
assert len(store.units) == 1
assert store.units[0].getid() == 'foo'
assert store.units[0].source == 'True'
assert bytes(store) == b'''foo: 'True'
'''
def test_integer(self):
store = self.StoreClass()
store.parse('''
foo: 1
''')
assert len(store.units) == 1
assert store.units[0].getid() == 'foo'
assert store.units[0].source == '1'
assert bytes(store) == b'''foo: '1'
'''
    def test_no_quote_strings(self):
        """These are used in OpenStreetMap translation."""
store = self.StoreClass()
store.parse('''
eggs: No quoting at all
''')
assert len(store.units) == 1
assert store.units[0].getid() == 'eggs'
assert store.units[0].source == 'No quoting at all'
assert bytes(store) == b'''eggs: No quoting at all
'''
    def test_double_quote_strings(self):
        """These are used in OpenStreetMap translation."""
store = self.StoreClass()
store.parse('''
bar: "quote, double"
''')
assert len(store.units) == 1
assert store.units[0].getid() == 'bar'
assert store.units[0].source == 'quote, double'
assert bytes(store) == b'''bar: "quote, double"
'''
    def test_single_quote_strings(self):
        """These are used in OpenStreetMap translation."""
store = self.StoreClass()
store.parse('''
foo: 'quote, single'
''')
assert len(store.units) == 1
assert store.units[0].getid() == 'foo'
assert store.units[0].source == 'quote, single'
assert bytes(store) == b'''foo: 'quote, single'
'''
    def test_avoid_escaping_double_quote_strings(self):
        """These are used in OpenStreetMap translation."""
store = self.StoreClass()
store.parse('''
spam: 'avoid escaping "double quote"'
''')
assert len(store.units) == 1
assert store.units[0].getid() == 'spam'
assert store.units[0].source == 'avoid escaping "double quote"'
assert bytes(store) == b'''spam: 'avoid escaping "double quote"'
'''
def test_avoid_escaping_single_quote_strings(self):
"""Test avoid escaping single quotes."""
store = self.StoreClass()
store.parse('''
spam: "avoid escaping 'single quote'"
''')
assert len(store.units) == 1
assert store.units[0].getid() == 'spam'
assert store.units[0].source == "avoid escaping 'single quote'"
assert bytes(store) == b'''spam: "avoid escaping 'single quote'"
'''
    def test_escaped_double_quotes(self):
        """These are used in OpenStreetMap translation."""
store = self.StoreClass()
store.parse(r'''
foo: "Hello \"World\"."
''')
assert len(store.units) == 1
assert store.units[0].getid() == 'foo'
assert store.units[0].source == 'Hello "World".'
assert bytes(store) == br'''foo: "Hello \"World\"."
'''
    def test_newlines(self):
        """These are used in OpenStreetMap translation."""
store = self.StoreClass()
store.parse(r'''
foo: "Hello \n World."
''')
assert len(store.units) == 1
assert store.units[0].getid() == 'foo'
assert store.units[0].source == 'Hello \n World.'
assert bytes(store) == br'''foo: "Hello \n World."
'''
def test_abbreviated_list(self):
"""These are used in Redmine and Discourse translation."""
data = '''day_names: [Domingo, Luns, Martes, Mércores, Xoves, Venres, Sábado]
'''
store = self.StoreClass()
store.parse(data)
assert len(store.units) == 7
assert store.units[0].getid() == 'day_names->[0]'
assert store.units[0].source == 'Domingo'
assert store.units[1].getid() == 'day_names->[1]'
assert store.units[1].source == 'Luns'
assert store.units[2].getid() == 'day_names->[2]'
assert store.units[2].source == 'Martes'
assert store.units[3].getid() == 'day_names->[3]'
assert store.units[3].source == 'Mércores'
assert store.units[4].getid() == 'day_names->[4]'
assert store.units[4].source == 'Xoves'
assert store.units[5].getid() == 'day_names->[5]'
assert store.units[5].source == 'Venres'
assert store.units[6].getid() == 'day_names->[6]'
assert store.units[6].source == 'Sábado'
assert bytes(store).decode('utf-8') == data
def test_abbreviated_dictionary(self):
"""Test abbreviated dictionary syntax."""
data = '''martin: {name: Martin D'vloper, job: Developer, skill: Elite}
'''
store = self.StoreClass()
store.parse(data)
assert len(store.units) == 3
assert store.units[0].getid() == 'martin->name'
assert store.units[0].source == "Martin D'vloper"
assert store.units[1].getid() == 'martin->job'
assert store.units[1].source == 'Developer'
assert store.units[2].getid() == 'martin->skill'
assert store.units[2].source == 'Elite'
assert bytes(store).decode('ascii') == data
def test_key_nesting(self):
store = self.StoreClass()
unit = self.StoreClass.UnitClass("teststring")
unit.setid('key')
store.addunit(unit)
unit = self.StoreClass.UnitClass("teststring2")
unit.setid('key->value')
store.addunit(unit)
assert bytes(store) == b'''key:
value: teststring2
'''
    def test_add_to_empty(self):
store = self.StoreClass()
store.parse('')
unit = self.StoreClass.UnitClass("teststring")
unit.setid('key')
store.addunit(unit)
unit = self.StoreClass.UnitClass("teststring2")
unit.setid('key->value')
store.addunit(unit)
assert bytes(store).decode('utf-8') == '''key:
value: teststring2
'''
@pytest.mark.skipif(ruamel.yaml.version_info < (0, 16, 6), reason='Empty keys serialization broken in ruamel.yaml<0.16.6')
def test_empty_key(self):
yaml_souce = b''''': Jedna
foo:
'': Dve
'''
store = self.StoreClass()
store.parse(yaml_souce)
assert len(store.units) == 2
assert store.units[0].getid() == ''
assert store.units[0].source == 'Jedna'
assert store.units[1].getid() == 'foo->'
assert store.units[1].source == 'Dve'
assert bytes(store) == yaml_souce
def test_dict_in_list(self):
data = '''e1:
- s1: Subtag 1
'''
store = self.StoreClass()
store.parse(data)
assert len(store.units) == 1
assert bytes(store) == data.encode('ascii')
def test_dump_args(self):
data = '''e1:
- s1: Subtag 1
'''
store = self.StoreClass()
store.dump_args['line_break'] = '\r\n'
store.parse(data)
assert len(store.units) == 1
assert bytes(store) == data.replace('\n', '\r\n').encode('ascii')
def test_anchors(self):
data = '''location: &location_attributes
title: Location
temporary_question: Temporary?
temporary: Temporary
location_batch:
<<: *location_attributes
label: Label
prefix: Prefix
'''
store = self.StoreClass()
store.parse(data)
assert len(store.units) == 5
assert bytes(store).decode('ascii') == data
class TestRubyYAMLResourceStore(test_monolingual.TestMonolingualStore):
StoreClass = yaml.RubyYAMLFile
def test_ruby_list(self):
data = '''en-US:
date:
formats:
default: '%Y-%m-%d'
short: '%b %d'
long: '%B %d, %Y'
day_names:
- Sunday
- Monday
- Tuesday
- Wednesday
- Thursday
- Friday
- Saturday
'''
store = self.StoreClass()
store.parse(data)
assert bytes(store).decode('ascii') == data
def test_ruby(self):
data = '''en:
language_name: English
language_name_english: English
message:
unsubscribe: Unsubscribe from our emails
from_app: from %{app_name}
'''
store = self.StoreClass()
store.parse(data)
assert bytes(store) == data.encode('ascii')
def test_invalid_key(self):
store = yaml.YAMLFile()
with pytest.raises(base.ParseError):
store.parse('1: string')
def test_invalid_value(self):
store = yaml.YAMLFile()
with pytest.raises(base.ParseError):
store.parse('val: "\\u string"')
def test_ruby_plural(self):
data = '''en:
message:
one: There is one message
other: There are %{count} messages
'''
store = self.StoreClass()
store.parse(data)
assert len(store.units) == 1
assert bytes(store) == data.encode('ascii')
def test_empty(self):
store = self.StoreClass()
store.parse('{}')
assert bytes(store) == b'{}\n'
def test_anchors(self):
data = '''en:
location: &location_attributes
title: Location
temporary_question: Temporary?
temporary: Temporary
location_batch:
<<: *location_attributes
label: Label
prefix: Prefix
'''
store = self.StoreClass()
store.parse(data)
assert len(store.units) == 5
assert bytes(store).decode('ascii') == data
| gpl-2.0 | -7,368,948,542,874,367,000 | 29.25 | 126 | 0.578198 | false |
oxfishapp/RESTful | src/commons.py | 1 | 9031 | # -*- coding: utf-8 -*-
#!/usr/bin/env python
#!flask/bin/python
from flask.ext.restful.fields import Raw
from api_errors import error_handled
class Set_to_List(Raw):
def format(self, value):
""" (set) -> list
        Convert a set to a list and return it.
"""
return list(value)
class HashKey_Validation(Raw):
def format(self, value):
""" (str) -> str
        Validate that the HashKey has a format consistent
        with a UUID.
"""
return hashValidation(value)
def hashValidation(value):
""" (str) -> str
    Take a string containing a UUID and validate its format.
    If the format is correct the string is returned; otherwise
    an error is raised.
>>> hashValidation('d6df4adc-b533-9545-9d61-7c877bb53b18')
'd6df4adc-b533-9545-9d61-7c877bb53b18'
>>> hashValidation('d6df4adc-b533-9545-9d61-7c877bb53b1')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "<stdin>", line 12, in hashValidation
NameError: Hash Validation Crash
"""
from flask import abort
import re
regex = re.compile('[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}\Z', re.I)
boolhash = regex.match(value)
if bool(boolhash):
return value
    # Raise an error for an invalid hash
abort(400)
def hashCreate():
""" () -> str
    Return a string containing a UUID.
>>> hashCreate()
'd6df4adc-b533-9545-9d61-7c877bb53b18'
"""
import uuid
value = uuid.uuid4()
return str(value)
def pagination(data, value):
_return = dict()
_return['data'] = data
if value:
_return['pagination'] = value
else:
_return['pagination'] = None
return _return
def hashKeyList(value):
""" (str) -> list
    Take a JSON-encoded string and return a list of
    dictionaries suitable for use in a DynamoDB
    batch_get call.
>>> hashKeyList('["11EC2020-3AEA-4069-A2DD-08002B30309D"]')
[{'Key_Post': u'11EC2020-3AEA-4069-A2DD-08002B30309D'}]
>>> hashKeyList('["11EC2020-3AEA-4069-A2DD-08002B30309D","21EC2020-3AEA-4069-A2DD-08002B30309D"]')
[{'Key_Post': u'11EC2020-3AEA-4069-A2DD-08002B30309D'}, {'Key_Post': u'21EC2020-3AEA-4069-A2DD-08002B30309D'}]
"""
list_hashkey = []
#values = jsondecoder(value)
for item in value:
list_hashkey.append({'key_post': hashValidation(item)})
return list_hashkey
def jsondecoder(encoder):
""" (str) -> json_decoder
    Take a JSON-encoded string and return the decoded object.
>>> jsondecoder('{"Key" : "zxc-123"}')
{u'Key': u'zxc-123'}
>>> jsondecoder('{"Key" : "abcd-1234", "List" : [0,1,2,3]}')
{u'List': [0, 1, 2, 3], u'Key': u'abcd-1234'}
"""
import json
values = json.loads(encoder)
return values
def timeUTCCreate():
""" () -> str
    Return a string with the current date and time in UTC.
>>> timeUTCCreate()
'2014-05-15 19:01:47.669254'
"""
from datetime import datetime
return str(datetime.utcnow())
#Post
#Responses
def items_to_list(items):
""" (items) -> list
    Take a ResultSet or a dictionary and
    return a list of dictionaries.
>>> items_to_list({1:1})
[{1:1}]
"""
result = []
from boto.dynamodb2.results import ResultSet
if isinstance(items, ResultSet):
for item in items:
result.append(item_to_dict(item._data))
return result
#print(value)
result.append(items)
return result
def item_to_dict(value):
from dynamoDBqueries import User
user = User()
dictionary = {}
dictionary.update(value)
user_insert = user.get_by_key_user(value['key_user'])
dictionary.update(user_insert)
return dictionary
def twitter_credentials(access_token, token_secret):
'''
(str, str) -> dict
    Check whether the user's access_token and token_secret are valid. If so,
    return the user data supplied by the Twitter services together with a
    200 status; otherwise return an error message and a 401 status.
'''
from flask import current_app
from flask_oauth import OAuth
config = current_app.config
tw_auth = OAuth().remote_app(name=config['TW_NAME'],
base_url=config['TW_BASE_URL'],
request_token_url=config['TW_REQUEST_TOKEN_URL'],
access_token_url=config['TW_ACCESS_TOKEN_URL'],
authorize_url=config['TW_AUTHORIZE_URL'],
consumer_key=config['TW_CONSUMER_KEY'],
consumer_secret=config['TW_CONSUMER_SECRET'])
    # Definition of the tokengetter function required by Flask-OAuth;
    # it returns a tuple with the access_token and token_secret.
def get_twitter_token(token=access_token, secret=token_secret):
return token, secret
tw_auth.tokengetter(get_twitter_token)
return tw_auth.get('account/verify_credentials.json')
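# Usage sketch (added commentary; the tokens below are hypothetical). The call
# returns a Flask-OAuth response object, so on success resp.status would be 200
# and resp.data would hold the Twitter profile:
#   resp = twitter_credentials('user-access-token', 'user-token-secret')
#   if resp.status == 200:
#       profile = resp.data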
def validate_user_auth(token):
'''
(str) -> boto.dynamodb2.items.Item
    Check whether the supplied token matches the one registered for the user
    in the database. If the check succeeds the user is returned; otherwise a
    401 status code is returned.
'''
from dynamoDBqueries import User
from flask import abort
user = User()
token_user = decrypt_token(token)
user = user.get_item(key_twitter=token_user['hash_key'])
    # Check that the user is registered in the database and that the supplied
    # token equals the one stored there.
if user and token == user._data['token_user']:
return user
abort(401)
def decrypt_token(token_user, secret_key=None):
'''
(str) -> dict
    Verify that token_user is valid. If it is, return a dict with the data
    encoded in the token; if verification fails the request is aborted with
    a 401 status.
    If secret_key is None, the application's OX_SECRET_KEY configuration
    value is used as the default key to sign and verify the token.
JSON Web Token (JWT)
'''
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from flask import current_app, abort
if not secret_key:
secret_key = current_app.config['OX_SECRET_KEY']
token = Serializer(secret_key)
try:
data = token.loads(token_user)
except:
abort(401)
return data
def generate_token(secret_key=None, expiration=0, **kwargs):
'''
(int, str, **kwargs) -> str
    Generate a temporary token_user in which the data passed in kwargs is
    encapsulated and signed. Returns the token_user as a str.
    If secret_key is None, the application's OX_SECRET_KEY configuration
    value is used as the default key to sign and verify the token.
JSON Web Token (JWT)
'''
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from flask import current_app
if not expiration:
expiration = current_app.config['OX_TOKEN_USER_LIFETIME']
if not secret_key:
secret_key = current_app.config['OX_SECRET_KEY']
token = Serializer(secret_key, expires_in=expiration)
return token.dumps(kwargs)
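# Illustrative sketch (not part of the original module): encode data in a token
# and read it back. It must run inside the Flask application context, and the
# hash_key value below is a hypothetical example.
def _example_token_roundtrip():
    token_user = generate_token(hash_key='11EC2020-3AEA-4069-A2DD-08002B30309D')
    data = decrypt_token(token_user)
    return data['hash_key']  # same value that was encoded above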
class Email_validation(Raw):
def format(self, value):
""" (str) -> str
        Check that value has the structure of an email address.
Valido: [email protected]
Errado: juanperez@dominio@com
"""
return validate_email(value)
#@error_handled
def validate_email(email):
""" (str) -> boolean
    Check that the supplied email has a valid format.
    If it does, the email is returned; otherwise the request is aborted.
"""
from flask import abort
import re
result = re.match('^[(a-z0-9\_\-\.)]+@[(a-z0-9\_\-\.)]+\.[(a-z)]{2,4}$',
email.lower())
if result:
return email
abort(400)
# raise ValueError('Malformed email')
def manage_score(score, action=0, points=1):
'''
(int, int, int) -> int
    Manage the actions on a score: points can be added to or removed from a
    score. The kind of action applied to the score is defined as follows:
    * action=1 : the given number of points is added to the current score.
    * action=-1 : the given number of points is removed from the current score.
    Once the action is applied the new score is returned. If action has any
    other value the score is returned unchanged.
'''
new_score = score
if action == 1:
new_score += points
if action == -1:
new_score -= points
return new_score if new_score >= 0 else score
| unlicense | -4,525,323,916,411,038,000 | 25.955224 | 114 | 0.638095 | false |
her0e1c1/pystock | stock/signals.py | 1 | 2696 | import pandas as pd
from . import line, util
def rolling_mean(series, period):
"""現在の株価(短期)と長期移動平均線(長期)のクロス"""
slow = series.rolling(window=period, center=False).mean()
return util.cross(series, slow)
def rolling_mean_ratio(series, period, ratio):
"""長期移動平均線と現在の株価の最終日の差がratio乖離したら売買シグナル"""
mean = series.rolling(window=period, center=False).mean()
r = util.increment(util.last(series), util.last(mean))
return "BUY" if r > ratio else "SELL" if r < -ratio else None
def increment_ratio(series, ratio=25):
"""前日に比べてratio乖離してたら売買シグナル(変動が大きいので戻りの可能性が高いと考える)"""
curr = util.last(series)
prev = util.last(series, offset_from_last=1)
r = util.increment(curr, prev)
return "BUY" if r < -ratio else "SELL" if r > ratio else None
def rsi(series, period, buy, sell):
"""RSIは基本的に30%以下で売られ過ぎ, 70%で買われ過ぎ"""
rsi = line.rsi(series, period)
if rsi.empty:
return None
f = float(rsi[rsi.last_valid_index()])
return "BUY" if f < buy else "SELL" if f > sell else None
def min_low(series, period, ratio):
"""指定期間中の最安値に近いたら買い. (底値が支えになって反発する可能性があると考える)"""
m = float(series.tail(period).min())
if pd.isnull(m):
return None
last = series[series.last_valid_index()]
return "BUY" if util.increment(last, m) < ratio else None
def max_high(series, period, ratio):
"""min_lowの逆version"""
m = float(series.tail(period).max())
if pd.isnull(m):
return None
last = series[series.last_valid_index()]
return "SELL" if util.increment(m, last) < ratio else None
def macd_signal(series, fast, slow, signal):
"""macd(短期)とsignal(長期)のクロス"""
f = line.macd_line(series, fast, slow, signal)
s = line.macd_signal(series, fast, slow, signal)
return util.cross(f, s)
def stochastic(series, k, d, sd):
"""
macd(短期)とsignal(長期)のクロス
一般的に次の値を利用する (k, d, sd) = (14, 3, 3)
"""
fast = line.stochastic_d(series, k=k, d=d)
slow = line.stochastic_sd(series, k=k, d=d, sd=sd)
return util.cross(fast, slow)
def bollinger_band(series, period=20, ratio=3):
"""
2sigmaを超えたら、買われすぎと判断してSELL
-2sigmaを超えたら、売られ過ぎと判断してBUY
"""
s = util.sigma(series, period)
return "BUY" if s <= -ratio else "SELL" if s >= ratio else None
| gpl-3.0 | 7,619,161,558,827,822,000 | 28.868421 | 67 | 0.647137 | false |
ted-dunstone/ivs | hub_demo/test_match.py | 1 | 1043 | import pika
import sys
import logging
logging.basicConfig()
connection = pika.BlockingConnection(pika.ConnectionParameters(
host='localhost'))
channel = connection.channel()
channel.exchange_declare(exchange='Australia_NZ_Exchange',
type='headers')
result = channel.queue_declare(exclusive=True)
if not result:
print 'Queue didnt declare properly!'
sys.exit(1)
queue_name = result.method.queue
channel.queue_bind(exchange='Australia_NZ_Exchange',
queue = queue_name,
routing_key = '',
arguments = {'test': 'test', 'x-match':'any'})
def callback(ch, method, properties, body):
print properties.user_id
print "{headers}:{body}".format(headers = properties.headers,
body = body)
channel.basic_consume(callback,
queue = queue_name,
no_ack=True)
try:
channel.start_consuming()
except KeyboardInterrupt:
print 'Bye'
finally:
connection.close()
| mit | -1,545,338,382,569,837,600 | 26.447368 | 65 | 0.61745 | false |
SKIRT/PTS | do/core/convert.py | 1 | 4941 | #!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.do.core.convert Convert a quantity from one unit to another.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import the relevant PTS classes and modules
from pts.core.basics.configuration import ConfigurationDefinition, parse_arguments, prompt_yn
from pts.core.tools.stringify import tostr
from pts.core.units.parsing import is_photometric_unit, is_photometric_quantity, parse_unit, parse_quantity
from pts.core.units.parsing import possible_physical_types, possible_density_flags, possible_brightness_flags
# -----------------------------------------------------------------
# Create the configuration definition
definition = ConfigurationDefinition(write_config=False)
# Quantity to convert and unit to convert to
definition.add_required("quantity", "string", "quantity to convert to another unit")
definition.add_required("unit", "string", "unit to convert the quantity to")
# Extra information
definition.add_optional("distance", "length_quantity", "distance")
definition.add_optional("wavelength", "length_quantity", "wavelength")
definition.add_optional("frequency", "frequency_quantity", "frequency")
definition.add_optional("pixelscale", "pixelscale", "pixelscale")
definition.add_optional("solid_angle", "solid_angle", "solid angle")
definition.add_optional("filter", "filter", "filter")
# Create the configuration
config = parse_arguments("convert", definition, "Convert a quantity from one unit to another")
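# Example invocation (added commentary; the values are hypothetical):
#   pts convert "2 Jy" "W / (m2 * micron)" --wavelength "200 micron"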
# -----------------------------------------------------------------
# Check quantity
if is_photometric_quantity(config.quantity):
#physical_types = possible_physical_types(config.quantity)
#print(physical_types)
# Density?
density_flags = possible_density_flags(config.quantity)
if len(density_flags) == 1: density = density_flags[0]
elif len(density_flags) == 2: density = prompt_yn("density", "is this quantity (" + config.quantity + ") a spectral density?")
else: raise RuntimeError("Something went wrong")
# Brightness?
brightness_flags = possible_brightness_flags(config.quantity)
if len(brightness_flags) == 1: brightness = brightness_flags[0]
elif len(brightness_flags) == 2: brightness = prompt_yn("brightness", "is this quantity (" + config.quantity + ") a surface brightness?")
else: raise RuntimeError("Something went wrong")
# Parse
quantity = parse_quantity(config.quantity, density=density, brightness=brightness, density_strict=True, brightness_strict=True)
# Not a photometric quantity
else: quantity = parse_quantity(config.quantity)
# -----------------------------------------------------------------
# Check unit
if is_photometric_unit(config.unit):
#physical_types = possible_physical_types(config.unit)
#print(physical_types)
# Density?
density_flags = possible_density_flags(config.unit)
if len(density_flags) == 1: density = density_flags[0]
elif len(density_flags) == 2: density = prompt_yn("density", "is this unit (" + config.unit + ") a spectral density?")
else: raise RuntimeError("Something went wrong")
# Brightness?
brightness_flags = possible_brightness_flags(config.unit)
if len(brightness_flags) == 1: brightness = brightness_flags[0]
elif len(brightness_flags) == 2: brightness = prompt_yn("brightness", "is this unit (" + config.unit + ") a surface brightness?")
else: raise RuntimeError("Something went wrong")
# Parse
unit = parse_unit(config.unit, density=density, brightness=brightness, density_strict=True, brightness_strict=True)
# Not a photometric unit
else: unit = parse_unit(config.unit)
# -----------------------------------------------------------------
# Set conversion info
conversion_info = dict()
if config.distance is not None: conversion_info["distance"] = config.distance
if config.wavelength is not None: conversion_info["wavelength"] = config.wavelength
if config.frequency is not None: conversion_info["frequency"] = config.frequency
if config.pixelscale is not None: conversion_info["pixelscale"] = config.pixelscale
if config.solid_angle is not None: conversion_info["solid_angle"] = config.solid_angle
if config.filter is not None: conversion_info["fltr"] = config.filter
# -----------------------------------------------------------------
# Convert
converted = quantity.to(unit, **conversion_info)
# -----------------------------------------------------------------
# Show
print(tostr(converted))
# -----------------------------------------------------------------
| agpl-3.0 | -4,891,003,356,254,152,000 | 42.333333 | 141 | 0.634615 | false |
m4tx/techswarm-server | tsserver/model.py | 1 | 1346 | from tsserver import db
from tsserver.strutils import to_camel_case
class Model(db.Model):
"""
Base class for all models. What differs it from default Model class is
serializable method, which is used to convert objects to JSON (or any other
representation) more easily.
"""
__abstract__ = True
@property
def serializable(self):
"""
Return representation of the model that is JSON-serializable
Basically what this function does is adding all columns with their
values to one dictionary. Some value types (such as datetime) are
beforehand converted to str to be JSON-serializable. Also, if model
has 'resource_url' attribute, its value is added to dictionary as
'url' key.
Please note that this function converts all column names (i.e.
dictionary keys) to camelCase. Reason for this behavior is while
snake_case is Pythonic and used for naming columns in models,
camelCase is more natural for JavaScript language, and therefore JSON.
:rtype: dict
"""
d = {}
for column in self.__table__.columns:
d[to_camel_case(column.name)] = getattr(self, column.name)
try:
d['url'] = self.resource_url
except AttributeError:
pass
return d
| mit | 8,821,978,695,869,308,000 | 31.829268 | 79 | 0.648588 | false |
gemelkelabs/timing_system_software | server_py_files/core/xstatus.py | 1 | 1031 | # -*- coding: utf-8 -*-
"""
Created on Sat Apr 05 21:30:07 2014
@author: Nate
"""
from xml.sax import saxutils
class xstatus_ready():
"""
Inheritance class to add a status output in xml-format to any object
"""
def xstatus(self):
"""
        Returns the status of the listener as XML and recurses into child objects.
"""
status="<"+self.__class__.__name__+">"
status+=self.iterate_dict(vars(self))
status+="</"+self.__class__.__name__+">"
return status
def iterate_dict(self,dictelm):
"""
steps through a dictionary
"""
status=''
for var in dictelm.keys():
status+="<"+var+">"
if type(dictelm[var])==type({}): status+=self.iterate_dict(dictelm[var])
elif hasattr(dictelm[var],'xstatus'):
status+=dictelm[var].xstatus()
else: status+=saxutils.escape(str(dictelm[var]))
status+="</"+var+">"
return status | mit | 3,538,536,760,451,454,000 | 28.485714 | 96 | 0.519884 | false |
cheery/essence | essence/selection.py | 1 | 12706 | # This file is part of Essential Editor Research Project (EERP)
#
# EERP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# EERP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EERP. If not, see <http://www.gnu.org/licenses/>.
from document import editlength, star, dot, element, iselement, empty_template, filled_template, replace, copy, copyList, dotmarker, starmarker, holepartial
from random import randint
from util import makelist, pull, push
def clamp(star, base):
return max(star.first, min(base, star.last))
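# Note (added commentary): mark0/mark1/mark2 below build marker objects for a
# hole; mark0 puts the caret at the start of the hole, mark1 at its end, and
# mark2 selects the whole hole (cursor at the end, tail at the start).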
def mark0(hole):
if isinstance(hole, dot):
return dotmarker(hole, False)
if isinstance(hole, star):
return starmarker(hole, 0, 0)
if isinstance(hole, holepartial):
return starmarker(hole.hole, hole.start, hole.start)
def mark1(hole):
if isinstance(hole, dot):
return dotmarker(hole, True)
if isinstance(hole, star):
cursor = hole.last
return starmarker(hole, cursor, cursor)
if isinstance(hole, holepartial):
return starmarker(hole.hole, hole.stop, hole.stop)
def mark2(hole):
if isinstance(hole, dot):
return dotmarker(hole, True)
if isinstance(hole, star):
return starmarker(hole, hole.last, hole.first)
if isinstance(hole, holepartial):
return starmarker(hole.hole, hole.stop, hole.start)
def first_marker(obj):
if iselement(obj) and len(obj.holes) > 0:
return mark0(obj.holes[0])
def last_marker(obj):
if iselement(obj) and len(obj.holes) > 0:
return mark1(obj.holes[-1])
class Selection(object):
def __init__(self, buffer, path, parent, marker):
self.buffer = buffer
self.path = list(path)
self.parent = parent
self.marker = marker
def remove(self):
if self.marker.dot:
self.buffer.do(self.marker, replace([None]))
else:
self.replace([])
def replace(self, data, branch_in=False):
length = sum(editlength(obj) for obj in data)
self.buffer.do(self.marker, replace(data))
if self.marker.dot:
self.marker = dotmarker(self.marker.hole, visited = not branch_in)
else:
cursor = self.marker.start + length * (not branch_in)
self.marker = starmarker(self.marker.hole, cursor, cursor)
if branch_in:
self.walk()
# works only if there's starmarker
def move(self, offset, selection=False, relative=True):
if not self.marker.dot:
cursor = clamp(self.marker.hole, relative*self.marker.cursor + offset)
tail = self.marker.tail if selection else cursor
self.marker = starmarker(self.marker.hole, cursor, tail)
@property
def empty(self):
marker = self.marker
if isinstance(marker, starmarker):
return marker.start == marker.stop
return False
@property
def bounds(self):
marker = self.marker
start0, stop0 = marker.hole.last, marker.hole.first
for partial in marker.hole.partials:
start1, stop1 = partial.start, partial.stop
if start1 <= marker.cursor <= stop1:
start0 = min(start0, start1)
stop0 = max(stop0, stop1)
return start0, stop0
def walk(self):
marker = self.marker
parent = self.parent
if marker.dot and marker.at_top:
new_marker = first_marker(marker.hole.a)
if new_marker is not None:
self.path.append((self.parent, marker.hole))
self.parent = marker.hole.a
self.marker = new_marker
return
marker = mark1(marker.hole)
if marker.at_bottom:
index = parent.holes.index(marker.hole)
if index + 1 < len(self.parent.holes):
self.marker = mark0(parent.holes[index + 1])
elif len(self.path) > 0:
parent, partial = self.path.pop(-1)
self.parent = parent
self.marker = mark1(partial)
return
if marker.dot:
return
for partial in marker.hole.partials:
if partial.start <= marker.cursor < partial.stop:
new_marker = first_marker(partial.a)
if new_marker is None:
self.move(partial.stop, relative=False)
return
else:
self.path.append((self.parent, partial))
self.parent = partial.a
self.marker = new_marker
return
def walk_backwards(self):
marker = self.marker
parent = self.parent
if marker.dot and marker.at_bottom:
new_marker = last_marker(marker.hole.a)
if new_marker is not None:
self.path.append((self.parent, marker.hole))
self.parent = marker.hole.a
self.marker = new_marker
return
marker = mark0(marker.hole)
if marker.at_top:
index = parent.holes.index(marker.hole)
if index > 0:
self.marker = mark1(parent.holes[index - 1])
elif len(self.path) > 0:
parent, partial = self.path.pop(-1)
self.parent = parent
self.marker = mark0(partial)
return
for partial in marker.hole.partials:
if partial.start < marker.cursor <= partial.stop:
new_marker = last_marker(partial.a)
if new_marker is None:
self.move(partial.start, relative=False)
return
else:
self.path.append((self.parent, partial))
self.parent = partial.a
self.marker = new_marker
return
def select_parent(self, mark=mark2):
if len(self.path) > 0:
self.parent, hole = self.path.pop(-1)
self.marker = mark(hole)
return True
return False
def expand(self):
marker = self.marker
if marker.dot or (marker.start == marker.hole.first and marker.stop == marker.hole.last):
self.select_parent()
else:
tail_loc = marker.cursor < marker.tail
tail = (marker.hole.first, marker.hole.last)[tail_loc]
cursor = (marker.hole.first, marker.hole.last)[not tail_loc]
self.marker = starmarker(marker.hole, cursor, tail)
def at_leaf(self):
if self.marker.dot:
obj = self.marker.hole.a
if obj is None or len(obj.holes) == 0:
return True
return False
#class Selection(object):
# def __init__(self, buffer, finger, cursor, tail):
# self.buffer = buffer
# self.finger = finger
# self.cursor = cursor
# self.tail = tail
#
# @property
# def start(self):
# return min(self.cursor, self.tail)
#
# @property
# def stop(self):
# return max(self.cursor, self.tail)
#
# @property
# def top(self):
# return self.buffer.document.context(self.finger)[-1]
#
# @property
# def ascendable(self):
# return len(self.finger) > 0
#
# def descendable(self, index):
# top = self.top
# return index < len(top) and isinstance(top[index], element)
#
# @property
# def yank(self):
# return copyList(self.top[self.start:self.stop])
#
# @property
# @makelist
# def frame_context(self):
# current = self.buffer.visual
# it = iter(self.finger)
# while current is not None:
# yield current
# index = it.next()
# next = None
# for frame in current.find():
# base, _ = frame.range
# if base == index:
# next = frame
# current = next
#
# def splice(self, data):
# length = len(data)
# operation = splice(self.start, self.stop, data)
# self.buffer.do(self.finger, operation)
# return Selection(
# self.buffer,
# self.finger,
# self.start + length,
# self.start + length,
# )
# # SPLICE (text, elements, range)
#
# @property
# def ascend(self):
# finger, cursor = pull(self.finger)
# return Selection(self.buffer, finger, self.cursor, self.tail).grasp(cursor, cursor+1)
#
# def descend(self, base):
# finger = push(self.finger, base)
# start, stop = 0, len(self.top[base])
# return Selection(self.buffer, finger, self.cursor, self.tail).grasp(start,stop)
#
# def build(self, outside=False, kw=None):
# kw = {'which':'scratch'} if kw is None else kw
# start = self.start
# stop = self.stop
# operation = build(start, stop, kw)
# self.buffer.do(self.finger, operation)
# if outside:
# finger = self.finger
# stop = start + 1
# else:
# finger = push(self.finger, start)
# start -= start
# stop -= start
# return Selection(self.buffer, finger, self.cursor, self.tail).grasp(start, stop)
# # BUILD (with selection and type)
#
# def collapse(self):
# finger, base = pull(self.finger)
# start = self.start + base
# stop = self.stop + base
# operation = collapse(base)
# self.buffer.do(finger, operation)
# return Selection(self.buffer, finger, start, stop)
# # COLLAPSE
#
# def modify(self, kw):
# self.buffer.do(self.finger, modify(kw))
# return self
# # MODIFY
#
# @property
# def bounds(self):
# top = self.top
# cursor = self.cursor - 1
#
# while cursor > 0:
# if isinstance(top[cursor], element):
# break
# if isinstance(top[cursor-1], element):
# break
# cursor -= 1
# start = cursor
#
# cursor = self.cursor + 1
# while cursor < len(top):
# if isinstance(top[cursor], element):
# break
# if isinstance(top[cursor-1], element):
# break
# cursor += 1
# stop = cursor
#
# return start, stop
#
# @property
# def textbounds(self):
# top = self.top
# cursor = self.cursor
# while cursor > 0:
# if isinstance(top[cursor-1], element):
# break
# cursor -= 1
# start = cursor
#
# while cursor < len(top):
# if isinstance(top[cursor], element):
# break
# cursor += 1
# stop = cursor
#
# return start, stop
#
# def walk_backward(self):
# top = self.top
# cursor = self.cursor
# if cursor == 0:
# if self.ascendable:
# finger, cursor = pull(self.finger)
# return Selection(self.buffer, finger, cursor, cursor)
# return self
# elif isinstance(top[cursor-1], element):
# finger = push(self.finger, cursor-1)
# cursor = len(top[cursor-1])
# return Selection(self.buffer, finger, cursor, cursor)
# else:
# cursor = self.bounds[0]
# return Selection(self.buffer, self.finger, cursor, cursor)
#
# def walk_forward(self):
# top = self.top
# cursor = self.cursor
# if cursor >= len(top):
# if self.ascendable:
# finger, cursor = pull(self.finger)
# return Selection(self.buffer, finger, cursor+1, cursor+1)
# return self
# elif isinstance(top[cursor], element):
# finger = push(self.finger, cursor)
# cursor = 0
# return Selection(self.buffer, finger, cursor, cursor)
# else:
# cursor = self.bounds[1]
# return Selection(self.buffer, self.finger, cursor, cursor)
# # NAVIGATE (UP, DOWN, LEFT+[SH], RIGHT+[SH], LB, RB)
#
# def grasp(self, start, stop):
# if self.cursor < self.tail:
# cursor, tail = start, stop
# else:
# tail, cursor = start, stop
# return Selection(self.buffer, self.finger, cursor, tail)
| gpl-3.0 | 40,778,738,927,370,540 | 33.064343 | 156 | 0.557217 | false |
google/material-design-icons | update/venv/lib/python3.9/site-packages/fontTools/pens/basePen.py | 3 | 13126 | """fontTools.pens.basePen.py -- Tools and base classes to build pen objects.
The Pen Protocol
A Pen is a kind of object that standardizes the way how to "draw" outlines:
it is a middle man between an outline and a drawing. In other words:
it is an abstraction for drawing outlines, making sure that outline objects
don't need to know the details about how and where they're being drawn, and
that drawings don't need to know the details of how outlines are stored.
The most basic pattern is this:
outline.draw(pen) # 'outline' draws itself onto 'pen'
Pens can be used to render outlines to the screen, but also to construct
new outlines. Eg. an outline object can be both a drawable object (it has a
draw() method) as well as a pen itself: you *build* an outline using pen
methods.
The AbstractPen class defines the Pen protocol. It implements almost
nothing (only no-op closePath() and endPath() methods), but is useful
for documentation purposes. Subclassing it basically tells the reader:
"this class implements the Pen protocol.". An example of an AbstractPen
subclass is fontTools.pens.transformPen.TransformPen.
The BasePen class is a base implementation useful for pens that actually
draw (for example a pen that renders outlines using a native graphics engine).
BasePen contains a lot of base functionality, making it very easy to build
a pen that fully conforms to the pen protocol. Note that if you subclass
BasePen, you _don't_ override moveTo(), lineTo(), etc., but _moveTo(),
_lineTo(), etc. See the BasePen doc string for details. Examples of
BasePen subclasses are fontTools.pens.boundsPen.BoundsPen and
fontTools.pens.cocoaPen.CocoaPen.
Coordinates are usually expressed as (x, y) tuples, but generally any
sequence of length 2 will do.
"""
from typing import Tuple
from fontTools.misc.loggingTools import LogMixin
__all__ = ["AbstractPen", "NullPen", "BasePen",
"decomposeSuperBezierSegment", "decomposeQuadraticSegment"]
class AbstractPen:
def moveTo(self, pt: Tuple[float, float]) -> None:
"""Begin a new sub path, set the current point to 'pt'. You must
end each sub path with a call to pen.closePath() or pen.endPath().
"""
raise NotImplementedError
def lineTo(self, pt: Tuple[float, float]) -> None:
"""Draw a straight line from the current point to 'pt'."""
raise NotImplementedError
def curveTo(self, *points: Tuple[float, float]) -> None:
"""Draw a cubic bezier with an arbitrary number of control points.
The last point specified is on-curve, all others are off-curve
(control) points. If the number of control points is > 2, the
segment is split into multiple bezier segments. This works
like this:
Let n be the number of control points (which is the number of
arguments to this call minus 1). If n==2, a plain vanilla cubic
bezier is drawn. If n==1, we fall back to a quadratic segment and
if n==0 we draw a straight line. It gets interesting when n>2:
n-1 PostScript-style cubic segments will be drawn as if it were
one curve. See decomposeSuperBezierSegment().
The conversion algorithm used for n>2 is inspired by NURB
splines, and is conceptually equivalent to the TrueType "implied
points" principle. See also decomposeQuadraticSegment().
"""
raise NotImplementedError
def qCurveTo(self, *points: Tuple[float, float]) -> None:
"""Draw a whole string of quadratic curve segments.
The last point specified is on-curve, all others are off-curve
points.
This method implements TrueType-style curves, breaking up curves
		using 'implied points': between each two consecutive off-curve points,
there is one implied point exactly in the middle between them. See
also decomposeQuadraticSegment().
The last argument (normally the on-curve point) may be None.
This is to support contours that have NO on-curve points (a rarely
seen feature of TrueType outlines).
"""
raise NotImplementedError
def closePath(self) -> None:
"""Close the current sub path. You must call either pen.closePath()
or pen.endPath() after each sub path.
"""
pass
def endPath(self) -> None:
"""End the current sub path, but don't close it. You must call
either pen.closePath() or pen.endPath() after each sub path.
"""
pass
def addComponent(
self,
glyphName: str,
transformation: Tuple[float, float, float, float, float, float]
) -> None:
"""Add a sub glyph. The 'transformation' argument must be a 6-tuple
containing an affine transformation, or a Transform object from the
fontTools.misc.transform module. More precisely: it should be a
sequence containing 6 numbers.
"""
raise NotImplementedError
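# A minimal sketch of implementing the protocol defined by AbstractPen above.
# This block is purely illustrative and not part of the original module;
# 'glyph' stands for any hypothetical object exposing a draw(pen) method.
#
#     class PrintPen(AbstractPen):
#         def moveTo(self, pt): print("moveTo", pt)
#         def lineTo(self, pt): print("lineTo", pt)
#         def curveTo(self, *points): print("curveTo", points)
#         def qCurveTo(self, *points): print("qCurveTo", points)
#
#     glyph.draw(PrintPen())  # the glyph calls the pen methods in drawing order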
class NullPen(AbstractPen):
"""A pen that does nothing.
"""
def moveTo(self, pt):
pass
def lineTo(self, pt):
pass
def curveTo(self, *points):
pass
def qCurveTo(self, *points):
pass
def closePath(self):
pass
def endPath(self):
pass
def addComponent(self, glyphName, transformation):
pass
class LoggingPen(LogMixin, AbstractPen):
"""A pen with a `log` property (see fontTools.misc.loggingTools.LogMixin)
"""
pass
class MissingComponentError(KeyError):
"""Indicates a component pointing to a non-existent glyph in the glyphset."""
class DecomposingPen(LoggingPen):
""" Implements a 'addComponent' method that decomposes components
(i.e. draws them onto self as simple contours).
It can also be used as a mixin class (e.g. see ContourRecordingPen).
You must override moveTo, lineTo, curveTo and qCurveTo. You may
additionally override closePath, endPath and addComponent.
By default a warning message is logged when a base glyph is missing;
set the class variable ``skipMissingComponents`` to False if you want
to raise a :class:`MissingComponentError` exception.
"""
skipMissingComponents = True
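	# Illustrative only (not part of the original module): a subclass that
	# prefers a hard failure over silently skipping missing components could
	# flip the flag; 'RecordingPen' stands for any concrete pen class here.
	#
	#     class StrictDecomposingPen(DecomposingPen, RecordingPen):
	#         skipMissingComponents = False  # raises MissingComponentError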
def __init__(self, glyphSet):
""" Takes a single 'glyphSet' argument (dict), in which the glyphs
that are referenced as components are looked up by their name.
"""
super(DecomposingPen, self).__init__()
self.glyphSet = glyphSet
def addComponent(self, glyphName, transformation):
""" Transform the points of the base glyph and draw it onto self.
"""
from fontTools.pens.transformPen import TransformPen
try:
glyph = self.glyphSet[glyphName]
except KeyError:
if not self.skipMissingComponents:
raise MissingComponentError(glyphName)
self.log.warning(
"glyph '%s' is missing from glyphSet; skipped" % glyphName)
else:
tPen = TransformPen(self, transformation)
glyph.draw(tPen)
class BasePen(DecomposingPen):
"""Base class for drawing pens. You must override _moveTo, _lineTo and
_curveToOne. You may additionally override _closePath, _endPath,
addComponent and/or _qCurveToOne. You should not override any other
methods.
"""
def __init__(self, glyphSet=None):
super(BasePen, self).__init__(glyphSet)
self.__currentPoint = None
# must override
def _moveTo(self, pt):
raise NotImplementedError
def _lineTo(self, pt):
raise NotImplementedError
def _curveToOne(self, pt1, pt2, pt3):
raise NotImplementedError
# may override
def _closePath(self):
pass
def _endPath(self):
pass
def _qCurveToOne(self, pt1, pt2):
"""This method implements the basic quadratic curve type. The
default implementation delegates the work to the cubic curve
function. Optionally override with a native implementation.
"""
pt0x, pt0y = self.__currentPoint
pt1x, pt1y = pt1
pt2x, pt2y = pt2
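		# Degree elevation: the quadratic (pt0, pt1, pt2) is reproduced exactly
		# by the cubic whose control points lie two thirds of the way from each
		# end point towards the quadratic's single control point pt1.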
mid1x = pt0x + 0.66666666666666667 * (pt1x - pt0x)
mid1y = pt0y + 0.66666666666666667 * (pt1y - pt0y)
mid2x = pt2x + 0.66666666666666667 * (pt1x - pt2x)
mid2y = pt2y + 0.66666666666666667 * (pt1y - pt2y)
self._curveToOne((mid1x, mid1y), (mid2x, mid2y), pt2)
# don't override
def _getCurrentPoint(self):
"""Return the current point. This is not part of the public
interface, yet is useful for subclasses.
"""
return self.__currentPoint
def closePath(self):
self._closePath()
self.__currentPoint = None
def endPath(self):
self._endPath()
self.__currentPoint = None
def moveTo(self, pt):
self._moveTo(pt)
self.__currentPoint = pt
def lineTo(self, pt):
self._lineTo(pt)
self.__currentPoint = pt
def curveTo(self, *points):
n = len(points) - 1 # 'n' is the number of control points
assert n >= 0
if n == 2:
# The common case, we have exactly two BCP's, so this is a standard
# cubic bezier. Even though decomposeSuperBezierSegment() handles
# this case just fine, we special-case it anyway since it's so
# common.
self._curveToOne(*points)
self.__currentPoint = points[-1]
elif n > 2:
# n is the number of control points; split curve into n-1 cubic
# bezier segments. The algorithm used here is inspired by NURB
# splines and the TrueType "implied point" principle, and ensures
# the smoothest possible connection between two curve segments,
# with no disruption in the curvature. It is practical since it
# allows one to construct multiple bezier segments with a much
			# smaller number of points.
_curveToOne = self._curveToOne
for pt1, pt2, pt3 in decomposeSuperBezierSegment(points):
_curveToOne(pt1, pt2, pt3)
self.__currentPoint = pt3
elif n == 1:
self.qCurveTo(*points)
elif n == 0:
self.lineTo(points[0])
else:
raise AssertionError("can't get there from here")
def qCurveTo(self, *points):
n = len(points) - 1 # 'n' is the number of control points
assert n >= 0
if points[-1] is None:
# Special case for TrueType quadratics: it is possible to
# define a contour with NO on-curve points. BasePen supports
# this by allowing the final argument (the expected on-curve
# point) to be None. We simulate the feature by making the implied
# on-curve point between the last and the first off-curve points
# explicit.
x, y = points[-2] # last off-curve point
nx, ny = points[0] # first off-curve point
impliedStartPoint = (0.5 * (x + nx), 0.5 * (y + ny))
self.__currentPoint = impliedStartPoint
self._moveTo(impliedStartPoint)
points = points[:-1] + (impliedStartPoint,)
if n > 0:
# Split the string of points into discrete quadratic curve
# segments. Between any two consecutive off-curve points
# there's an implied on-curve point exactly in the middle.
# This is where the segment splits.
_qCurveToOne = self._qCurveToOne
for pt1, pt2 in decomposeQuadraticSegment(points):
_qCurveToOne(pt1, pt2)
self.__currentPoint = pt2
else:
self.lineTo(points[0])
def decomposeSuperBezierSegment(points):
"""Split the SuperBezier described by 'points' into a list of regular
bezier segments. The 'points' argument must be a sequence with length
3 or greater, containing (x, y) coordinates. The last point is the
destination on-curve point, the rest of the points are off-curve points.
The start point should not be supplied.
This function returns a list of (pt1, pt2, pt3) tuples, which each
specify a regular curveto-style bezier segment.
"""
n = len(points) - 1
assert n > 1
bezierSegments = []
pt1, pt2, pt3 = points[0], None, None
for i in range(2, n+1):
# calculate points in between control points.
nDivisions = min(i, 3, n-i+2)
for j in range(1, nDivisions):
factor = j / nDivisions
temp1 = points[i-1]
temp2 = points[i-2]
temp = (temp2[0] + factor * (temp1[0] - temp2[0]),
temp2[1] + factor * (temp1[1] - temp2[1]))
if pt2 is None:
pt2 = temp
else:
pt3 = (0.5 * (pt2[0] + temp[0]),
0.5 * (pt2[1] + temp[1]))
bezierSegments.append((pt1, pt2, pt3))
pt1, pt2, pt3 = temp, None, None
bezierSegments.append((pt1, points[-2], points[-1]))
return bezierSegments
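# A hand-traced example of the decomposition above (illustrative only; the
# input points and resulting values are assumptions chosen for this sketch):
#
#     decomposeSuperBezierSegment(((0, 0), (4, 0), (4, 4), (0, 4)))
#     -> [((0, 0), (2.0, 0.0), (3.0, 1.0)), ((4.0, 2.0), (4, 4), (0, 4))]
#
# The three off-curve points become two cubic segments joining at (3.0, 1.0).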
def decomposeQuadraticSegment(points):
"""Split the quadratic curve segment described by 'points' into a list
of "atomic" quadratic segments. The 'points' argument must be a sequence
with length 2 or greater, containing (x, y) coordinates. The last point
is the destination on-curve point, the rest of the points are off-curve
points. The start point should not be supplied.
This function returns a list of (pt1, pt2) tuples, which each specify a
plain quadratic bezier segment.
"""
n = len(points) - 1
assert n > 0
quadSegments = []
for i in range(n - 1):
x, y = points[i]
nx, ny = points[i+1]
impliedPt = (0.5 * (x + nx), 0.5 * (y + ny))
quadSegments.append((points[i], impliedPt))
quadSegments.append((points[-2], points[-1]))
return quadSegments
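# A hand-traced example of the decomposition above (illustrative only):
#
#     decomposeQuadraticSegment(((0, 0), (10, 0), (20, 20)))
#     -> [((0, 0), (5.0, 0.0)), ((10, 0), (20, 20))]
#
# The implied on-curve point (5.0, 0.0) sits halfway between the two
# consecutive off-curve points (0, 0) and (10, 0).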
class _TestPen(BasePen):
"""Test class that prints PostScript to stdout."""
def _moveTo(self, pt):
print("%s %s moveto" % (pt[0], pt[1]))
def _lineTo(self, pt):
print("%s %s lineto" % (pt[0], pt[1]))
def _curveToOne(self, bcp1, bcp2, pt):
print("%s %s %s %s %s %s curveto" % (bcp1[0], bcp1[1],
bcp2[0], bcp2[1], pt[0], pt[1]))
def _closePath(self):
print("closepath")
if __name__ == "__main__":
pen = _TestPen(None)
pen.moveTo((0, 0))
pen.lineTo((0, 100))
pen.curveTo((50, 75), (60, 50), (50, 25), (0, 0))
pen.closePath()
pen = _TestPen(None)
# testing the "no on-curve point" scenario
pen.qCurveTo((0, 0), (0, 100), (100, 100), (100, 0), None)
pen.closePath()
| apache-2.0 | 4,968,359,647,964,168,000 | 31.490099 | 78 | 0.710879 | false |
almarklein/bokeh | bokeh/server/views/data.py | 1 | 2797 |
import json
from flask import jsonify, request
from six import iteritems
from bokeh import protocol
from bokeh.models import Range1d
from .backbone import init_bokeh
from ..app import bokeh_app
from ..crossdomain import crossdomain
from ..serverbb import prune
from ..views import make_json
@bokeh_app.route("/bokeh/data/<username>", methods=['GET', 'OPTIONS'])
@crossdomain(origin="*", headers=['BOKEH-API-KEY', 'Continuum-Clientid'])
def list_sources(username):
bokehuser = bokeh_app.authentication.current_user()
request_username = bokehuser.username
sources = bokeh_app.datamanager.list_data_sources(request_username,
username)
return jsonify(sources=sources)
def _make_range(r):
"""Create a range from the start/end values passed.
This function is required because some BokehJS Range objects
have ids but some don't and some have docs but some don't...
so this is sort of a #Hack....
This may be removed when a better plot_state mechanism is created.
"""
return Range1d(start=r['start'], end=r['end'])
@bokeh_app.route("/bokeh/data/<username>/<docid>/<datasourceid>", methods=['GET', 'OPTIONS'])
@crossdomain(origin="*", headers=['BOKEH-API-KEY', 'Continuum-Clientid'])
def get_data(username, docid, datasourceid):
bokehuser = bokeh_app.authentication.current_user()
request_username = bokehuser.username
# handle docid later...
clientdoc = bokeh_app.backbone_storage.get_document(docid)
prune(clientdoc)
init_bokeh(clientdoc)
serverdatasource = clientdoc._models[datasourceid]
parameters = json.loads(request.values.get('resample_parameters'))
plot_state = json.loads(request.values.get('plot_state'))
render_state = json.loads(request.values.get('render_state')) if 'render_state' in request.values else None
    # TODO: Deserializing directly to ranges....awk-ward.
# There is probably a better way via the properties system that detects type...probably...
# Possibly pass the whole plot_view object through instead of just the fragments we get with this mechanism
plot_state=dict([(k, _make_range(r)) for k,r in iteritems(plot_state)])
result = bokeh_app.datamanager.get_data(
request_username,
serverdatasource,
parameters,
plot_state,
render_state)
json_result = make_json(protocol.serialize_json(result))
return json_result
@bokeh_app.route("/bokeh/data/upload/<username>/<name>", methods=['POST'])
def upload(username, name):
bokehuser = bokeh_app.authentication.current_user()
request_username = bokehuser.username
f = request.files['file']
url = bokeh_app.datamanager.write(request_username, name, f)
return url
| bsd-3-clause | -7,211,432,925,990,131,000 | 37.847222 | 111 | 0.6936 | false |
dpinney/omf | omf/scratch/MPUPV/solarAdd.py | 1 | 1552 | from omf import feeder
import omf.solvers.gridlabd
feed = feeder.parse('GC-12.47-1.glm')
maxKey = feeder.getMaxKey(feed)
print(feed[1])
feed[maxKey + 1] = {
'object': 'node', 'name': 'test_solar_node', 'phases': 'ABCN',
'nominal_voltage': '7200'
}
feed[maxKey + 2] = {
'object': 'underground_line', 'name': 'test_solar_line', 'phases': 'ABCN',
'from': 'test_solar_node', 'to': 'GC-12-47-1_node_26', 'length': '100',
'configuration': 'line_configuration:6'
}
feed[maxKey + 3] = {
'object': 'meter', 'name': 'test_solar_meter', 'parent': 'test_solar_node',
'phases': 'ABCN', 'nominal_voltage': '480'
}
feed[maxKey + 4] = {
'object': 'inverter', 'name': 'test_solar_inverter', 'parent': 'test_solar_meter',
'phases': 'AS', 'inverter_type': 'PWM', 'power_factor': '1.0',
'generator_status': 'ONLINE', 'generator_mode': 'CONSTANT_PF'
}
feed[maxKey + 5] = {
'object': 'solar', 'name': 'test_solar', 'parent': 'test_solar_inverter', 'area': '1000000 sf',
'generator_status': 'ONLINE', 'efficiency': '0.2', 'generator_mode': 'SUPPLY_DRIVEN',
'panel_type': 'SINGLE_CRYSTAL_SILICON'
}
feed[maxKey + 6] = {
'object': 'recorder', 'parent': 'test_solar_meter', 'property': 'voltage_A.real,voltage_A.imag,voltage_B.real,voltage_B.imag,voltage_C.real,voltage_C.imag',
'file': 'GC-addSolar-voltages.csv', 'interval': '60', 'limit': '1440'
}
omf.solvers.gridlabd.runInFilesystem(feed, keepFiles = True, workDir = '.', glmName = 'GC-solarAdd.glm')
'''
output = open('GC-solarAdd.glm', 'w')
output.write(feeder.write(feed))
output.close()
''' | gpl-2.0 | -2,700,435,537,165,388,300 | 35.116279 | 158 | 0.642397 | false |
mricharleon/HatosGanaderos | userena/urls.py | 1 | 4539 | from django.conf.urls import *
from django.contrib.auth import views as auth_views
from userena import views as userena_views
from userena import settings as userena_settings
from userena.compat import auth_views_compat_quirks, password_reset_uid_kwarg
def merged_dict(dict_a, dict_b):
    """Merges two dicts and returns the result. Its purpose is to ease use of
``auth_views_compat_quirks``
"""
dict_a.update(dict_b)
return dict_a
urlpatterns = patterns('',
# Signup, signin and signout
url(r'^add_profile/$',
userena_views.add_profile,
name='add_profile'),
url(r'^signup/$',
userena_views.signup,
name='userena_signup'),
url(r'^signin/$',
userena_views.signin,
name='userena_signin'),
url(r'^signout/$',
userena_views.signout,
name='userena_signout'),
# Reset password
url(r'^password/reset/$',
auth_views.password_reset,
merged_dict({'template_name': 'userena/password_reset_form.html',
'email_template_name': 'userena/emails/password_reset_message.txt',
'extra_context': {'without_usernames': userena_settings.USERENA_WITHOUT_USERNAMES}
}, auth_views_compat_quirks['userena_password_reset']),
name='userena_password_reset'),
url(r'^password/reset/done/$',
auth_views.password_reset_done,
{'template_name': 'userena/password_reset_done.html',},
name='userena_password_reset_done'),
url(r'^password/reset/confirm/(?P<%s>[0-9A-Za-z]+)-(?P<token>.+)/$' % password_reset_uid_kwarg,
auth_views.password_reset_confirm,
merged_dict({'template_name': 'userena/password_reset_confirm_form.html',
}, auth_views_compat_quirks['userena_password_reset_confirm']),
name='userena_password_reset_confirm'),
url(r'^password/reset/confirm/complete/$',
auth_views.password_reset_complete,
{'template_name': 'userena/password_reset_complete.html'},
name='userena_password_reset_complete'),
# Signup
url(r'^(?P<username>[\.\w-]+)/signup/complete/$',
userena_views.direct_to_user_template,
{'template_name': 'userena/signup_complete.html',
'extra_context': {'userena_activation_required': userena_settings.USERENA_ACTIVATION_REQUIRED,
'userena_activation_days': userena_settings.USERENA_ACTIVATION_DAYS}},
name='userena_signup_complete'),
# Activate
url(r'^activate/(?P<activation_key>\w+)/$',
userena_views.activate,
name='userena_activate'),
# Retry activation
url(r'^activate/retry/(?P<activation_key>\w+)/$',
userena_views.activate_retry,
name='userena_activate_retry'),
# Change email and confirm it
url(r'^(?P<username>[\.\w-]+)/email/$',
userena_views.email_change,
name='userena_email_change'),
url(r'^(?P<username>[\.\w-]+)/email/complete/$',
userena_views.direct_to_user_template,
{'template_name': 'userena/email_change_complete.html'},
name='userena_email_change_complete'),
url(r'^(?P<username>[\.\w-]+)/confirm-email/complete/$',
userena_views.direct_to_user_template,
{'template_name': 'userena/email_confirm_complete.html'},
name='userena_email_confirm_complete'),
url(r'^confirm-email/(?P<confirmation_key>\w+)/$',
userena_views.email_confirm,
name='userena_email_confirm'),
# Disabled account
url(r'^(?P<username>[\.\w-]+)/disabled/$',
userena_views.disabled_account,
{'template_name': 'userena/disabled.html'},
name='userena_disabled'),
# Change password
url(r'^(?P<username>[\.\w-]+)/password/$',
userena_views.password_change,
name='userena_password_change'),
url(r'^(?P<username>[\.\w-]+)/password/complete/$',
userena_views.direct_to_user_template,
{'template_name': 'userena/password_complete.html'},
name='userena_password_change_complete'),
# Edit profile
url(r'^(?P<username>[\.\w-]+)/edit/$',
userena_views.profile_edit,
name='userena_profile_edit'),
# View profiles
url(r'^(?P<username>(?!signout|signup|signin)[\.\w-]+)/$',
userena_views.profile_detail,
name='userena_profile_detail'),
url(r'^page/(?P<page>[0-9]+)/$',
userena_views.ProfileListView.as_view(),
name='userena_profile_list_paginated'),
url(r'^$',
userena_views.ProfileListView.as_view(),
name='userena_profile_list'),
)
| gpl-2.0 | -6,829,509,290,407,092,000 | 37.142857 | 102 | 0.625028 | false |
tianhao64/vsphere-automation-sdk-python | samples/vsphere/vcenter/vm/main.py | 1 | 5800 | """
* *******************************************************
* Copyright (c) VMware, Inc. 2016-2018. All Rights Reserved.
* SPDX-License-Identifier: MIT
* *******************************************************
*
* DISCLAIMER. THIS PROGRAM IS PROVIDED TO YOU "AS IS" WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, WHETHER ORAL OR WRITTEN,
* EXPRESS OR IMPLIED. THE AUTHOR SPECIFICALLY DISCLAIMS ANY IMPLIED
* WARRANTIES OR CONDITIONS OF MERCHANTABILITY, SATISFACTORY QUALITY,
* NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE.
"""
__author__ = 'VMware, Inc.'
__vcenter_version__ = '6.5+'
from com.vmware.vcenter_client import Network
import samples.vsphere.vcenter.helper.network_helper
import samples.vsphere.vcenter.vm.hardware.main
import samples.vsphere.vcenter.vm.placement
import samples.vsphere.vcenter.vm.power
from samples.vsphere.common.sample_util import pp
from samples.vsphere.vcenter.setup import testbed_setup
from samples.vsphere.vcenter.setup import testbed
from samples.vsphere.vcenter.vm.create.create_default_vm import CreateDefaultVM
from samples.vsphere.vcenter.vm.create.create_basic_vm import CreateBasicVM
from samples.vsphere.vcenter.vm.create.create_exhaustive_vm import \
CreateExhaustiveVM
class VMSetup(object):
def __init__(self, context=None):
self.context = context
self.basic_vm = None
self.default_vm = None
self.exhaustive_vm = None
def setup(self, context):
print('Setup Samples Started')
self.context = context
###########################################################################
# Getting a PlacementSpec
###########################################################################
placement_spec = samples.vsphere.vcenter.vm.placement.get_placement_spec_for_resource_pool(context)
print('=' * 79)
print('= Resource selection')
print('=' * 79)
print('placement_spec={}'.format(pp(placement_spec)))
###########################################################################
# Getting a Network
        # Choose one of the following ways to get the network backing
# 1. STANDARD_PORTGROUP on DATACENTER2
# 2. DISTRIBUTED_PORTGROUP on DATACENTER2
###########################################################################
standard_network = samples.vsphere.vcenter.helper \
.network_helper.get_network_backing(
context.client,
context.testbed.config['STDPORTGROUP_NAME'],
context.testbed.config['VM_DATACENTER_NAME'],
Network.Type.STANDARD_PORTGROUP)
print('standard_network={}'.format(standard_network))
distributed_network = samples.vsphere.vcenter.helper \
.network_helper.get_network_backing(
context.client,
context.testbed.config['VDPORTGROUP1_NAME'],
context.testbed.config['VM_DATACENTER_NAME'],
Network.Type.DISTRIBUTED_PORTGROUP)
print('distributed_network={}'.format(distributed_network))
print('=' * 79)
self.default_vm = CreateDefaultVM(context.client,
placement_spec)
self.basic_vm = CreateBasicVM(context.client, placement_spec)
self.exhaustive_vm = CreateExhaustiveVM(context.client,
placement_spec,
standard_network,
distributed_network)
print('Setup Samples Complete')
def cleanup(self):
print('Cleanup Samples Started')
CreateDefaultVM(self.context.client).cleanup()
CreateBasicVM(self.context.client).cleanup()
CreateExhaustiveVM(self.context.client).cleanup()
print('Cleanup Samples Complete\n')
def validate(self):
print('Validating and Detecting Resources in vcenter.vm Samples')
r = testbed_setup.validate(self.context)
if r:
print('==> Samples Setup validated')
return True
else:
print('==> Samples Setup has errors')
return False
def run(self):
# Clean up in case of past failures
self.cleanup()
# Check that sample is ready to run
if self.context.option['DO_SAMPLES']:
if not self.validate():
exit(0)
###########################################################################
# Create VM samples
#
# Choose one of the following ways to create the VM
# 1. Default
# 2. Basic (2 disks, 1 nic)
# 3. Exhaustive (3 disks, 2 nics, 2 vcpu, 2 GB memory, boot=BIOS, 1 cdrom,
# 1 serial port, 1 parallel port, 1 floppy,
# boot_devices= [CDROM, DISK, ETHERNET])
###########################################################################
self.default_vm.run()
self.basic_vm.run()
self.exhaustive_vm.run()
###########################################################################
# Incremental device CRUDE + connect/disconnect samples
#
###########################################################################
if self.context.option['DO_SAMPLES_INCREMENTAL']:
samples.vsphere.vcenter.vm.hardware.main.setup(self.context)
samples.vsphere.vcenter.vm.hardware.main.validate(self.context)
samples.vsphere.vcenter.vm.hardware.main.run()
if self.context.option['DO_SAMPLES_CLEANUP']:
samples.vsphere.vcenter.vm.hardware.main.cleanup()
# Sample cleanup
if self.context.option['DO_SAMPLES_CLEANUP']:
self.cleanup()
| mit | -7,201,189,507,724,110,000 | 40.134752 | 107 | 0.545 | false |
Morijarti/dota-2-heatmap | heroes.py | 1 | 15331 | __author__ = 'yanbo'
HEROES = [{'dt_name': 'DT_DOTA_Unit_Hero_AntiMage',
'id': 2,
'localized_name': 'Anti-Mage',
'name': 'npc_dota_hero_antimage'},
{'dt_name': 'DT_DOTA_Unit_Hero_Axe',
'id': 3,
'localized_name': 'Axe',
'name': 'npc_dota_hero_axe'},
{'dt_name': 'DT_DOTA_Unit_Hero_Bane',
'id': 4,
'localized_name': 'Bane',
'name': 'npc_dota_hero_bane'},
{'dt_name': 'DT_DOTA_Unit_Hero_Bloodseeker',
'id': 5,
'localized_name': 'Bloodseeker',
'name': 'npc_dota_hero_bloodseeker'},
{'dt_name': 'DT_DOTA_Unit_Hero_CrystalMaiden',
'id': 6,
'localized_name': 'Crystal Maiden',
'name': 'npc_dota_hero_crystal_maiden'},
{'dt_name': 'DT_DOTA_Unit_Hero_DrowRanger',
'id': 7,
'localized_name': 'Drow Ranger',
'name': 'npc_dota_hero_drow_ranger'},
{'dt_name': 'DT_DOTA_Unit_Hero_Earthshaker',
'id': 8,
'localized_name': 'Earthshaker',
'name': 'npc_dota_hero_earthshaker'},
{'dt_name': 'DT_DOTA_Unit_Hero_Juggernaut',
'id': 9,
'localized_name': 'Juggernaut',
'name': 'npc_dota_hero_juggernaut'},
{'dt_name': 'DT_DOTA_Unit_Hero_Mirana',
'id': 10,
'localized_name': 'Mirana',
'name': 'npc_dota_hero_mirana'},
{'dt_name': 'DT_DOTA_Unit_Hero_Nevermore',
'id': 11,
'localized_name': 'Shadow Fiend',
'name': 'npc_dota_hero_nevermore'},
{'dt_name': 'DT_DOTA_Unit_Hero_Morphling',
'id': 12,
'localized_name': 'Morphling',
'name': 'npc_dota_hero_morphling'},
{'dt_name': 'DT_DOTA_Unit_Hero_PhantomLancer',
'id': 13,
'localized_name': 'Phantom Lancer',
'name': 'npc_dota_hero_phantom_lancer'},
{'dt_name': 'DT_DOTA_Unit_Hero_Puck',
'id': 14,
'localized_name': 'Puck',
'name': 'npc_dota_hero_puck'},
{'dt_name': 'DT_DOTA_Unit_Hero_Pudge',
'id': 15,
'localized_name': 'Pudge',
'name': 'npc_dota_hero_pudge'},
{'dt_name': 'DT_DOTA_Unit_Hero_Razor',
'id': 16,
'localized_name': 'Razor',
'name': 'npc_dota_hero_razor'},
{'dt_name': 'DT_DOTA_Unit_Hero_SandKing',
'id': 17,
'localized_name': 'Sand King',
'name': 'npc_dota_hero_sand_king'},
{'dt_name': 'DT_DOTA_Unit_Hero_StormSpirit',
'id': 18,
'localized_name': 'Storm Spirit',
'name': 'npc_dota_hero_storm_spirit'},
{'dt_name': 'DT_DOTA_Unit_Hero_Sven',
'id': 19,
'localized_name': 'Sven',
'name': 'npc_dota_hero_sven'},
{'dt_name': 'DT_DOTA_Unit_Hero_Tiny',
'id': 20,
'localized_name': 'Tiny',
'name': 'npc_dota_hero_tiny'},
{'dt_name': 'DT_DOTA_Unit_Hero_VengefulSpirit',
'id': 21,
'localized_name': 'Vengeful Spirit',
'name': 'npc_dota_hero_vengefulspirit'},
{'dt_name': 'DT_DOTA_Unit_Hero_Windrunner',
'id': 22,
'localized_name': 'Windrunner',
'name': 'npc_dota_hero_windrunner'},
{'dt_name': 'DT_DOTA_Unit_Hero_Zuus',
'id': 23,
'localized_name': 'Zeus',
'name': 'npc_dota_hero_zuus'},
{'dt_name': 'DT_DOTA_Unit_Hero_Kunkka',
'id': 24,
'localized_name': 'Kunkka',
'name': 'npc_dota_hero_kunkka'},
{'dt_name': 'DT_DOTA_Unit_Hero_Lina',
'id': 25,
'localized_name': 'Lina',
'name': 'npc_dota_hero_lina'},
{'dt_name': 'DT_DOTA_Unit_Hero_Lich',
'id': 26,
'localized_name': 'Lich',
'name': 'npc_dota_hero_lich'},
{'dt_name': 'DT_DOTA_Unit_Hero_Lion',
'id': 27,
'localized_name': 'Lion',
'name': 'npc_dota_hero_lion'},
{'dt_name': 'DT_DOTA_Unit_Hero_ShadowShaman',
'id': 28,
'localized_name': 'Shadow Shaman',
'name': 'npc_dota_hero_shadow_shaman'},
{'dt_name': 'DT_DOTA_Unit_Hero_Slardar',
'id': 29,
'localized_name': 'Slardar',
'name': 'npc_dota_hero_slardar'},
{'dt_name': 'DT_DOTA_Unit_Hero_Tidehunter',
'id': 30,
'localized_name': 'Tidehunter',
'name': 'npc_dota_hero_tidehunter'},
{'dt_name': 'DT_DOTA_Unit_Hero_WitchDoctor',
'id': 31,
'localized_name': 'Witch Doctor',
'name': 'npc_dota_hero_witch_doctor'},
{'dt_name': 'DT_DOTA_Unit_Hero_Riki',
'id': 32,
'localized_name': 'Riki',
'name': 'npc_dota_hero_riki'},
{'dt_name': 'DT_DOTA_Unit_Hero_Enigma',
'id': 33,
'localized_name': 'Enigma',
'name': 'npc_dota_hero_enigma'},
{'dt_name': 'DT_DOTA_Unit_Hero_Tinker',
'id': 34,
'localized_name': 'Tinker',
'name': 'npc_dota_hero_tinker'},
{'dt_name': 'DT_DOTA_Unit_Hero_Sniper',
'id': 35,
'localized_name': 'Sniper',
'name': 'npc_dota_hero_sniper'},
{'dt_name': 'DT_DOTA_Unit_Hero_Necrolyte',
'id': 36,
'localized_name': 'Necrolyte',
'name': 'npc_dota_hero_necrolyte'},
{'dt_name': 'DT_DOTA_Unit_Hero_Warlock',
'id': 37,
'localized_name': 'Warlock',
'name': 'npc_dota_hero_warlock'},
{'dt_name': 'DT_DOTA_Unit_Hero_Beastmaster',
'id': 38,
'localized_name': 'Beastmaster',
'name': 'npc_dota_hero_beastmaster'},
{'dt_name': 'DT_DOTA_Unit_Hero_QueenOfPain',
'id': 39,
'localized_name': 'Queen of Pain',
'name': 'npc_dota_hero_queenofpain'},
{'dt_name': 'DT_DOTA_Unit_Hero_Venomancer',
'id': 40,
'localized_name': 'Venomancer',
'name': 'npc_dota_hero_venomancer'},
{'dt_name': 'DT_DOTA_Unit_Hero_FacelessVoid',
'id': 41,
'localized_name': 'Faceless Void',
'name': 'npc_dota_hero_faceless_void'},
{'dt_name': 'DT_DOTA_Unit_Hero_SkeletonKing',
'id': 42,
'localized_name': 'Skeleton King',
'name': 'npc_dota_hero_skeleton_king'},
{'dt_name': 'DT_DOTA_Unit_Hero_DeathProphet',
'id': 43,
'localized_name': 'Death Prophet',
'name': 'npc_dota_hero_death_prophet'},
{'dt_name': 'DT_DOTA_Unit_Hero_PhantomAssassin',
'id': 44,
'localized_name': 'Phantom Assassin',
'name': 'npc_dota_hero_phantom_assassin'},
{'dt_name': 'DT_DOTA_Unit_Hero_Pugna',
'id': 45,
'localized_name': 'Pugna',
'name': 'npc_dota_hero_pugna'},
{'dt_name': 'DT_DOTA_Unit_Hero_TemplarAssassin',
'id': 46,
'localized_name': 'Templar Assassin',
'name': 'npc_dota_hero_templar_assassin'},
{'dt_name': 'DT_DOTA_Unit_Hero_Viper',
'id': 47,
'localized_name': 'Viper',
'name': 'npc_dota_hero_viper'},
{'dt_name': 'DT_DOTA_Unit_Hero_Luna',
'id': 48,
'localized_name': 'Luna',
'name': 'npc_dota_hero_luna'},
{'dt_name': 'DT_DOTA_Unit_Hero_DragonKnight',
'id': 49,
'localized_name': 'Dragon Knight',
'name': 'npc_dota_hero_dragon_knight'},
{'dt_name': 'DT_DOTA_Unit_Hero_Dazzle',
'id': 50,
'localized_name': 'Dazzle',
'name': 'npc_dota_hero_dazzle'},
{'dt_name': 'DT_DOTA_Unit_Hero_Rattletrap',
'id': 51,
'localized_name': 'Clockwerk',
'name': 'npc_dota_hero_rattletrap'},
{'dt_name': 'DT_DOTA_Unit_Hero_Leshrac',
'id': 52,
'localized_name': 'Leshrac',
'name': 'npc_dota_hero_leshrac'},
{'dt_name': 'DT_DOTA_Unit_Hero_Furion',
'id': 53,
'localized_name': "Nature's Prophet",
'name': 'npc_dota_hero_furion'},
{'dt_name': 'DT_DOTA_Unit_Hero_Life_Stealer',
'id': 54,
'localized_name': 'Lifestealer',
'name': 'npc_dota_hero_life_stealer'},
{'dt_name': 'DT_DOTA_Unit_Hero_DarkSeer',
'id': 55,
'localized_name': 'Dark Seer',
'name': 'npc_dota_hero_dark_seer'},
{'dt_name': 'DT_DOTA_Unit_Hero_Clinkz',
'id': 56,
'localized_name': 'Clinkz',
'name': 'npc_dota_hero_clinkz'},
{'dt_name': 'DT_DOTA_Unit_Hero_Omniknight',
'id': 57,
'localized_name': 'Omniknight',
'name': 'npc_dota_hero_omniknight'},
{'dt_name': 'DT_DOTA_Unit_Hero_Enchantress',
'id': 58,
'localized_name': 'Enchantress',
'name': 'npc_dota_hero_enchantress'},
{'dt_name': 'DT_DOTA_Unit_Hero_Huskar',
'id': 59,
'localized_name': 'Huskar',
'name': 'npc_dota_hero_huskar'},
{'dt_name': 'DT_DOTA_Unit_Hero_NightStalker',
'id': 60,
'localized_name': 'Night Stalker',
'name': 'npc_dota_hero_night_stalker'},
{'dt_name': 'DT_DOTA_Unit_Hero_Broodmother',
'id': 61,
'localized_name': 'Broodmother',
'name': 'npc_dota_hero_broodmother'},
{'dt_name': 'DT_DOTA_Unit_Hero_BountyHunter',
'id': 62,
'localized_name': 'Bounty Hunter',
'name': 'npc_dota_hero_bounty_hunter'},
{'dt_name': 'DT_DOTA_Unit_Hero_Weaver',
'id': 63,
'localized_name': 'Weaver',
'name': 'npc_dota_hero_weaver'},
{'dt_name': 'DT_DOTA_Unit_Hero_Jakiro',
'id': 64,
'localized_name': 'Jakiro',
'name': 'npc_dota_hero_jakiro'},
{'dt_name': 'DT_DOTA_Unit_Hero_Batrider',
'id': 65,
'localized_name': 'Batrider',
'name': 'npc_dota_hero_batrider'},
{'dt_name': 'DT_DOTA_Unit_Hero_Chen',
'id': 66,
'localized_name': 'Chen',
'name': 'npc_dota_hero_chen'},
{'dt_name': 'DT_DOTA_Unit_Hero_Spectre',
'id': 67,
'localized_name': 'Spectre',
'name': 'npc_dota_hero_spectre'},
{'dt_name': 'DT_DOTA_Unit_Hero_DoomBringer',
'id': 69,
'localized_name': 'Doom',
'name': 'npc_dota_hero_doom_bringer'},
{'dt_name': 'DT_DOTA_Unit_Hero_AncientApparition',
'id': 68,
'localized_name': 'Ancient Apparition',
'name': 'npc_dota_hero_ancient_apparition'},
{'dt_name': 'DT_DOTA_Unit_Hero_Ursa',
'id': 70,
'localized_name': 'Ursa',
'name': 'npc_dota_hero_ursa'},
{'dt_name': 'DT_DOTA_Unit_Hero_SpiritBreaker',
'id': 71,
'localized_name': 'Spirit Breaker',
'name': 'npc_dota_hero_spirit_breaker'},
{'dt_name': 'DT_DOTA_Unit_Hero_Gyrocopter',
'id': 72,
'localized_name': 'Gyrocopter',
'name': 'npc_dota_hero_gyrocopter'},
{'dt_name': 'DT_DOTA_Unit_Hero_Alchemist',
'id': 73,
'localized_name': 'Alchemist',
'name': 'npc_dota_hero_alchemist'},
{'dt_name': 'DT_DOTA_Unit_Hero_Invoker',
'id': 74,
'localized_name': 'Invoker',
'name': 'npc_dota_hero_invoker'},
{'dt_name': 'DT_DOTA_Unit_Hero_Silencer',
'id': 75,
'localized_name': 'Silencer',
'name': 'npc_dota_hero_silencer'},
{'dt_name': 'DT_DOTA_Unit_Hero_Obsidian_Destroyer',
'id': 76,
'localized_name': 'Outworld Devourer',
'name': 'npc_dota_hero_obsidian_destroyer'},
{'dt_name': 'DT_DOTA_Unit_Hero_Lycan',
'id': 77,
'localized_name': 'Lycanthrope',
'name': 'npc_dota_hero_lycan'},
{'dt_name': 'DT_DOTA_Unit_Hero_Brewmaster',
'id': 78,
'localized_name': 'Brewmaster',
'name': 'npc_dota_hero_brewmaster'},
{'dt_name': 'DT_DOTA_Unit_Hero_Shadow_Demon',
'id': 79,
'localized_name': 'Shadow Demon',
'name': 'npc_dota_hero_shadow_demon'},
{'dt_name': 'DT_DOTA_Unit_Hero_LoneDruid',
'id': 80,
'localized_name': 'Lone Druid',
'name': 'npc_dota_hero_lone_druid'},
{'dt_name': 'DT_DOTA_Unit_Hero_ChaosKnight',
'id': 81,
'localized_name': 'Chaos Knight',
'name': 'npc_dota_hero_chaos_knight'},
{'dt_name': 'DT_DOTA_Unit_Hero_Meepo',
'id': 82,
'localized_name': 'Meepo',
'name': 'npc_dota_hero_meepo'},
{'dt_name': 'DT_DOTA_Unit_Hero_Treant',
'id': 83,
'localized_name': 'Treant Protector',
'name': 'npc_dota_hero_treant'},
{'dt_name': 'DT_DOTA_Unit_Hero_Ogre_Magi',
'id': 84,
'localized_name': 'Ogre Magi',
'name': 'npc_dota_hero_ogre_magi'},
{'dt_name': 'DT_DOTA_Unit_Hero_Undying',
'id': 85,
'localized_name': 'Undying',
'name': 'npc_dota_hero_undying'},
{'dt_name': 'DT_DOTA_Unit_Hero_Rubick',
'id': 86,
'localized_name': 'Rubick',
'name': 'npc_dota_hero_rubick'},
{'dt_name': 'DT_DOTA_Unit_Hero_Disruptor',
'id': 87,
'localized_name': 'Disruptor',
'name': 'npc_dota_hero_disruptor'},
{'dt_name': 'DT_DOTA_Unit_Hero_Nyx_Assassin',
'id': 88,
'localized_name': 'Nyx Assassin',
'name': 'npc_dota_hero_nyx_assassin'},
{'dt_name': 'DT_DOTA_Unit_Hero_Naga_Siren',
'id': 89,
'localized_name': 'Naga Siren',
'name': 'npc_dota_hero_naga_siren'},
{'dt_name': 'DT_DOTA_Unit_Hero_KeeperOfTheLight',
'id': 90,
'localized_name': 'Keeper of the Light',
'name': 'npc_dota_hero_keeper_of_the_light'},
{'dt_name': 'DT_DOTA_Unit_Hero_Wisp',
'id': 91,
'localized_name': 'Io',
'name': 'npc_dota_hero_wisp'},
{'dt_name': 'DT_DOTA_Unit_Hero_Visage',
'id': 92,
'localized_name': 'Visage',
'name': 'npc_dota_hero_visage'},
{'dt_name': 'DT_DOTA_Unit_Hero_Slark',
'id': 93,
'localized_name': 'Slark',
'name': 'npc_dota_hero_slark'},
{'dt_name': 'DT_DOTA_Unit_Hero_Medusa',
'id': 94,
'localized_name': 'Medusa',
'name': 'npc_dota_hero_medusa'},
{'dt_name': 'DT_DOTA_Unit_Hero_TrollWarlord',
'id': 95,
'localized_name': 'Troll Warlord',
'name': 'npc_dota_hero_troll_warlord'},
{'dt_name': 'DT_DOTA_Unit_Hero_Centaur',
'id': 96,
'localized_name': 'Centaur Warrunner',
'name': 'npc_dota_hero_centaur'},
{'dt_name': 'DT_DOTA_Unit_Hero_Magnataur',
'id': 97,
'localized_name': 'Magnus',
'name': 'npc_dota_hero_magnataur'},
{'dt_name': 'DT_DOTA_Unit_Hero_Shredder',
'id': 98,
'localized_name': 'Timbersaw',
'name': 'npc_dota_hero_shredder'},
{'dt_name': 'DT_DOTA_Unit_Hero_Bristleback',
'id': 99,
'localized_name': 'Bristleback',
'name': 'npc_dota_hero_bristleback'},
{'dt_name': 'DT_DOTA_Unit_Hero_Tusk',
'id': 100,
'localized_name': 'Tusk',
'name': 'npc_dota_hero_tusk'},
{'dt_name': 'DT_DOTA_Unit_Hero_Skywrath_Mage',
'id': 101,
'localized_name': 'Skywrath Mage',
'name': 'npc_dota_hero_skywrath_mage'},
{'dt_name': 'DT_DOTA_Unit_Hero_Abaddon',
'id': 102,
'localized_name': 'Abaddon',
'name': 'npc_dota_hero_abaddon'},
{'dt_name': 'DT_DOTA_Unit_Hero_Elder_Titan',
'id': 103,
'localized_name': 'Elder Titan',
'name': 'npc_dota_hero_elder_titan'},
{'dt_name': 'DT_DOTA_Unit_Hero_Legion_Commander',
'id': 104,
'localized_name': 'Legion Commander',
'name': 'npc_dota_hero_legion_commander'},
{'dt_name': 'DT_DOTA_Unit_Hero_EmberSpirit',
'id': 106,
'localized_name': 'Ember Spirit',
'name': 'npc_dota_hero_ember_spirit'},
{'dt_name': 'DT_DOTA_Unit_Hero_EarthSpirit',
'id': 107,
'localized_name': 'Earth Spirit',
'name': 'npc_dota_hero_earth_spirit'},
{'dt_name': 'DT_DOTA_Unit_Hero_AbyssalUnderlord',
'id': 108,
'localized_name': 'Abyssal Underlord',
'name': 'npc_dota_hero_abyssal_underlord'},
{'dt_name': 'DT_DOTA_Unit_Hero_Terrorblade',
'id': 109,
'localized_name': 'Terrorblade',
'name': 'npc_dota_hero_terrorblade'}]
| mit | 1,758,962,667,588,618,200 | 34.736597 | 56 | 0.546866 | false |
thica/ORCA-Remote | src/ORCA/settings/setttingtypes/SettingActions.py | 1 | 4810 | # -*- coding: utf-8 -*-
"""
ORCA Open Remote Control Application
Copyright (C) 2013-2020 Carsten Thielepape
Please contact me by : http://www.orca-remote.org/
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from typing import List
from typing import Union
from xml.etree.ElementTree import Element
from kivy.uix.popup import Popup
from kivy.uix.widget import Widget
import ORCA.Globals as Globals
from ORCA.settings.setttingtypes.SettingScrollOptionsWithOptions import SettingScrollOptionsWithOptions
from ORCA.settings.setttingtypes.SettingScrollOptions import ScrollOptionsPopUp
from ORCA.utils.XML import LoadXMLFile
from ORCA.utils.XML import Orca_include
from ORCA.utils.XML import orca_et_loader
from ORCA.utils.XML import GetXMLTextAttribute
from ORCA.utils.LogError import LogError
from ORCA.utils.FileName import cFileName
from ORCA.vars.Replace import ReplaceVars
__all__ = ['SettingActions']
class SettingActions(SettingScrollOptionsWithOptions):
""" A setting class to select actions from the action list or from a codeset """
def __init__(self, **kwargs):
self.aCodesetCmds:List[str] = []
self.oActionPopup:Union[Popup,None] = None
self.oCodeSetActionsScrollOptionsPopup:Union[Popup,None]=None
kwargs["options"] = [ReplaceVars("$lvar(742)"),ReplaceVars("$lvar(743)"),ReplaceVars("$lvar(744)")]
kwargs["suboptions"] = [["$ACTIONLISTSEND"], ["$ACTIONLIST"], ["$FILELIST[%s]" % Globals.oPathCodesets.string]]
super().__init__(**kwargs)
	def _set_suboption(self, instance:Widget) -> None:
		""" Called when the second option is selected """
if instance.text.startswith('CODESET_'):
self.subpopup.dismiss()
self.popup.dismiss()
self._ReadCodeset(instance.text)
self._ShowCodesetCodesPopup(instance.text)
else:
self.value = instance.text
self.subpopup.dismiss()
self.popup.dismiss()
	def _set_suboptioncodesetaction(self, instance:Widget) -> None:
		""" Called when a codeset code is selected """
self.value = "SendCommand "+instance.text
self.oActionPopup.dismiss()
def _ReadCodeset(self,uFN:str) -> None:
oXMLCode:Element
uCmd:str
del self.aCodesetCmds[:]
try:
oXMLCodeset:Element = LoadXMLFile(oFile=cFileName(Globals.oPathCodesets) + uFN)
Orca_include(oXMLCodeset,orca_et_loader)
if oXMLCodeset is not None:
# First read imported codesets
oXMLImports:Element = oXMLCodeset.find('imports')
if oXMLImports is not None:
oXMLImportCodesets:Element=oXMLImports.find('codeset')
if oXMLImportCodesets is not None:
for oXMLCode in oXMLImportCodesets.findall('code'):
uCmd=GetXMLTextAttribute(oXMLNode=oXMLCode,uTag='action',bMandatory=False,vDefault='')
if uCmd:
self.aCodesetCmds.append(uCmd)
for oXMLCode in oXMLCodeset.findall('code'):
uCmd=GetXMLTextAttribute(oXMLNode=oXMLCode,uTag='action',bMandatory=False,vDefault='')
if uCmd:
self.aCodesetCmds.append(uCmd)
except Exception as e:
LogError(uMsg='Error Reading Codeset',oException=e)
def _ShowCodesetCodesPopup(self,uFN:str) -> None:
kwargs={'title':uFN,'options':sorted(self.aCodesetCmds)}
self.oCodeSetActionsScrollOptionsPopup=ScrollOptionsPopUp(**kwargs)
self.oCodeSetActionsScrollOptionsPopup.CreatePopup(self.value,self._set_suboptioncodesetaction,None)
self.oActionPopup=self.oCodeSetActionsScrollOptionsPopup.popup
| gpl-3.0 | -6,715,954,626,130,124,000 | 45.156863 | 119 | 0.619543 | false |
GoodgameStudios/crossbar | crossbar/adapter/rest/test/test_common.py | 1 | 9136 | #####################################################################################
#
# Copyright (C) Tavendo GmbH
#
# Unless a separate license agreement exists between you and Tavendo GmbH (e.g. you
# have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import
from twisted.trial.unittest import TestCase
from twisted.internet.defer import inlineCallbacks
from twisted.python.compat import nativeString
from crossbar.adapter.rest import PublisherResource
from crossbar.adapter.rest.test import MockPublisherSession, renderResource
publishBody = b'{"topic": "com.test.messages", "args": [1]}'
class IPWhitelistingTestCase(TestCase):
"""
Unit tests for the IP address checking parts of L{_CommonResource}.
"""
@inlineCallbacks
def test_allowed_IP(self):
"""
The client having an allowed IP address allows the request.
"""
session = MockPublisherSession(self)
resource = PublisherResource({"require_ip": ["127.0.0.1"]}, session)
request = yield renderResource(
resource, b"/", method=b"POST",
headers={b"Content-Type": [b"application/json"]},
body=publishBody)
self.assertEqual(request.code, 202)
@inlineCallbacks
def test_allowed_IP_range(self):
"""
The client having an IP in an allowed address range allows the request.
"""
session = MockPublisherSession(self)
resource = PublisherResource({"require_ip": ["127.0.0.0/8"]}, session)
request = yield renderResource(
resource, b"/", method=b"POST",
headers={b"Content-Type": [b"application/json"]},
body=publishBody)
self.assertEqual(request.code, 202)
@inlineCallbacks
def test_disallowed_IP_range(self):
"""
        The client having an IP not in an allowed address range denies the request.
"""
session = MockPublisherSession(self)
resource = PublisherResource({"require_ip": ["192.168.0.0/16", "10.0.0.0/8"]}, session)
request = yield renderResource(
resource, b"/", method=b"POST",
headers={b"Content-Type": [b"application/json"]},
body=publishBody)
self.assertEqual(request.code, 400)
self.assertIn(b"request denied based on IP address",
request.getWrittenData())
class SecureTransportTestCase(TestCase):
"""
Unit tests for the transport security testing parts of L{_CommonResource}.
"""
@inlineCallbacks
def test_required_tls_with_tls(self):
"""
Required TLS, plus a request over TLS, will allow the request.
"""
session = MockPublisherSession(self)
resource = PublisherResource({"require_tls": True}, session)
request = yield renderResource(
resource, b"/", method=b"POST",
headers={b"Content-Type": [b"application/json"]},
body=publishBody, isSecure=True)
self.assertEqual(request.code, 202)
@inlineCallbacks
def test_not_required_tls_with_tls(self):
"""
A request over TLS even when not required, will allow the request.
"""
session = MockPublisherSession(self)
resource = PublisherResource({}, session)
request = yield renderResource(
resource, b"/", method=b"POST",
headers={b"Content-Type": [b"application/json"]},
body=publishBody, isSecure=True)
self.assertEqual(request.code, 202)
@inlineCallbacks
def test_required_tls_without_tls(self):
"""
Required TLS, plus a request NOT over TLS, will deny the request.
"""
session = MockPublisherSession(self)
resource = PublisherResource({"require_tls": True}, session)
request = yield renderResource(
resource, b"/", method=b"POST",
headers={b"Content-Type": [b"application/json"]},
body=publishBody, isSecure=False)
self.assertEqual(request.code, 400)
class RequestBodyTestCase(TestCase):
"""
Unit tests for the body validation parts of L{_CommonResource}.
"""
@inlineCallbacks
def test_bad_content_type(self):
"""
An incorrect content type will mean the request is rejected.
"""
session = MockPublisherSession(self)
resource = PublisherResource({}, session)
request = yield renderResource(
resource, b"/", method=b"POST",
headers={b"Content-Type": [b"application/text"]},
body=publishBody)
self.assertEqual(request.code, 400)
self.assertIn(b"bad or missing content type ('application/text')",
request.getWrittenData())
@inlineCallbacks
def test_bad_method(self):
"""
An incorrect method will mean the request is rejected.
"""
session = MockPublisherSession(self)
resource = PublisherResource({}, session)
request = yield renderResource(
resource, b"/", method=b"PUT",
headers={b"Content-Type": [b"application/json"]},
body=publishBody)
self.assertEqual(request.code, 405)
self.assertIn(b"HTTP/PUT not allowed",
request.getWrittenData())
@inlineCallbacks
def test_too_large_body(self):
"""
A too large body will mean the request is rejected.
"""
session = MockPublisherSession(self)
resource = PublisherResource({"post_body_limit": 1}, session)
request = yield renderResource(
resource, b"/", method=b"POST",
headers={b"Content-Type": [b"application/json"]},
body=publishBody)
self.assertEqual(request.code, 400)
self.assertIn("HTTP/POST body length ({}) exceeds maximum ({})".format(len(publishBody), 1),
nativeString(request.getWrittenData()))
@inlineCallbacks
def test_not_matching_bodylength(self):
"""
A body length that is different than the Content-Length header will mean
the request is rejected.
"""
session = MockPublisherSession(self)
resource = PublisherResource({"post_body_limit": 1}, session)
request = yield renderResource(
resource, b"/", method=b"POST",
headers={b"Content-Type": [b"application/json"],
b"Content-Length": [1]},
body=publishBody)
self.assertEqual(request.code, 400)
self.assertIn("HTTP/POST body length ({}) is different to Content-Length ({})".format(len(publishBody), 1),
nativeString(request.getWrittenData()))
@inlineCallbacks
def test_invalid_JSON_body(self):
"""
A body that is not valid JSON will be rejected by the server.
"""
session = MockPublisherSession(self)
resource = PublisherResource({}, session)
request = yield renderResource(
resource, b"/", method=b"POST",
headers={b"Content-Type": [b"application/json"]},
body=b"sometext")
self.assertEqual(request.code, 400)
self.assertIn(b"invalid request event - HTTP/POST body must be valid JSON:",
request.getWrittenData())
@inlineCallbacks
def test_JSON_list_body(self):
"""
A body that is not a JSON dict will be rejected by the server.
"""
session = MockPublisherSession(self)
resource = PublisherResource({}, session)
request = yield renderResource(
resource, b"/", method=b"POST",
headers={b"Content-Type": [b"application/json"]},
body=b"[{},{}]")
self.assertEqual(request.code, 400)
self.assertIn(b"invalid request event - HTTP/POST body must be JSON dict",
request.getWrittenData())
| agpl-3.0 | -1,753,702,394,484,008,000 | 35.398406 | 115 | 0.617995 | false |
whiteclover/Choco | test/test_inheritance.py | 1 | 10179 | from choco import lookup, compat
import unittest
from test.util import result_lines
class InheritanceTest(unittest.TestCase):
def test_basic(self):
collection = lookup.TemplateLookup()
collection.put_string('main', """
<%inherit file="base"/>
<%def name="header()">
main header.
</%def>
this is the content.
""")
collection.put_string('base', """
This is base.
header: ${self.header()}
body: ${self.body()}
footer: ${self.footer()}
<%def name="footer()">
this is the footer. header again ${next.header()}
</%def>
""")
assert result_lines(collection.get_template('main').render()) == [
'This is base.',
'header:',
'main header.',
'body:',
'this is the content.',
'footer:',
'this is the footer. header again',
'main header.'
]
def test_multilevel_nesting(self):
collection = lookup.TemplateLookup()
collection.put_string('main', """
<%inherit file="layout"/>
<%def name="d()">main_d</%def>
main_body ${parent.d()}
full stack from the top:
${self.name} ${parent.name} ${parent.context['parent'].name} ${parent.context['parent'].context['parent'].name}
""")
collection.put_string('layout', """
<%inherit file="general"/>
<%def name="d()">layout_d</%def>
layout_body
parent name: ${parent.name}
${parent.d()}
${parent.context['parent'].d()}
${next.body()}
""")
collection.put_string('general', """
<%inherit file="base"/>
<%def name="d()">general_d</%def>
general_body
${next.d()}
${next.context['next'].d()}
${next.body()}
""")
collection.put_string('base', """
base_body
full stack from the base:
${self.name} ${self.context['parent'].name} ${self.context['parent'].context['parent'].name} ${self.context['parent'].context['parent'].context['parent'].name}
${next.body()}
<%def name="d()">base_d</%def>
""")
assert result_lines(collection.get_template('main').render()) == [
'base_body',
'full stack from the base:',
'self:main self:layout self:general self:base',
'general_body',
'layout_d',
'main_d',
'layout_body',
'parent name: self:general',
'general_d',
'base_d',
'main_body layout_d',
'full stack from the top:',
'self:main self:layout self:general self:base'
]
def test_includes(self):
"""test that an included template also has its full hierarchy invoked."""
collection = lookup.TemplateLookup()
collection.put_string("base", """
<%def name="a()">base_a</%def>
This is the base.
${next.body()}
End base.
""")
collection.put_string("index","""
<%inherit file="base"/>
this is index.
a is: ${self.a()}
<%include file="secondary"/>
""")
collection.put_string("secondary","""
<%inherit file="base"/>
this is secondary.
a is: ${self.a()}
""")
assert result_lines(collection.get_template("index").render()) == [
'This is the base.',
'this is index.',
'a is: base_a',
'This is the base.',
'this is secondary.',
'a is: base_a',
'End base.',
'End base.'
]
def test_namespaces(self):
"""test that templates used via <%namespace> have access to an inheriting 'self', and that
the full 'self' is also exported."""
collection = lookup.TemplateLookup()
collection.put_string("base", """
<%def name="a()">base_a</%def>
<%def name="b()">base_b</%def>
This is the base.
${next.body()}
""")
collection.put_string("layout", """
<%inherit file="base"/>
<%def name="a()">layout_a</%def>
This is the layout..
${next.body()}
""")
collection.put_string("index","""
<%inherit file="base"/>
<%namespace name="sc" file="secondary"/>
this is index.
a is: ${self.a()}
sc.a is: ${sc.a()}
sc.b is: ${sc.b()}
sc.c is: ${sc.c()}
sc.body is: ${sc.body()}
""")
collection.put_string("secondary","""
<%inherit file="layout"/>
<%def name="c()">secondary_c. a is ${self.a()} b is ${self.b()} d is ${self.d()}</%def>
<%def name="d()">secondary_d.</%def>
this is secondary.
a is: ${self.a()}
c is: ${self.c()}
""")
assert result_lines(collection.get_template('index').render()) == ['This is the base.',
'this is index.',
'a is: base_a',
'sc.a is: layout_a',
'sc.b is: base_b',
'sc.c is: secondary_c. a is layout_a b is base_b d is secondary_d.',
'sc.body is:',
'this is secondary.',
'a is: layout_a',
'c is: secondary_c. a is layout_a b is base_b d is secondary_d.'
]
def test_pageargs(self):
collection = lookup.TemplateLookup()
collection.put_string("base", """
this is the base.
<%
sorted_ = pageargs.items()
sorted_ = sorted(sorted_)
%>
pageargs: (type: ${type(pageargs)}) ${sorted_}
<%def name="foo()">
${next.body(**context.kwargs)}
</%def>
${foo()}
""")
collection.put_string("index", """
<%inherit file="base"/>
<%page args="x, y, z=7"/>
print ${x}, ${y}, ${z}
""")
if compat.py3k:
assert result_lines(collection.get_template('index').render_unicode(x=5,y=10)) == [
"this is the base.",
"pageargs: (type: <class 'dict'>) [('x', 5), ('y', 10)]",
"print 5, 10, 7"
]
else:
assert result_lines(collection.get_template('index').render_unicode(x=5,y=10)) == [
"this is the base.",
"pageargs: (type: <type 'dict'>) [('x', 5), ('y', 10)]",
"print 5, 10, 7"
]
def test_pageargs_2(self):
collection = lookup.TemplateLookup()
collection.put_string("base", """
this is the base.
${next.body(**context.kwargs)}
<%def name="foo(**kwargs)">
${next.body(**kwargs)}
</%def>
<%def name="bar(**otherargs)">
${next.body(z=16, **context.kwargs)}
</%def>
${foo(x=12, y=15, z=8)}
${bar(x=19, y=17)}
""")
collection.put_string("index", """
<%inherit file="base"/>
<%page args="x, y, z=7"/>
pageargs: ${x}, ${y}, ${z}
""")
assert result_lines(collection.get_template('index').render(x=5,y=10)) == [
"this is the base.",
"pageargs: 5, 10, 7",
"pageargs: 12, 15, 8",
"pageargs: 5, 10, 16"
]
def test_pageargs_err(self):
collection = lookup.TemplateLookup()
collection.put_string("base", """
this is the base.
${next.body()}
""")
collection.put_string("index", """
<%inherit file="base"/>
<%page args="x, y, z=7"/>
print ${x}, ${y}, ${z}
""")
try:
print(collection.get_template('index').render(x=5,y=10))
assert False
except TypeError:
assert True
def test_toplevel(self):
collection = lookup.TemplateLookup()
collection.put_string("base", """
this is the base.
${next.body()}
""")
collection.put_string("index", """
<%inherit file="base"/>
this is the body
""")
assert result_lines(collection.get_template('index').render()) == [
"this is the base.",
"this is the body"
]
assert result_lines(collection.get_template('index').get_def("body").render()) == [
"this is the body"
]
def test_dynamic(self):
collection = lookup.TemplateLookup()
collection.put_string("base", """
this is the base.
${next.body()}
""")
collection.put_string("index", """
<%!
def dyn(context):
if context.get('base', None) is not None:
return 'base'
else:
return None
%>
<%inherit file="${dyn(context)}"/>
this is index.
""")
assert result_lines(collection.get_template('index').render()) == [
'this is index.'
]
assert result_lines(collection.get_template('index').render(base=True)) == [
'this is the base.',
'this is index.'
]
def test_in_call(self):
collection = lookup.TemplateLookup()
collection.put_string("/layout.html","""
Super layout!
<%call expr="self.grid()">
${next.body()}
</%call>
Oh yea!
<%def name="grid()">
Parent grid
${caller.body()}
End Parent
</%def>
""")
collection.put_string("/subdir/layout.html", """
${next.body()}
<%def name="grid()">
Subdir grid
${caller.body()}
End subdir
</%def>
<%inherit file="/layout.html"/>
""")
collection.put_string("/subdir/renderedtemplate.html","""
Holy smokes!
<%inherit file="/subdir/layout.html"/>
""")
#print collection.get_template("/layout.html").code
#print collection.get_template("/subdir/renderedtemplate.html").render()
assert result_lines(collection.get_template("/subdir/renderedtemplate.html").render()) == [
"Super layout!",
"Subdir grid",
"Holy smokes!",
"End subdir",
"Oh yea!"
]
| mit | 347,781,610,667,428,540 | 28.166189 | 163 | 0.483938 | false |
census-instrumentation/opencensus-python | contrib/opencensus-ext-zipkin/opencensus/ext/zipkin/trace_exporter/__init__.py | 1 | 6920 | # Copyright 2017, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Export the spans data to Zipkin Collector."""
import json
import logging
import requests
from opencensus.common.transports import sync
from opencensus.common.utils import check_str_length, timestamp_to_microseconds
from opencensus.trace import base_exporter
DEFAULT_ENDPOINT = '/api/v2/spans'
DEFAULT_HOST_NAME = 'localhost'
DEFAULT_PORT = 9411
DEFAULT_PROTOCOL = 'http'
ZIPKIN_HEADERS = {'Content-Type': 'application/json'}
SPAN_KIND_MAP = {
0: None, # span kind unspecified
1: "SERVER",
2: "CLIENT",
}
SUCCESS_STATUS_CODE = (200, 202)
class ZipkinExporter(base_exporter.Exporter):
"""Export the spans to Zipkin.
See: http://zipkin.io/zipkin-api/#
:type service_name: str
:param service_name: Service that logged an annotation in a trace.
Classifier when query for spans.
:type host_name: str
:param host_name: (Optional) The host name of the Zipkin server.
:type port: int
:param port: (Optional) The port of the Zipkin server.
:type end_point: str
:param end_point: (Optional) The path for the span exporting endpoint.
:type protocol: str
:param protocol: (Optional) The protocol used for the request.
:type transport: :class:`type`
:param transport: Class for creating new transport objects. It should
extend from the base_exporter :class:`.Transport` type
and implement :meth:`.Transport.export`. Defaults to
:class:`.SyncTransport`. The other option is
:class:`.AsyncTransport`.
"""
def __init__(
self,
service_name='my_service',
host_name=DEFAULT_HOST_NAME,
port=DEFAULT_PORT,
endpoint=DEFAULT_ENDPOINT,
protocol=DEFAULT_PROTOCOL,
transport=sync.SyncTransport,
ipv4=None,
ipv6=None):
self.service_name = service_name
self.host_name = host_name
self.port = port
self.endpoint = endpoint
self.protocol = protocol
self.url = self.get_url
self.transport = transport(self)
self.ipv4 = ipv4
self.ipv6 = ipv6
@property
def get_url(self):
return '{}://{}:{}{}'.format(
self.protocol,
self.host_name,
self.port,
self.endpoint)
def emit(self, span_datas):
"""Send SpanData tuples to Zipkin server, default using the v2 API.
:type span_datas: list of :class:
`~opencensus.trace.span_data.SpanData`
:param list of opencensus.trace.span_data.SpanData span_datas:
SpanData tuples to emit
"""
try:
zipkin_spans = self.translate_to_zipkin(span_datas)
result = requests.post(
url=self.url,
data=json.dumps(zipkin_spans),
headers=ZIPKIN_HEADERS)
if result.status_code not in SUCCESS_STATUS_CODE:
logging.error(
"Failed to send spans to Zipkin server! Spans are {}"
.format(zipkin_spans))
except Exception as e: # pragma: NO COVER
logging.error(getattr(e, 'message', e))
def export(self, span_datas):
self.transport.export(span_datas)
def translate_to_zipkin(self, span_datas):
"""Translate the opencensus spans to zipkin spans.
:type span_datas: list of :class:
`~opencensus.trace.span_data.SpanData`
:param span_datas:
SpanData tuples to emit
:rtype: list
:returns: List of zipkin format spans.
"""
local_endpoint = {
'serviceName': self.service_name,
'port': self.port,
}
if self.ipv4 is not None:
local_endpoint['ipv4'] = self.ipv4
if self.ipv6 is not None:
local_endpoint['ipv6'] = self.ipv6
zipkin_spans = []
for span in span_datas:
# Timestamp in zipkin spans is int of microseconds.
start_timestamp_mus = timestamp_to_microseconds(span.start_time)
end_timestamp_mus = timestamp_to_microseconds(span.end_time)
duration_mus = end_timestamp_mus - start_timestamp_mus
zipkin_span = {
'traceId': span.context.trace_id,
'id': str(span.span_id),
'name': span.name,
'timestamp': int(round(start_timestamp_mus)),
'duration': int(round(duration_mus)),
'localEndpoint': local_endpoint,
'tags': _extract_tags_from_span(span.attributes),
'annotations': _extract_annotations_from_span(span),
}
span_kind = span.span_kind
parent_span_id = span.parent_span_id
if span_kind is not None:
kind = SPAN_KIND_MAP.get(span_kind)
# Zipkin API for span kind only accept
# enum(CLIENT|SERVER|PRODUCER|CONSUMER|Absent)
if kind is not None:
zipkin_span['kind'] = kind
if parent_span_id is not None:
zipkin_span['parentId'] = str(parent_span_id)
zipkin_spans.append(zipkin_span)
return zipkin_spans
def _extract_tags_from_span(attr):
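    """Convert span attributes into Zipkin string tags, skipping values that cannot be serialized"""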
if attr is None:
return {}
tags = {}
for attribute_key, attribute_value in attr.items():
if isinstance(attribute_value, (int, bool, float)):
value = str(attribute_value)
elif isinstance(attribute_value, str):
res, _ = check_str_length(str_to_check=attribute_value)
value = res
else:
logging.warning('Could not serialize tag %s', attribute_key)
continue
tags[attribute_key] = value
return tags
def _extract_annotations_from_span(span):
"""Extract and convert time event annotations to zipkin annotations"""
if span.annotations is None:
return []
annotations = []
for annotation in span.annotations:
event_timestamp_mus = timestamp_to_microseconds(annotation.timestamp)
annotations.append({'timestamp': int(round(event_timestamp_mus)),
'value': annotation.description})
return annotations
| apache-2.0 | -6,535,133,408,901,106,000 | 31.641509 | 79 | 0.598555 | false |
pihito/myHomeBox | devAsset/python/main.py | 1 | 1055 | from flask import Flask,render_template,request
from flask.ext.script import Manager
# declare the Flask server
app = Flask(__name__)
# declare the flask-script plug-in
manager = Manager(app)
# create the web route for the site root
# and bind it to the index function
@app.route("/")
def index():
return render_template('index.html')
# create the new route and bind it to the hello function
@app.route('/hello/')
@app.route('/hello/<name>')
def hello(name=None):
if name == None :
        # if the name is not in the URL, try to extract it from the request
name = request.args.get('name',None)
return render_template('hello.html', name=name)
@app.route('/hello2/')
@app.route('/hello2/<name>')
def hello2(name=None):
if name == None :
        # if the name is not in the URL, try to extract it from the request
name = request.args.get('name',None)
return render_template('hello2.html', name=name)
if __name__ == "__main__":
    # launch the Flask server via the flask-script plug-in
manager.run() | apache-2.0 | -385,943,290,818,522,430 | 27.324324 | 82 | 0.671442 | false |
thanatoskira/AndroGuard | build/scripts-2.7/androapkinfo.py | 1 | 3821 | #!/usr/bin/python
# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
import sys, os
from optparse import OptionParser
from androguard.core import androconf
from androguard.core.bytecodes import apk
from androguard.core.bytecodes import dvm
from androguard.core.analysis import analysis
option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use this filename (APK)', 'nargs' : 1 }
option_1 = { 'name' : ('-d', '--directory'), 'help' : 'directory : use this directory', 'nargs' : 1 }
option_2 = { 'name' : ('-t', '--tag'), 'help' : 'display tags', 'action' : 'count' }
option_3 = { 'name' : ('-v', '--version'), 'help' : 'version', 'action' : 'count' }
options = [option_0, option_1, option_2, option_3]
def display_dvm_info(apk) :
vm = dvm.DalvikVMFormat( apk.get_dex() )
vmx = analysis.uVMAnalysis( vm )
print "Native code:", analysis.is_native_code(vmx)
print "Dynamic code:", analysis.is_dyn_code(vmx)
print "Reflection code:", analysis.is_reflection_code(vmx)
for i in vmx.get_methods() :
i.create_tags()
if not i.tags.empty() :
print i.method.get_class_name(), i.method.get_name(), i.tags
def main(options, arguments) :
if options.input != None :
ret_type = androconf.is_android( options.input )
print os.path.basename(options.input), ":"
if ret_type == "APK" :
try :
a = apk.APK( options.input )
if a.is_valid_APK() :
a.show()
display_dvm_info( a )
else :
print "INVALID"
except Exception, e :
print "ERROR", e
elif options.directory != None :
for root, dirs, files in os.walk( options.directory, followlinks=True ) :
if files != [] :
for f in files :
real_filename = root
if real_filename[-1] != "/" :
real_filename += "/"
real_filename += f
ret_type = androconf.is_android( real_filename )
if ret_type == "APK" :
print os.path.basename( real_filename ), ":"
try :
a = apk.APK( real_filename )
if a.is_valid_APK() :
a.show()
display_dvm_info( a )
else :
print "INVALID APK"
raise("ooos")
except Exception, e :
print "ERROR", e
raise("ooos")
elif options.version != None :
print "Androapkinfo version %s" % androconf.ANDROGUARD_VERSION
if __name__ == "__main__" :
parser = OptionParser()
for option in options :
param = option['name']
del option['name']
parser.add_option(*param, **option)
options, arguments = parser.parse_args()
sys.argv[:] = arguments
main(options, arguments)
| lgpl-3.0 | 2,697,242,861,988,364,300 | 36.460784 | 101 | 0.548548 | false |
ray-project/ray | python/ray/autoscaler/_private/azure/node_provider.py | 1 | 12092 | import json
import logging
from pathlib import Path
from threading import RLock
from uuid import uuid4
from azure.common.client_factory import get_client_from_cli_profile
from msrestazure.azure_active_directory import MSIAuthentication
from azure.mgmt.compute import ComputeManagementClient
from azure.mgmt.network import NetworkManagementClient
from azure.mgmt.resource import ResourceManagementClient
from azure.mgmt.resource.resources.models import DeploymentMode
from knack.util import CLIError
from ray.autoscaler.node_provider import NodeProvider
from ray.autoscaler.tags import TAG_RAY_CLUSTER_NAME, TAG_RAY_NODE_NAME
from ray.autoscaler._private.azure.config import bootstrap_azure
VM_NAME_MAX_LEN = 64
VM_NAME_UUID_LEN = 8
logger = logging.getLogger(__name__)
def synchronized(f):
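    """Decorator that runs the wrapped provider method while holding self.lock."""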
def wrapper(self, *args, **kwargs):
self.lock.acquire()
try:
return f(self, *args, **kwargs)
finally:
self.lock.release()
return wrapper
class AzureNodeProvider(NodeProvider):
"""Node Provider for Azure
This provider assumes Azure credentials are set by running ``az login``
and the default subscription is configured through ``az account``
or set in the ``provider`` field of the autoscaler configuration.
Nodes may be in one of three states: {pending, running, terminated}. Nodes
appear immediately once started by ``create_node``, and transition
immediately to terminated when ``terminate_node`` is called.
"""
def __init__(self, provider_config, cluster_name):
NodeProvider.__init__(self, provider_config, cluster_name)
kwargs = {}
if "subscription_id" in provider_config:
kwargs["subscription_id"] = provider_config["subscription_id"]
try:
self.compute_client = get_client_from_cli_profile(
client_class=ComputeManagementClient, **kwargs)
self.network_client = get_client_from_cli_profile(
client_class=NetworkManagementClient, **kwargs)
self.resource_client = get_client_from_cli_profile(
client_class=ResourceManagementClient, **kwargs)
except CLIError as e:
if str(e) != "Please run 'az login' to setup account.":
raise
else:
logger.info("CLI profile authentication failed. Trying MSI")
credentials = MSIAuthentication()
self.compute_client = ComputeManagementClient(
credentials=credentials, **kwargs)
self.network_client = NetworkManagementClient(
credentials=credentials, **kwargs)
self.resource_client = ResourceManagementClient(
credentials=credentials, **kwargs)
self.lock = RLock()
# cache node objects
self.cached_nodes = {}
@synchronized
def _get_filtered_nodes(self, tag_filters):
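        """Return the cached metadata of VMs in the resource group whose tags match all the given filters."""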
def match_tags(vm):
for k, v in tag_filters.items():
if vm.tags.get(k) != v:
return False
return True
vms = self.compute_client.virtual_machines.list(
resource_group_name=self.provider_config["resource_group"])
nodes = [self._extract_metadata(vm) for vm in filter(match_tags, vms)]
self.cached_nodes = {node["name"]: node for node in nodes}
return self.cached_nodes
def _extract_metadata(self, vm):
# get tags
metadata = {"name": vm.name, "tags": vm.tags, "status": ""}
# get status
resource_group = self.provider_config["resource_group"]
instance = self.compute_client.virtual_machines.instance_view(
resource_group_name=resource_group, vm_name=vm.name).as_dict()
for status in instance["statuses"]:
code, state = status["code"].split("/")
# skip provisioning status
if code == "PowerState":
metadata["status"] = state
break
# get ip data
nic_id = vm.network_profile.network_interfaces[0].id
metadata["nic_name"] = nic_id.split("/")[-1]
nic = self.network_client.network_interfaces.get(
resource_group_name=resource_group,
network_interface_name=metadata["nic_name"])
ip_config = nic.ip_configurations[0]
if not self.provider_config.get("use_internal_ips", False):
public_ip_id = ip_config.public_ip_address.id
metadata["public_ip_name"] = public_ip_id.split("/")[-1]
public_ip = self.network_client.public_ip_addresses.get(
resource_group_name=resource_group,
public_ip_address_name=metadata["public_ip_name"])
metadata["external_ip"] = public_ip.ip_address
metadata["internal_ip"] = ip_config.private_ip_address
return metadata
def non_terminated_nodes(self, tag_filters):
"""Return a list of node ids filtered by the specified tags dict.
This list must not include terminated nodes. For performance reasons,
providers are allowed to cache the result of a call to nodes() to
serve single-node queries (e.g. is_running(node_id)). This means that
nodes() must be called again to refresh results.
Examples:
>>> provider.non_terminated_nodes({TAG_RAY_NODE_KIND: "worker"})
["node-1", "node-2"]
"""
nodes = self._get_filtered_nodes(tag_filters=tag_filters)
return [
k for k, v in nodes.items()
if not v["status"].startswith("deallocat")
]
def is_running(self, node_id):
"""Return whether the specified node is running."""
# always get current status
node = self._get_node(node_id=node_id)
return node["status"] == "running"
def is_terminated(self, node_id):
"""Return whether the specified node is terminated."""
# always get current status
node = self._get_node(node_id=node_id)
return node["status"].startswith("deallocat")
def node_tags(self, node_id):
"""Returns the tags of the given node (string dict)."""
return self._get_cached_node(node_id=node_id)["tags"]
def external_ip(self, node_id):
"""Returns the external ip of the given node."""
ip = (self._get_cached_node(node_id=node_id)["external_ip"]
or self._get_node(node_id=node_id)["external_ip"])
return ip
def internal_ip(self, node_id):
"""Returns the internal ip (Ray ip) of the given node."""
ip = (self._get_cached_node(node_id=node_id)["internal_ip"]
or self._get_node(node_id=node_id)["internal_ip"])
return ip
def create_node(self, node_config, tags, count):
"""Creates a number of nodes within the namespace."""
# TODO: restart deallocated nodes if possible
resource_group = self.provider_config["resource_group"]
# load the template file
current_path = Path(__file__).parent
template_path = current_path.joinpath("azure-vm-template.json")
with open(template_path, "r") as template_fp:
template = json.load(template_fp)
# get the tags
config_tags = node_config.get("tags", {}).copy()
config_tags.update(tags)
config_tags[TAG_RAY_CLUSTER_NAME] = self.cluster_name
name_tag = config_tags.get(TAG_RAY_NODE_NAME, "node")
unique_id = uuid4().hex[:VM_NAME_UUID_LEN]
vm_name = "{name}-{id}".format(name=name_tag, id=unique_id)
use_internal_ips = self.provider_config.get("use_internal_ips", False)
template_params = node_config["azure_arm_parameters"].copy()
template_params["vmName"] = vm_name
template_params["provisionPublicIp"] = not use_internal_ips
template_params["vmTags"] = config_tags
template_params["vmCount"] = count
parameters = {
"properties": {
"mode": DeploymentMode.incremental,
"template": template,
"parameters": {
key: {
"value": value
}
for key, value in template_params.items()
}
}
}
# TODO: we could get the private/public ips back directly
self.resource_client.deployments.create_or_update(
resource_group_name=resource_group,
deployment_name="ray-vm-{}".format(name_tag),
parameters=parameters).wait()
@synchronized
def set_node_tags(self, node_id, tags):
"""Sets the tag values (string dict) for the specified node."""
node_tags = self._get_cached_node(node_id)["tags"]
node_tags.update(tags)
if hasattr(self.compute_client.virtual_machines, "update"):
self.compute_client.virtual_machines.update(
resource_group_name=self.provider_config["resource_group"],
vm_name=node_id,
parameters={"tags": node_tags})
else:
# Newer versions of the client use begin_update, not update
self.compute_client.virtual_machines.begin_update(
resource_group_name=self.provider_config["resource_group"],
vm_name=node_id,
parameters={"tags": node_tags})
self.cached_nodes[node_id]["tags"] = node_tags
def terminate_node(self, node_id):
"""Terminates the specified node. This will delete the VM and
associated resources (NIC, IP, Storage) for the specified node."""
resource_group = self.provider_config["resource_group"]
try:
# get metadata for node
metadata = self._get_node(node_id)
except KeyError:
# node no longer exists
return
# TODO: deallocate instead of delete to allow possible reuse
# self.compute_client.virtual_machines.deallocate(
# resource_group_name=resource_group,
# vm_name=node_id)
# gather disks to delete later
vm = self.compute_client.virtual_machines.get(
resource_group_name=resource_group, vm_name=node_id)
disks = {d.name for d in vm.storage_profile.data_disks}
disks.add(vm.storage_profile.os_disk.name)
try:
# delete machine, must wait for this to complete
self.compute_client.virtual_machines.delete(
resource_group_name=resource_group, vm_name=node_id).wait()
except Exception as e:
logger.warning("Failed to delete VM: {}".format(e))
try:
# delete nic
self.network_client.network_interfaces.delete(
resource_group_name=resource_group,
network_interface_name=metadata["nic_name"])
except Exception as e:
logger.warning("Failed to delete nic: {}".format(e))
# delete ip address
if "public_ip_name" in metadata:
try:
self.network_client.public_ip_addresses.delete(
resource_group_name=resource_group,
public_ip_address_name=metadata["public_ip_name"])
except Exception as e:
logger.warning("Failed to delete public ip: {}".format(e))
# delete disks
for disk in disks:
try:
self.compute_client.disks.delete(
resource_group_name=resource_group, disk_name=disk)
except Exception as e:
logger.warning("Failed to delete disk: {}".format(e))
def _get_node(self, node_id):
self._get_filtered_nodes({}) # Side effect: updates cache
return self.cached_nodes[node_id]
def _get_cached_node(self, node_id):
if node_id in self.cached_nodes:
return self.cached_nodes[node_id]
return self._get_node(node_id=node_id)
@staticmethod
def bootstrap_config(cluster_config):
return bootstrap_azure(cluster_config)
| apache-2.0 | -5,612,354,719,580,587,000 | 38.645902 | 78 | 0.606517 | false |
lhaze/dharma | pca/data/dao/abstract.py | 1 | 7281 | import typing as t
from abc import abstractmethod
from functools import reduce
from operator import and_
from pca.data.errors import QueryErrors
from pca.data.predicate import Predicate
from pca.interfaces.dao import (
BatchOfDto,
Dto,
Id,
IDao,
Ids,
IQueryChain,
Kwargs,
)
from pca.utils.dependency_injection import Component
class QueryChain(IQueryChain):
"""
Technical detail of chaining queries.
A proxy for a query interface of DAO, gathering lazy evaluated queries
(ie. filter, sort, aggregate, etc) to call owning DAO to resolve them when non-lazy
(ie. get, exists, count, update, etc) is called.
"""
# TODO lazy queries: order_by, aggregate, annotate
# TODO evaluating queries: slicing
_ids: Ids = None
_filters: t.List[Predicate] = None
def __init__(self, dao: "AbstractDao"):
self._dao = dao
@classmethod
def _construct(
cls, dao: "AbstractDao", filters: t.List[Predicate] = None, ids: t.List[Id] = None
) -> "QueryChain":
"""
Technical detail of creating a new QueryChain with specified
argument.
"""
qc = cls(dao)
qc._ids = ids
qc._filters = filters
return qc
def _clone(self, filters: t.List[Predicate] = None, ids: t.List[Id] = None):
"""
Technical detail of cloning current QueryChain object extended by an additional
argument.
"""
qc = self.__class__(self._dao)
qc._ids = self._ids or ids
if filters:
qc._filters = (self._filters or []) + filters
else:
qc._filters = self._filters
return qc
def __repr__(self):
return f"<QueryChain ids={self._ids}, filters={self._filters}>"
@property
def _is_trivial(self) -> bool:
"""Trivial QueryChain is the one that has no lazy operations defined."""
return not (self._filters or self._ids)
@property
def _reduced_filter(self) -> t.Optional[Predicate]:
"""Before evaluation, sum up all filter predicates into a single one"""
return None if self._is_trivial else reduce(and_, self._filters)
# lazy queries
def filter(self, predicate: Predicate) -> "QueryChain":
"""
        Filters out objects by the predicate specifying conditions that they should meet.
"""
return self._clone(filters=[predicate])
def filter_by(self, id_: Id = None, ids: Ids = None) -> "QueryChain":
"""
        Filters objects by a single id or an iterable of ids.
:raises: InvalidQueryError if:
* both `id_` and `ids` arguments are defined
* or the query is already filtered by id
"""
if self._ids or bool(id_) == bool(ids):
raise QueryErrors.CONFLICTING_QUERY_ARGUMENTS.with_params(id=id_, ids=ids)
ids = ids or [id_]
return self._clone(ids=ids)
# evaluating queries
def __iter__(self) -> Dto:
"""Yields values"""
yield from self._dao._resolve_filter(self)
def __len__(self) -> int:
"""Proxy for `count`."""
return self.count()
def get(self, id_: Id) -> t.Optional[Dto]:
"""Returns object of given id, or None iff not present."""
qc = self.filter_by(id_=id_)
filtered = self._dao._resolve_filter(qc)
return self._dao._resolve_get(filtered, id_, nullable=True)
def exists(self) -> bool:
"""Returns whether any object specified by the query exist."""
return self._dao._resolve_exists(self)
def count(self) -> int:
"""
        Counts objects, filtering them out by the query specifying conditions that they
        should meet.
"""
return self._dao._resolve_count(self)
# evaluating commands
def update(self, **update) -> Ids:
"""
Updates all objects specified by the query with given update.
"""
return self._dao._resolve_update(self, update)
def remove(self) -> Ids:
"""
Removes all objects specified by the query from the collection.
"""
return self._dao._resolve_remove(self)
class AbstractDao(IDao[Id], Component):
"""Base abstract implementation for Data Access Object."""
# lazy queries
def all(self) -> QueryChain:
"""
Returns a query chain representing all objects.
Useful to explicitly denote counting, updating or removing all objects.
"""
return QueryChain(self)
def filter(self, predicate: Predicate) -> QueryChain:
"""
        Filters out objects by the predicate specifying conditions that they
        should meet. Can be chained via the `QueryChain` helper class.
"""
return QueryChain._construct(self, filters=[predicate])
def filter_by(self, id_: Id = None, ids: Ids = None) -> IQueryChain:
"""
        Filters objects by a single id or an iterable of ids.
Can be chained with other queries via `IQueryChain` helper.
:raises: InvalidQueryError iff both `id_` and `ids` arguments are defined.
"""
if bool(id_) == bool(ids):
raise QueryErrors.CONFLICTING_QUERY_ARGUMENTS.with_params(id=id_, ids=ids)
ids = ids or [id_]
return QueryChain._construct(self, ids=ids)
# evaluating queries
def get(self, id_: Id) -> t.Optional[Dto]:
"""
Returns object of given id, or None iff not present.
Shortcut for querying via `QueryChain.all`.
"""
qc = QueryChain._construct(self, ids=[id_])
filtered = self._resolve_filter(qc)
return self._resolve_get(filtered, id_, nullable=True)
@abstractmethod
def _resolve_filter(self, query_chain: QueryChain) -> BatchOfDto:
"""Resolves filtering for any other resolving operation to compute."""
@abstractmethod
def _resolve_get(self, dtos: BatchOfDto, id_: Id, nullable: bool = False) -> t.Optional[Dto]:
"""Resolves `get`query described by the ids."""
@abstractmethod
def _resolve_exists(self, query_chain: QueryChain) -> bool:
"""Returns whether any object specified by the query exist."""
@abstractmethod
def _resolve_count(self, query_chain: QueryChain) -> int:
"""
        Counts objects, filtering them out by the query specifying conditions that they should meet.
"""
# evaluating commands
@abstractmethod
def _resolve_update(self, query_chain: QueryChain, update: Kwargs) -> Ids:
"""
Updates all objects specified by the query with given update.
"""
@abstractmethod
def _resolve_remove(self, query_chain: QueryChain) -> Ids:
"""
Removes all objects specified by the query from the collection.
"""
# instant commands
@abstractmethod
def insert(self, dto: Dto) -> Id:
"""
Inserts the object into the collection.
:returns: id of the inserted object
"""
@abstractmethod
def batch_insert(self, dtos: BatchOfDto) -> Ids:
"""
Inserts multiple objects into the collection.
:returns: a iterable of ids
"""
@abstractmethod
def clear(self) -> None:
"""Clears the collection."""
| mit | -3,672,697,594,967,121,400 | 29.851695 | 98 | 0.609944 | false |
avedaee/DIRAC | WorkloadManagementSystem/Agent/JobCleaningAgent.py | 1 | 8926 | ########################################################################
# $HeadURL$
# File : JobCleaningAgent.py
# Author : A.T.
########################################################################
"""
  The Job Cleaning Agent controls removing jobs from the WMS at the end of their life cycle.
"""
from DIRAC import S_OK, gLogger
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.WorkloadManagementSystem.DB.JobDB import JobDB
from DIRAC.WorkloadManagementSystem.DB.TaskQueueDB import TaskQueueDB
from DIRAC.WorkloadManagementSystem.DB.JobLoggingDB import JobLoggingDB
from DIRAC.WorkloadManagementSystem.Client.SandboxStoreClient import SandboxStoreClient
from DIRAC.RequestManagementSystem.Client.Request import Request
from DIRAC.RequestManagementSystem.Client.Operation import Operation
from DIRAC.RequestManagementSystem.Client.File import File
from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
import DIRAC.Core.Utilities.Time as Time
import time
import os
REMOVE_STATUS_DELAY = { 'Done':7,
'Killed':1,
'Failed':7 }
class JobCleaningAgent( AgentModule ):
"""
The specific agents must provide the following methods:
- initialize() for initial settings
- beginExecution()
- execute() - the main method called in the agent cycle
- endExecution()
- finalize() - the graceful exit of the method, this one is usually used
for the agent restart
"""
#############################################################################
def initialize( self ):
"""Sets defaults
"""
self.am_setOption( "PollingTime", 60 )
self.jobDB = JobDB()
self.taskQueueDB = TaskQueueDB()
self.jobLoggingDB = JobLoggingDB()
# self.sandboxDB = SandboxDB( 'SandboxDB' )
agentTSTypes = self.am_getOption('ProductionTypes', [])
if agentTSTypes:
self.prod_types = agentTSTypes
else:
self.prod_types = Operations().getValue( 'Transformations/DataProcessing', ['MCSimulation', 'Merge'] )
gLogger.info('Will exclude the following Production types from cleaning %s' % ( ', '.join(self.prod_types) ) )
self.maxJobsAtOnce = self.am_getOption('MaxJobsAtOnce', 100)
self.jobByJob = self.am_getOption('JobByJob', True)
self.throttlingPeriod = self.am_getOption('ThrottlingPeriod', 0.)
return S_OK()
def __getAllowedJobTypes( self ):
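    """ Get the list of job types that can be cleaned, i.e. all types except the production ones
    """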
#Get valid jobTypes
result = self.jobDB.getDistinctJobAttributes( 'JobType' )
if not result[ 'OK' ]:
return result
cleanJobTypes = []
for jobType in result[ 'Value' ]:
if jobType not in self.prod_types:
cleanJobTypes.append( jobType )
self.log.notice( "JobTypes to clean %s" % cleanJobTypes )
return S_OK( cleanJobTypes )
#############################################################################
def execute( self ):
"""The PilotAgent execution method.
"""
#Delete jobs in "Deleted" state
result = self.removeJobsByStatus( { 'Status' : 'Deleted' } )
if not result[ 'OK' ]:
return result
#Get all the Job types that can be cleaned
result = self.__getAllowedJobTypes()
if not result[ 'OK' ]:
return result
baseCond = { 'JobType' : result[ 'Value' ] }
# Remove jobs with final status
for status in REMOVE_STATUS_DELAY:
delay = REMOVE_STATUS_DELAY[ status ]
condDict = dict( baseCond )
condDict[ 'Status' ] = status
delTime = str( Time.dateTime() - delay * Time.day )
result = self.removeJobsByStatus( condDict, delTime )
if not result['OK']:
gLogger.warn( 'Failed to remove jobs in status %s' % status )
return S_OK()
def removeJobsByStatus( self, condDict, delay = False ):
""" Remove deleted jobs
"""
if delay:
gLogger.verbose( "Removing jobs with %s and older than %s" % ( condDict, delay ) )
result = self.jobDB.selectJobs( condDict, older = delay, limit = self.maxJobsAtOnce )
else:
gLogger.verbose( "Removing jobs with %s " % condDict )
result = self.jobDB.selectJobs( condDict, limit = self.maxJobsAtOnce )
if not result['OK']:
return result
jobList = result['Value']
if len(jobList) > self.maxJobsAtOnce:
jobList = jobList[:self.maxJobsAtOnce]
if not jobList:
return S_OK()
self.log.notice( "Deleting %s jobs for %s" % ( len( jobList ), condDict ) )
count = 0
error_count = 0
result = SandboxStoreClient( useCertificates = True ).unassignJobs( jobList )
if not result[ 'OK' ]:
gLogger.warn( "Cannot unassign jobs to sandboxes", result[ 'Message' ] )
result = self.deleteJobOversizedSandbox( jobList )
if not result[ 'OK' ]:
gLogger.warn( "Cannot schedle removal of oversized sandboxes", result[ 'Message' ] )
return result
failedJobs = result['Value']['Failed']
for job in failedJobs:
jobList.pop( jobList.index( job ) )
if self.jobByJob:
for jobID in jobList:
resultJobDB = self.jobDB.removeJobFromDB( jobID )
resultTQ = self.taskQueueDB.deleteJob( jobID )
resultLogDB = self.jobLoggingDB.deleteJob( jobID )
errorFlag = False
if not resultJobDB['OK']:
          gLogger.warn( 'Failed to remove job %d from JobDB' % jobID, resultJobDB['Message'] )
errorFlag = True
if not resultTQ['OK']:
          gLogger.warn( 'Failed to remove job %d from TaskQueueDB' % jobID, resultTQ['Message'] )
errorFlag = True
if not resultLogDB['OK']:
          gLogger.warn( 'Failed to remove job %d from JobLoggingDB' % jobID, resultLogDB['Message'] )
errorFlag = True
if errorFlag:
error_count += 1
else:
count += 1
if self.throttlingPeriod:
time.sleep(self.throttlingPeriod)
else:
result = self.jobDB.removeJobFromDB( jobList )
if not result['OK']:
gLogger.error('Failed to delete %d jobs from JobDB' % len(jobList) )
else:
gLogger.info('Deleted %d jobs from JobDB' % len(jobList) )
for jobID in jobList:
resultTQ = self.taskQueueDB.deleteJob( jobID )
if not resultTQ['OK']:
gLogger.warn( 'Failed to remove job %d from TaskQueueDB' % jobID, resultTQ['Message'] )
error_count += 1
else:
count += 1
result = self.jobLoggingDB.deleteJob( jobList )
if not result['OK']:
gLogger.error('Failed to delete %d jobs from JobLoggingDB' % len(jobList) )
else:
gLogger.info('Deleted %d jobs from JobLoggingDB' % len(jobList) )
if count > 0 or error_count > 0 :
gLogger.info( 'Deleted %d jobs from JobDB, %d errors' % ( count, error_count ) )
return S_OK()
def deleteJobOversizedSandbox( self, jobIDList ):
""" Delete the job oversized sandbox files from storage elements
"""
failed = {}
successful = {}
lfnDict = {}
for jobID in jobIDList:
result = self.jobDB.getJobParameter( jobID, 'OutputSandboxLFN' )
if result['OK']:
lfn = result['Value']
if lfn:
lfnDict[lfn] = jobID
else:
successful[jobID] = 'No oversized sandbox found'
else:
        gLogger.warn( 'Error interrogating JobDB: %s' % result['Message'] )
if not lfnDict:
return S_OK( {'Successful':successful, 'Failed':failed} )
# Schedule removal of the LFNs now
for lfn, jobID in lfnDict.items():
result = self.jobDB.getJobAttributes( jobID, ['OwnerDN', 'OwnerGroup'] )
if not result['OK']:
failed[jobID] = lfn
continue
if not result['Value']:
failed[jobID] = lfn
continue
ownerDN = result['Value']['OwnerDN']
ownerGroup = result['Value']['OwnerGroup']
result = self.__setRemovalRequest( lfn, ownerDN, ownerGroup )
if not result['OK']:
failed[jobID] = lfn
else:
successful[jobID] = lfn
result = {'Successful':successful, 'Failed':failed}
return S_OK( result )
def __setRemovalRequest( self, lfn, ownerDN, ownerGroup ):
""" Set removal request with the given credentials
"""
oRequest = Request()
oRequest.OwnerDN = ownerDN
oRequest.OwnerGroup = ownerGroup
oRequest.RequestName = os.path.basename( lfn ).strip() + '_removal_request.xml'
oRequest.SourceComponent = 'JobCleaningAgent'
removeFile = Operation()
removeFile.Type = 'RemoveFile'
removedFile = File()
removedFile.LFN = lfn
removeFile.addFile( removedFile )
oRequest.addOperation( removeFile )
return ReqClient().putRequest( oRequest )
| gpl-3.0 | 4,453,427,842,608,304,000 | 35.73251 | 114 | 0.608895 | false |
NightscoutFoundation/dataxfer | oh_data_source/celery.py | 1 | 1415 | """
Celery set up, as recommended by celery
http://celery.readthedocs.org/en/latest/django/first-steps-with-django.html
Celery will automatically discover and use methods within INSTALLED_APPs that
have the @shared_task decorator.
"""
# absolute_import prevents conflicts between project celery.py file
# and the celery package.
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
CELERY_BROKER_URL = os.getenv('CLOUDAMQP_URL', 'amqp://')
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE',
'oh_data_source.settings')
app = Celery('oh_data_source', broker=CELERY_BROKER_URL)
# Set up Celery with Heroku CloudAMQP (or AMQP in local dev).
app.conf.update({
'BROKER_URL': CELERY_BROKER_URL,
# Recommended settings. See: https://www.cloudamqp.com/docs/celery.html
'BROKER_POOL_LIMIT': 1,
'BROKER_HEARTBEAT': None,
'BROKER_CONNECTION_TIMEOUT': 30,
'CELERY_RESULT_BACKEND': None,
'CELERY_SEND_EVENTS': False,
'CELERY_EVENT_QUEUE_EXPIRES': 60,
})
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
| gpl-3.0 | 4,745,985,152,428,473,000 | 29.76087 | 77 | 0.728622 | false |
GoIncremental/gi-ansible | inventory/digital_ocean.py | 1 | 19937 | #!/usr/bin/env python
'''
DigitalOcean external inventory script
======================================
Generates Ansible inventory of DigitalOcean Droplets.
In addition to the --list and --host options used by Ansible, there are options
for generating JSON of other DigitalOcean data. This is useful when creating
droplets. For example, --regions will return all the DigitalOcean Regions.
This information can also be easily found in the cache file, whose default
location is /tmp/ansible-digital_ocean.cache).
The --pretty (-p) option pretty-prints the output for better human readability.
----
Although the cache stores all the information received from DigitalOcean,
the cache is not used for current droplet information (in --list, --host,
--all, and --droplets). This is so that accurate droplet information is always
found. You can force this script to use the cache with --force-cache.
----
Configuration is read from `digital_ocean.ini`, then from environment variables,
and then from command-line arguments.
Most notably, the DigitalOcean Client ID and API Key must be specified. They
can be specified in the INI file or with the following environment variables:
export DO_CLIENT_ID='DO123' DO_API_KEY='abc123'
Alternatively, they can be passed on the command-line with --client-id and
--api-key.
If you specify DigitalOcean credentials in the INI file, a handy way to
get them into your environment (e.g., to use the digital_ocean module)
is to use the output of the --env option with export:
export $(digital_ocean.py --env)
----
The following groups are generated from --list:
- ID (droplet ID)
- NAME (droplet NAME)
- image_ID
- image_NAME
- distro_NAME (distribution NAME from image)
- region_ID
- region_NAME
- size_ID
- size_NAME
- status_STATUS
When run against a specific host, this script returns the following variables:
- do_created_at
 - do_distro
- do_id
- do_image
- do_image_id
- do_ip_address
- do_name
- do_region
- do_region_id
- do_size
- do_size_id
- do_status
-----
```
usage: digital_ocean.py [-h] [--list] [--host HOST] [--all]
[--droplets] [--regions] [--images] [--sizes]
[--ssh-keys] [--domains] [--pretty]
[--cache-path CACHE_PATH]
[--cache-max_age CACHE_MAX_AGE]
[--refresh-cache] [--client-id CLIENT_ID]
[--api-key API_KEY]
Produce an Ansible Inventory file based on DigitalOcean credentials
optional arguments:
-h, --help show this help message and exit
--list List all active Droplets as Ansible inventory
(default: True)
--host HOST Get all Ansible inventory variables about a specific
Droplet
--all List all DigitalOcean information as JSON
--droplets List Droplets as JSON
--regions List Regions as JSON
--images List Images as JSON
--sizes List Sizes as JSON
--ssh-keys List SSH keys as JSON
--domains List Domains as JSON
--pretty, -p Pretty-print results
--cache-path CACHE_PATH
Path to the cache files (default: .)
--cache-max_age CACHE_MAX_AGE
Maximum age of the cached items (default: 0)
--refresh-cache Force refresh of cache by making API requests to
DigitalOcean (default: False - use cache files)
--client-id CLIENT_ID, -c CLIENT_ID
DigitalOcean Client ID
--api-key API_KEY, -a API_KEY
DigitalOcean API Key
```
'''
# (c) 2013, Evan Wies <[email protected]>
#
# Inspired by the EC2 inventory plugin:
# https://github.com/ansible/ansible/blob/devel/plugins/inventory/ec2.py
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import os
import sys
import re
import argparse
from time import time
import ConfigParser
try:
import json
except ImportError:
import simplejson as json
try:
from dopy.manager import DoError, DoManager
except ImportError as e:
print "failed=True msg='`dopy` library required for this script'"
sys.exit(1)
class DigitalOceanInventory(object):
###########################################################################
# Main execution path
###########################################################################
def __init__(self):
''' Main execution path '''
# DigitalOceanInventory data
self.data = {} # All DigitalOcean data
self.inventory = {} # Ansible Inventory
        self.index = {}  # Various indices of Droplet metadata
# Define defaults
self.cache_path = '.'
self.cache_max_age = 0
# Read settings, environment variables, and CLI arguments
self.read_settings()
self.read_environment()
self.read_cli_args()
# Verify credentials were set
if not hasattr(self, 'client_id') or not hasattr(self, 'api_key'):
print '''Could not find values for DigitalOcean client_id and api_key.
They must be specified via either ini file, command line argument (--client-id and --api-key),
or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
sys.exit(-1)
# env command, show DigitalOcean credentials
if self.args.env:
print "DO_CLIENT_ID=%s DO_API_KEY=%s" % (self.client_id, self.api_key)
sys.exit(0)
# Manage cache
self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache"
self.cache_refreshed = False
if not self.args.force_cache and self.args.refresh_cache or not self.is_cache_valid():
self.load_all_data_from_digital_ocean()
else:
self.load_from_cache()
if len(self.data) == 0:
if self.args.force_cache:
print '''Cache is empty and --force-cache was specified'''
sys.exit(-1)
self.load_all_data_from_digital_ocean()
else:
# We always get fresh droplets for --list, --host, --all, and --droplets
# unless --force-cache is specified
if not self.args.force_cache and (
self.args.list or self.args.host or self.args.all or self.args.droplets):
self.load_droplets_from_digital_ocean()
# Pick the json_data to print based on the CLI command
if self.args.droplets: json_data = { 'droplets': self.data['droplets'] }
elif self.args.regions: json_data = { 'regions': self.data['regions'] }
elif self.args.images: json_data = { 'images': self.data['images'] }
elif self.args.sizes: json_data = { 'sizes': self.data['sizes'] }
elif self.args.ssh_keys: json_data = { 'ssh_keys': self.data['ssh_keys'] }
elif self.args.domains: json_data = { 'domains': self.data['domains'] }
elif self.args.all: json_data = self.data
elif self.args.host: json_data = self.load_droplet_variables_for_host()
else: # '--list' this is last to make it default
json_data = self.inventory
if self.args.pretty:
print json.dumps(json_data, sort_keys=True, indent=2)
else:
print json.dumps(json_data)
# That's all she wrote...
###########################################################################
# Script configuration
###########################################################################
def read_settings(self):
''' Reads the settings from the digital_ocean.ini file '''
config = ConfigParser.SafeConfigParser()
config.read(os.path.dirname(os.path.realpath(__file__)) + '/digital_ocean.ini')
# Credentials
if config.has_option('digital_ocean', 'client_id'):
self.client_id = config.get('digital_ocean', 'client_id')
if config.has_option('digital_ocean', 'api_key'):
self.api_key = config.get('digital_ocean', 'api_key')
# Cache related
if config.has_option('digital_ocean', 'cache_path'):
self.cache_path = config.get('digital_ocean', 'cache_path')
if config.has_option('digital_ocean', 'cache_max_age'):
self.cache_max_age = config.getint('digital_ocean', 'cache_max_age')
def read_environment(self):
''' Reads the settings from environment variables '''
# Setup credentials
if os.getenv("DO_CLIENT_ID"): self.client_id = os.getenv("DO_CLIENT_ID")
if os.getenv("DO_API_KEY"): self.api_key = os.getenv("DO_API_KEY")
def read_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials')
parser.add_argument('--list', action='store_true', help='List all active Droplets as Ansible inventory (default: True)')
parser.add_argument('--host', action='store', help='Get all Ansible inventory variables about a specific Droplet')
parser.add_argument('--all', action='store_true', help='List all DigitalOcean information as JSON')
parser.add_argument('--droplets','-d', action='store_true', help='List Droplets as JSON')
parser.add_argument('--regions', action='store_true', help='List Regions as JSON')
parser.add_argument('--images', action='store_true', help='List Images as JSON')
parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON')
parser.add_argument('--ssh-keys', action='store_true', help='List SSH keys as JSON')
parser.add_argument('--domains', action='store_true',help='List Domains as JSON')
parser.add_argument('--pretty','-p', action='store_true', help='Pretty-print results')
parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)')
parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)')
parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache')
parser.add_argument('--refresh-cache','-r', action='store_true', default=False, help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)')
parser.add_argument('--env','-e', action='store_true', help='Display DO_CLIENT_ID and DO_API_KEY')
parser.add_argument('--client-id','-c', action='store', help='DigitalOcean Client ID')
parser.add_argument('--api-key','-a', action='store', help='DigitalOcean API Key')
self.args = parser.parse_args()
if self.args.client_id: self.client_id = self.args.client_id
if self.args.api_key: self.api_key = self.args.api_key
if self.args.cache_path: self.cache_path = self.args.cache_path
if self.args.cache_max_age: self.cache_max_age = self.args.cache_max_age
# Make --list default if none of the other commands are specified
if (not self.args.droplets and not self.args.regions and not self.args.images and
not self.args.sizes and not self.args.ssh_keys and not self.args.domains and
not self.args.all and not self.args.host):
self.args.list = True
###########################################################################
# Data Management
###########################################################################
def load_all_data_from_digital_ocean(self):
''' Use dopy to get all the information from DigitalOcean and save data in cache files '''
manager = DoManager(self.client_id, self.api_key)
self.data = {}
self.data['droplets'] = self.sanitize_list(manager.all_active_droplets())
self.data['regions'] = self.sanitize_list(manager.all_regions())
self.data['images'] = self.sanitize_list(manager.all_images(filter=None))
self.data['sizes'] = self.sanitize_list(manager.sizes())
self.data['ssh_keys'] = self.sanitize_list(manager.all_ssh_keys())
self.data['domains'] = self.sanitize_list(manager.all_domains())
self.index = {}
self.index['region_to_name'] = self.build_index(self.data['regions'], 'id', 'name')
self.index['size_to_name'] = self.build_index(self.data['sizes'], 'id', 'name')
self.index['image_to_name'] = self.build_index(self.data['images'], 'id', 'name')
self.index['image_to_distro'] = self.build_index(self.data['images'], 'id', 'distribution')
self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False)
self.build_inventory()
self.write_to_cache()
def load_droplets_from_digital_ocean(self):
''' Use dopy to get droplet information from DigitalOcean and save data in cache files '''
manager = DoManager(self.client_id, self.api_key)
self.data['droplets'] = self.sanitize_list(manager.all_active_droplets())
self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False)
self.build_inventory()
self.write_to_cache()
def build_index(self, source_seq, key_from, key_to, use_slug=True):
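        ''' Build a dict mapping each item's key_from value to its slug (when available) or its key_to value '''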
dest_dict = {}
for item in source_seq:
name = (use_slug and item.has_key('slug')) and item['slug'] or item[key_to]
key = item[key_from]
dest_dict[key] = name
return dest_dict
def build_inventory(self):
'''Build Ansible inventory of droplets'''
self.inventory = {}
self.inventory['localhost'] = ['127.0.0.1']
# add all droplets by id and name
for droplet in self.data['droplets']:
dest = droplet['ip_address']
self.inventory[droplet['id']] = [dest]
self.push(self.inventory, droplet['name'], dest)
self.push(self.inventory, 'region_'+droplet['region_id'], dest)
self.push(self.inventory, 'image_' +droplet['image_id'], dest)
self.push(self.inventory, 'size_' +droplet['size_id'], dest)
self.push(self.inventory, 'status_'+droplet['status'], dest)
region_name = self.index['region_to_name'].get(droplet['region_id'])
if region_name:
self.push(self.inventory, 'region_'+region_name, dest)
size_name = self.index['size_to_name'].get(droplet['size_id'])
if size_name:
self.push(self.inventory, 'size_'+size_name, dest)
image_name = self.index['image_to_name'].get(droplet['image_id'])
if image_name:
self.push(self.inventory, 'image_'+image_name, dest)
distro_name = self.index['image_to_distro'].get(droplet['image_id'])
if distro_name:
self.push(self.inventory, 'distro_'+distro_name, dest)
def load_droplet_variables_for_host(self):
        '''Generate a JSON response to a --host call'''
host = self.to_safe(str(self.args.host))
if not host in self.index['host_to_droplet']:
# try updating cache
if not self.args.force_cache:
self.load_all_data_from_digital_ocean()
if not host in self.index['host_to_droplet']:
# host might not exist anymore
return {}
droplet = None
if self.cache_refreshed:
for drop in self.data['droplets']:
if drop['ip_address'] == host:
droplet = self.sanitize_dict(drop)
break
else:
# Cache wasn't refreshed this run, so hit DigitalOcean API
manager = DoManager(self.client_id, self.api_key)
droplet_id = self.index['host_to_droplet'][host]
droplet = self.sanitize_dict(manager.show_droplet(droplet_id))
if not droplet:
return {}
# Put all the information in a 'do_' namespace
info = {}
for k, v in droplet.items():
info['do_'+k] = v
# Generate user-friendly variables (i.e. not the ID's)
if droplet.has_key('region_id'):
info['do_region'] = self.index['region_to_name'].get(droplet['region_id'])
if droplet.has_key('size_id'):
info['do_size'] = self.index['size_to_name'].get(droplet['size_id'])
if droplet.has_key('image_id'):
info['do_image'] = self.index['image_to_name'].get(droplet['image_id'])
info['do_distro'] = self.index['image_to_distro'].get(droplet['image_id'])
return info
###########################################################################
# Cache Management
###########################################################################
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
if os.path.isfile(self.cache_filename):
mod_time = os.path.getmtime(self.cache_filename)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
return True
return False
def load_from_cache(self):
''' Reads the data from the cache file and assigns it to member variables as Python Objects'''
cache = open(self.cache_filename, 'r')
json_data = cache.read()
cache.close()
data = json.loads(json_data)
self.data = data['data']
self.inventory = data['inventory']
self.index = data['index']
def write_to_cache(self):
''' Writes data in JSON format to a file '''
data = { 'data': self.data, 'index': self.index, 'inventory': self.inventory }
json_data = json.dumps(data, sort_keys=True, indent=2)
cache = open(self.cache_filename, 'w')
cache.write(json_data)
cache.close()
###########################################################################
# Utilities
###########################################################################
def push(self, my_dict, key, element):
''' Pushed an element onto an array that may not have been defined in the dict '''
if key in my_dict:
my_dict[key].append(element);
else:
my_dict[key] = [element]
def to_safe(self, word):
''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
return re.sub("[^A-Za-z0-9\-\.]", "_", word)
def sanitize_dict(self, d):
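        ''' Return a copy of a dict with None values dropped and keys/values converted to safe strings '''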
new_dict = {}
for k, v in d.items():
if v != None:
new_dict[self.to_safe(str(k))] = self.to_safe(str(v))
return new_dict
def sanitize_list(self, seq):
new_seq = []
for d in seq:
new_seq.append(self.sanitize_dict(d))
return new_seq
###########################################################################
# Run the script
DigitalOceanInventory() | apache-2.0 | -7,804,996,669,653,076,000 | 39.940452 | 192 | 0.580679 | false |
yveskaufmann/Naive-Bayes | NaiveBayes/BagOfWords.py | 1 | 1801 | class BagOfWords(object):
"""
    Implements a bag of words: a mapping of words to their frequency of use in a "document",
    for use by the Document class, DocumentClass class and the Pool class.
"""
def __init__(self):
self.__number_of_words = 0
self.__bag_of_words = {}
def __add__(self, other):
"""
Overloading of the "+" operator to join two BagOfWords
"""
erg = BagOfWords()
sum = erg.__bag_of_words
for key in self.__bag_of_words:
sum[key] = self.__bag_of_words[key]
if key in other.__bag_of_words:
sum[key] += other.__bag_of_words[key]
for key in other.__bag_of_words:
if key not in sum:
sum[key] = other.__bag_of_words[key]
return erg
def add_word(self, word):
"""
A word is added in the dictionary __bag_of_words
"""
self.__number_of_words += 1
if word in self.__bag_of_words:
self.__bag_of_words[word] += 1
else:
self.__bag_of_words[word] = 1
def len(self):
"""
Returning the number of different words of an object
"""
return len(self.__bag_of_words)
def Words(self):
"""
Returning a list of the words contained in the object
"""
return self.__bag_of_words.keys()
def BagOfWords(self):
"""
Returning the dictionary, containing the words (keys) with their frequency (values)
"""
return self.__bag_of_words
def WordFreq(self, word):
"""
Returning the frequency of a word
"""
if word in self.__bag_of_words:
return self.__bag_of_words[word]
else:
return 0
| mit | 204,471,557,903,905,180 | 26.707692 | 99 | 0.52804 | false |
lmazuel/azure-sdk-for-python | azure-mgmt-consumption/azure/mgmt/consumption/models/__init__.py | 1 | 2327 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .meter_details import MeterDetails
from .usage_detail import UsageDetail
from .marketplace import Marketplace
from .reservation_summaries import ReservationSummaries
from .reservation_details import ReservationDetails
from .budget_time_period import BudgetTimePeriod
from .filters import Filters
from .current_spend import CurrentSpend
from .notification import Notification
from .budget import Budget
from .error_details import ErrorDetails
from .error_response import ErrorResponse, ErrorResponseException
from .operation_display import OperationDisplay
from .operation import Operation
from .resource import Resource
from .proxy_resource import ProxyResource
from .price_sheet_properties import PriceSheetProperties
from .price_sheet_result import PriceSheetResult
from .usage_detail_paged import UsageDetailPaged
from .marketplace_paged import MarketplacePaged
from .reservation_summaries_paged import ReservationSummariesPaged
from .reservation_details_paged import ReservationDetailsPaged
from .budget_paged import BudgetPaged
from .operation_paged import OperationPaged
from .consumption_management_client_enums import (
CategoryType,
TimeGrainType,
OperatorType,
Datagrain,
)
__all__ = [
'MeterDetails',
'UsageDetail',
'Marketplace',
'ReservationSummaries',
'ReservationDetails',
'BudgetTimePeriod',
'Filters',
'CurrentSpend',
'Notification',
'Budget',
'ErrorDetails',
'ErrorResponse', 'ErrorResponseException',
'OperationDisplay',
'Operation',
'Resource',
'ProxyResource',
'PriceSheetProperties',
'PriceSheetResult',
'UsageDetailPaged',
'MarketplacePaged',
'ReservationSummariesPaged',
'ReservationDetailsPaged',
'BudgetPaged',
'OperationPaged',
'CategoryType',
'TimeGrainType',
'OperatorType',
'Datagrain',
]
| mit | -6,159,015,588,938,572,000 | 31.319444 | 76 | 0.7211 | false |
xR86/ml-stuff | labs-AI/hw-lab5/pawn_chess.py | 1 | 14528 | # /usr/bin/python
'''
Based on
Representing a chess set in Python
Part 2
Brendan Scott
https://python4kids.brendanscott.com/2013/04/28/a-different-view-on-our-chess-model/
'''
import Tkinter as tk
from Tkinter import PhotoImage
import os.path
import os
# column_reference = "1 2 3 4 5 6 7 8".split(" ")
column_reference = "a b c d e f g h".split(" ")
EMPTY_SQUARE = " "
TILE_WIDTH = 60
'''We have used a tile width of 60 because the images we are using are 60x60 pixels.
The original svg files were obtained from
http://commons.wikimedia.org/wiki/Category:SVG_chess_pieces/Standard_transparent
after downloading they were batch converted to png, then gif files. Bash one liners
to do this:
for i in $(ls *.svg); do inkscape -e ${i%.svg}.png -w 60 -h 60 $i ; done
for i in $(ls *.png); do convert $i ${i%.png}.gif ; done
white and black tiles were created in inkscape
'''
BOARD_WIDTH = 8 * TILE_WIDTH
BOARD_HEIGHT = BOARD_WIDTH
DATA_DIR = "chess_data"
TILES = {"black_tile": "black_tile.gif",
"p": "chess_p45.gif",
"P": "chess_p451.gif",
"white_tile": "white_tile.gif"
}
class Model(object):
def __init__(self):
'''create a chess board with pieces positioned for a new game
row ordering is reversed from normal chess representations
but corresponds to a top left screen coordinate
'''
self.board = []
pawn_base = "P " * 8
white_pawns = pawn_base.strip()
black_pawns = white_pawns.lower()
self.board.append([EMPTY_SQUARE] * 8)
self.board.append(black_pawns.split(" "))
for i in range(4):
self.board.append([EMPTY_SQUARE] * 8)
self.board.append(white_pawns.split(" "))
self.board.append([EMPTY_SQUARE] * 8)
def color(self, i, j):
''' checks the color of the piece located at the i, j coordinates
'''
color = -1 # 0 - white, 1 - black
if self.board[i][j] == 'p':
color = 1
elif self.board[i][j] == 'P':
color = 0
return color
def move(self, start, destination):
''' move a piece located at the start location to destination
(each an instance of BoardLocation)
Does not check whether the move is valid for the piece
'''
# check piece color
color = self.color(start.i, start.j)
print "Piece color: ", 'black' if color == 1 else ('white' if color == 0 else 'position empty')
print "start.j, %d, destination.j %d" % (start.j, destination.j)
print "start.i, %d, destination.i %d" % (start.i, destination.i)
print "---"
print self.board
print "---"
# ### error checking ### #
# check coordinates are valid
for c in [start, destination]:
if c.i > 7 or c.j > 7 or c.i < 0 or c.j < 0:
print 'err - coordinates are not valid (outside of board size)\n---'
return
# don't move to same location
if start.i == destination.i and start.j == destination.j:
print 'err - move to same location\n---'
return
# nothing to move
if self.board[start.i][start.j] == EMPTY_SQUARE:
print 'err - nothing to move\n---'
return
# don't move more than one step
# if at initial location don't move more than two steps
if color == 1 and start.i == 1 or color == 0 and start.i == 6:
if abs(destination.i - start.i) > 2 or abs(destination.j - start.j) > 1:
print 'err - more than two steps at init location\n---'
return
# don't move more than one step
elif abs(destination.i - start.i) > 1 or abs(destination.j - start.j) > 1:
print 'err - more than one step\n---'
return
# capture move
capture_color = self.color(destination.i, destination.j)
print 'capture color: ', capture_color
print '---'
#prevent capture of same color
if capture_color == color and (start.j - 1 == destination.j or start.j + 1 == destination.j):
print 'err - capture of same color\n---'
return
# prevent capture on an empty square
if capture_color != color and (start.j - 1 == destination.j or start.j + 1 == destination.j) \
and self.board[destination.i][destination.j] == EMPTY_SQUARE:
print 'err - capture of empty square\n---'
return
#prevent capture on walk (vertical)
if capture_color != color and capture_color != -1 and start.j == destination.j:
print 'err - capture on walk\n---'
return
# no retreat # and start.j == destination.j
#if start.i - 1 != destination.i and color == 0: #allows only one step
if start.i < destination.i and color == 0: # white goes up
print 'err - retreat attempt\n---'
return
#if start.i + 1 != destination.i and color == 1: #allows only one step
if start.i > destination.i and color == 1: # black goes down
print 'err - retreat attempt\n---'
return
f = self.board[start.i][start.j]
self.board[destination.i][destination.j] = f
self.board[start.i][start.j] = EMPTY_SQUARE
print '---\n'
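# Illustrative note (not part of the original file): with this layout, board[0] is the
# top row drawn on screen (chess rank 8) and board[7] is rank 1, so the white pawn that
# starts on e2 sits at board[6][4] and advances by decreasing i, while the black pawns
# on board[1] advance by increasing i.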
class BoardLocation(object):
def __init__(self, i, j):
self.i = i
self.j = j
class View(tk.Frame):
def __init__(self, parent=None):
tk.Frame.__init__(self, parent)
# label = tk.Label(self, text="Error rate for a perceptron")
# label.pack(pady=10, padx=10)
label = tk.Button(self, text="Error ")
label.pack(pady=10, padx=10, side = tk.RIGHT)
self.canvas = tk.Canvas(self, width=BOARD_WIDTH, height=BOARD_HEIGHT)
self.canvas.pack()
self.images = {}
for image_file_name in TILES:
f = os.path.join(DATA_DIR, TILES[image_file_name])
if not os.path.exists(f):
print("Error: Cannot find image file: %s at %s - aborting" % (TILES[image_file_name], f))
exit(-1)
self.images[image_file_name] = PhotoImage(file=f)
'''This opens each of the image files, converts the data into a form that Tkinter
can use, then stores that converted form in the attribute self.images
self.images is a dictionary, keyed by the letters we used in our model to
represent the pieces - ie PRNBKQ for white and prnbkq for black
eg self.images['N'] is a PhotoImage of a white knight
this means we can directly translate a board entry from the model into a picture
'''
self.pack()
def clear_canvas(self):
''' delete everything from the canvas'''
items = self.canvas.find_all()
for i in items:
self.canvas.delete(i)
def draw_row(self, y, first_tile_white=True, debug_board=False):
''' draw a single row of alternating black and white tiles,
the colour of the first tile is determined by first_tile_white
if debug_board is set show the coordinates of each of the tile corners
'''
if first_tile_white:
remainder = 1
else:
remainder = 0
for i in range(8):
x = i * TILE_WIDTH
if i % 2 == remainder:
# i %2 is the remainder after dividing i by 2
# so i%2 will always be either 0 (no remainder- even numbers) or
# 1 (remainder 1 - odd numbers)
# this tests whether the number i is even or odd
tile = self.images['black_tile']
else:
tile = self.images['white_tile']
self.canvas.create_image(x, y, anchor=tk.NW, image=tile)
# NW is a constant in the Tkinter module. It stands for "north west"
# that is, the top left corner of the picture is to be located at x,y
# if we used another anchor, the grid would not line up properly with
# the canvas size
if debug_board: # implicitly this means if debug_board == True.
''' If we are drawing a debug board, draw an arrow showing top left
and its coordinates. '''
text_pos = (x + TILE_WIDTH / 2, y + TILE_WIDTH / 2)
line_end = (x + TILE_WIDTH / 4, y + TILE_WIDTH / 4)
self.canvas.create_line((x, y), line_end, arrow=tk.FIRST)
text_content = "(%s,%s)" % (x, y)
self.canvas.create_text(text_pos, text=text_content)
def draw_empty_board(self, debug_board=False):
''' draw an empty board on the canvas
if debug_board is set show the coordinates of each of the tile corners'''
y = 0
for i in range(8): # draw 8 rows
y = i * TILE_WIDTH
# each time, advance the y value at which the row is drawn
# by the length of the tile
first_tile_white = not (i % 2)
self.draw_row(y, first_tile_white, debug_board)
def draw_pieces(self, board):
for i, row in enumerate(board):
# using enumerate we get an integer index
# for each row which we can use to calculate y
# because rows run down the screen, they correspond to the y axis
# and the columns correspond to the x axis
for j, piece in enumerate(row):
if piece == EMPTY_SQUARE:
continue # skip empty tiles
tile = self.images[piece]
x = j * TILE_WIDTH
y = i * TILE_WIDTH
self.canvas.create_image(x, y, anchor=tk.NW, image=tile)
def display(self, board, debug_board=False):
''' draw an empty board then draw each of the
pieces in the board over the top'''
self.clear_canvas()
self.draw_empty_board(debug_board=debug_board)
if not debug_board:
self.draw_pieces(board)
# first draw the empty board
# then draw the pieces
# if the order was reversed, the board would be drawn over the pieces
# so we couldn't see them
def display_debug_board(self):
self.clear_canvas()
self.draw_empty_board()
class Controller(object):
def __init__(self, parent=None, model=None):
if model is None:
self.m = Model()
else:
self.m = model
self.v = View(parent)
''' we have created both a model and a view within the controller
the controller doesn't inherit from either model or view
'''
self.v.canvas.bind("<Button-1>", self.handle_click)
# this binds the handle_click method to the view's canvas for left button down
self.clickList = []
# I have kept clickList here, and not in the model, because it is a record of what is happening
# in the view (ie click events) rather than something that the model deals with (eg moves).
def run(self, debug_mode=False):
self.update_display(debug_board=debug_mode)
tk.mainloop()
def handle_click(self, event):
''' Handle a click received. The x,y location of the click on the canvas is at
(event.x, event.y)
First, we need to translate the event coordinates (ie the x,y of where the click occurred)
into a position on the chess board
add this to a list of clicked positions
        every first click is treated as a "from" and every second click as a "to",
        so, whenever there is an even number of clicks, use the most recent two to perform a move
then update the display
'''
j = event.x / TILE_WIDTH
# the / operator is called integer division
# it returns the number of times TILE_WIDTH goes into event.x ignoring any remainder
# eg: 2/2 = 1, 3/2 = 1, 11/5 = 2 and so on
# so, it should return a number between 0 (if x < TILE_WIDTH) though to 7
i = event.y / TILE_WIDTH
self.clickList.append(BoardLocation(i, j))
# just maintain a list of all of the moves
# this list shouldn't be used to replay a series of moves because that is something
# which should be stored in the model - but it wouldn't be much trouble to
# keep a record of moves in the model.
if len(self.clickList) % 2 == 0:
# move complete, execute the move
self.m.move(self.clickList[-2], self.clickList[-1])
# use the second last entry in the clickList and the last entry in the clickList
self.update_display()
def update_display(self, debug_board=False):
self.v.display(self.m.board, debug_board=debug_board)
def parse_move(self, move):
''' Very basic move parsing
given a move in the form ab-cd where a and c are in [a,b,c,d,e,f,g,h]
and b and d are numbers from 1 to 8 convert into BoardLocation instances
for start (ab) and destination (cd)
Does not deal with castling (ie 0-0 or 0-0-0) or bare pawn moves (e4)
or capture d4xe5 etc
No error checking! very fragile
'''
s, d = move.split("-")
i = 8 - int(s[-1]) # board is "upside down" with reference to the representation
j = column_reference.index(s[0])
start = BoardLocation(i, j)
i = 8 - int(d[-1])
j = column_reference.index(d[0])
destination = BoardLocation(i, j)
return start, destination
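# Example (illustrative, not part of the original file): c.parse_move("e2-e4") returns
# BoardLocation(6, 4) as the start and BoardLocation(4, 4) as the destination, because
# model row 0 corresponds to chess rank 8.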
if __name__ == "__main__":
if not os.path.exists(DATA_DIR):
''' basic check - if there are files missing from the data directory, the
program will still fail '''
dl = raw_input("Cannot find chess images directory. Download from website? (Y/n)")
if dl.lower() == "n":
print("No image files found, quitting.")
exit(0)
print("Creating directory: %s" % os.path.join(os.getcwd(), DATA_DIR))
import urllib
os.mkdir(DATA_DIR)
url_format = "https://python4kids.files.wordpress.com/2013/04/%s"
for k, v in TILES.items():
url = url_format % v
target_filename = os.path.join(DATA_DIR, v)
print("Downloading file: %s" % v)
urllib.urlretrieve(url, target_filename)
parent = tk.Tk()
c = Controller(parent)
c.run(debug_mode=False) | mit | -8,434,570,687,873,066,000 | 38.373984 | 105 | 0.581016 | false |
Azure/azure-sdk-for-python | sdk/edgegateway/azure-mgmt-edgegateway/azure/mgmt/edgegateway/models/arm_base_model.py | 1 | 1384 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ARMBaseModel(Model):
"""Represents the base class for all object models.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: The path ID that uniquely identifies the object.
:vartype id: str
:ivar name: The object name.
:vartype name: str
:ivar type: The hierarchical type of the object.
:vartype type: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(self, **kwargs):
super(ARMBaseModel, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
| mit | -4,780,398,978,749,978,000 | 29.755556 | 76 | 0.547688 | false |
lightd22/smartDraft | src/data/database_ops.py | 1 | 15164 | import sqlite3
import re
from .champion_info import champion_id_from_name,champion_name_from_id, convert_champion_alias, AliasException
regionsDict = {"NA_LCS":"NA", "EU_LCS":"EU", "LCK":"LCK", "LPL":"LPL",
"LMS":"LMS", "International":"INTL", "NA_ACA": "NA_ACA", "KR_CHAL":"KR_CHAL", "LDL":"LDL"}
internationalEventsDict = {"Mid-Season_Invitational":"MSI",
"Rift_Rivals":"RR","World_Championship":"WRLDS"}
def get_matches_by_id(match_ids, path):
"""
Returns match data for each match_id in the list match_ids
"""
conn = sqlite3.connect(path)
cur = conn.cursor()
match_data = []
for match_id in match_ids:
match = get_match_data(cur, match_id)
match_data.append(match)
conn.close()
return match_data
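# Illustrative call (the ids and database path below are assumptions):
#   matches = get_matches_by_id([101, 102], "competitiveGameData.db")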
def get_game_ids_by_tournament(cursor, tournament, patch=None):
"""
    get_game_ids_by_tournament queries the connected db for game ids which match the
    input tournament string.
    Args:
        cursor (sqlite cursor): cursor used to execute commands
tournament (string): id string for tournament (ie "2017/EU/Summer_Split")
patch (string, optional): id string for patch to additionally filter
Returns:
gameIds (list(int)): list of gameIds
"""
if patch:
query = "SELECT id FROM game WHERE tournament=? AND patch=? ORDER BY id"
params = (tournament, patch)
else:
query = "SELECT id FROM game WHERE tournament=? ORDER BY id"
params = (tournament,)
cursor.execute(query, params)
response = cursor.fetchall()
vals = []
for r in response:
vals.append(r[0])
return vals
def get_game_ids(cursor, tournament=None, patch=None):
"""
get_game_ids queries the connected db for game ids which match the
input tournament and patch strings.
Args:
        cursor (sqlite cursor): cursor used to execute commands
tournament (string, optional): id string for tournament (ie "2017/EU/Summer_Split")
patch (string, optional): id string for patch to filter for
Returns:
gameIds (list(int)): list of gameIds
"""
if not patch and not tournament:
return []
params = ()
where_clause = []
if tournament:
where_clause.append("tournament=?")
params += (tournament,)
if patch:
where_clause.append("patch=?")
params += (patch,)
query = "SELECT id FROM game WHERE {where_clause} ORDER BY id".format(where_clause=" AND ".join(where_clause))
cursor.execute(query, params)
response = cursor.fetchall()
vals = []
for r in response:
vals.append(r[0])
return vals
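# Illustrative call (the patch string is an assumption); filtering on both fields builds
# "SELECT id FROM game WHERE tournament=? AND patch=? ORDER BY id":
#   ids = get_game_ids(cur, tournament="2017/EU/Summer_Split", patch="7.18")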
def get_match_data(cursor, gameId):
"""
get_match_data queries the connected db for draft data and organizes it into a more convenient
format.
Args:
        cursor (sqlite cursor): cursor used to execute commands
gameId (int): primary key of game to process
Returns:
match (dict): formatted pick/ban phase data for game
"""
match = {"id": gameId ,"winner": None, "blue":{}, "red":{}, "blue_team":None, "red_team":None, "header_id":None, "patch":None}
# Get winning team
query = "SELECT tournament, tourn_game_id, week, patch, winning_team FROM game WHERE id=?"
params = (gameId,)
cursor.execute(query, params)
match["tournament"], match["tourn_game_id"], match["header_id"], match["patch"], match["winner"] = cursor.fetchone()#[0]
# Get ban data
query = "SELECT champion_id, selection_order FROM ban WHERE game_id=? and side_id=? ORDER BY selection_order"
params = (gameId,0)
cursor.execute(query, params)
match["blue"]["bans"] = list(cursor.fetchall())
query = "SELECT champion_id, selection_order FROM ban WHERE game_id=? and side_id=? ORDER BY selection_order"
params = (gameId,1)
cursor.execute(query, params)
match["red"]["bans"] = list(cursor.fetchall())
# Get pick data
query = "SELECT champion_id, position_id, selection_order FROM pick WHERE game_id=? AND side_id=? ORDER BY selection_order"
params = (gameId,0)
cursor.execute(query, params)
match["blue"]["picks"] = list(cursor.fetchall())
query = "SELECT champion_id, position_id, selection_order FROM pick WHERE game_id=? AND side_id=? ORDER BY selection_order"
params = (gameId,1)
cursor.execute(query, params)
match["red"]["picks"] = list(cursor.fetchall())
query = "SELECT display_name FROM team JOIN game ON team.id = blue_teamid WHERE game.id = ?"
params = (gameId,)
cursor.execute(query, params)
match["blue_team"] = cursor.fetchone()[0]
query = "SELECT display_name FROM team JOIN game ON team.id = red_teamid WHERE game.id = ?"
params = (gameId,)
cursor.execute(query, params)
match["red_team"] = cursor.fetchone()[0]
return match
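# Illustrative shape of the dict returned above (values are assumptions):
#   {"id": 42, "tournament": "2017/EU/Summer_Split", "tourn_game_id": 3, "header_id": 1,
#    "patch": "7.14", "winner": 0, "blue_team": "Team A", "red_team": "Team B",
#    "blue": {"bans": [(champion_id, selection_order), ...],
#             "picks": [(champion_id, position_id, selection_order), ...]},
#    "red": {...}}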
def get_tournament_data(gameData):
"""
get_tournament_data cleans up and combines the region/year/tournament fields in gameData for entry into
the game table. When combined with the game_id field it uniquely identifies the match played.
The format of tournamentData output is 'year/region_abbrv/tournament' (forward slash delimiters)
Args:
        gameData (dict): dictionary output from query_wiki()
Returns:
tournamentData (string): formatted and cleaned region/year/split data
"""
tournamentData = "/".join([gameData["year"], regionsDict[gameData["region"]], gameData["tournament"]])
return tournamentData
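# Example (illustrative): {"year": "2017", "region": "EU_LCS", "tournament": "Summer_Split"}
# is formatted as "2017/EU/Summer_Split" via regionsDict.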
def get_game_id(cursor,gameData):
"""
get_game_id looks in the game table for an entry with matching tournament and tourn_game_id as the input
gameData and returns the id field. If no such entry is found, it adds this game to the game table and returns the
id field.
Args:
        cursor (sqlite cursor): cursor used to execute commands
gameData (dict): dictionary output from query_wiki()
Returns:
gameId (int): Primary key in game table corresponding to this gameData
"""
tournament = get_tournament_data(gameData)
vals = (tournament,gameData["tourn_game_id"])
gameId = None
while gameId is None:
cursor.execute("SELECT id FROM game WHERE tournament=? AND tourn_game_id=?", vals)
gameId = cursor.fetchone()
if gameId is None:
print("Warning: Game not found. Attempting to add game.")
            err = insert_game(cursor, [gameData])
else:
gameId = gameId[0]
return gameId
def delete_game_from_table(cursor, game_ids, table_name):
"""
Deletes rows corresponding to game_id from table table_name.
Args:
        cursor (sqlite cursor): cursor used to execute commands
game_ids (list(int)): game_ids to be removed from table
table_name (string): name of table to remove rows from
Returns:
status (int): status = 1 if delete was successful, otherwise status = 0
"""
status = 0
assert isinstance(game_ids,list), "game_ids is not a list"
for game_id in game_ids:
query = "SELECT count(*) FROM {table_name} WHERE game_id=?".format(table_name=table_name)
vals = (game_id,)
cursor.execute(query, vals)
print("Found {count} rows for game_id={game_id} to delete from table {table}".format(count=cursor.fetchone()[0], game_id=game_id, table=table_name))
query = "DELETE FROM {table_name} WHERE game_id=?".format(table_name=table_name)
cursor.execute(query, vals)
status = 1
return status
def insert_game(cursor, gameData):
"""
insert_game attempts to format collected gameData from query_wiki() and insert
into the game table in the competitiveGameData.db.
Args:
        cursor (sqlite cursor): cursor used to execute commands
gameData (list(dict)): list of dictionary output from query_wiki()
Returns:
status (int): status = 1 if insert was successful, otherwise status = 0
"""
status = 0
assert isinstance(gameData,list), "gameData is not a list"
for game in gameData:
tournGameId = game["tourn_game_id"] # Which game this is within current tournament
tournamentData = get_tournament_data(game)
# Check to see if game data is already in table
vals = (tournamentData,tournGameId)
cursor.execute("SELECT id FROM game WHERE tournament=? AND tourn_game_id=?", vals)
result = cursor.fetchone()
if result is not None:
print("game {} already exists in table.. skipping".format(result[0]))
else:
# Get blue and red team_ids
blueTeamId = None
redTeamId = None
while (blueTeamId is None or redTeamId is None):
cursor.execute("SELECT id FROM team WHERE display_name=?",(game["blue_team"],))
blueTeamId = cursor.fetchone()
cursor.execute("SELECT id FROM team WHERE display_name=?",(game["red_team"],))
redTeamId = cursor.fetchone()
if (blueTeamId is None) or (redTeamId is None):
print("*WARNING: When inserting game-- team not found. Attempting to add teams")
err = insert_team(cursor, [game])
else:
blueTeamId = blueTeamId[0]
redTeamId = redTeamId[0]
winner = game["winning_team"]
header_id = game["header_id"]
patch = game["patch"]
vals = (tournamentData, tournGameId, header_id, patch, blueTeamId, redTeamId, winner)
cursor.execute("INSERT INTO game(tournament, tourn_game_id, week, patch, blue_teamid, red_teamid, winning_team) VALUES(?,?,?,?,?,?,?)", vals)
status = 1
return status
def insert_team(cursor, gameData):
"""
insert_team attempts to format collected gameData from query_wiki() and insert
into the team table in the competitiveGameData.db.
Args:
        cursor (sqlite cursor): cursor used to execute commands
        gameData (list(dict)): list of dictionary output from query_wiki()
Returns:
status (int): status = 1 if insert was successful, otherwise status = 0
"""
status = 0
assert isinstance(gameData,list), "gameData is not a list"
for game in gameData:
# We don't track all regions (i.e wildcard regions), but they can still appear at
# international tournaments. When this happens we will track the team, but list their
# region as NULL.
        if game["region"] == "International":
region = None
else:
region = regionsDict[game["region"]]
teams = [game["blue_team"], game["red_team"]]
for team in teams:
vals = (region,team)
            # This only looks for matching display names... what happens if there's an
            # NA TSM and an EU TSM?
cursor.execute("SELECT * FROM team WHERE display_name=?", (team,))
result = cursor.fetchone()
if result is None:
cursor.execute("INSERT INTO team(region, display_name) VALUES(?,?)", vals)
status = 1
return status
def insert_ban(cursor, gameData):
"""
insert_ban attempts to format collected gameData from query_wiki() and insert into the
ban table in the competitiveGameData.db.
Args:
        cursor (sqlite cursor): cursor used to execute commands
gameData (list(dict)): dictionary output from query_wiki()
Returns:
status (int): status = 1 if insert was successful, otherwise status = 0
"""
status = 0
assert isinstance(gameData,list), "gameData is not a list"
teams = ["blue", "red"]
for game in gameData:
tournament = get_tournament_data(game)
vals = (tournament,game["tourn_game_id"])
gameId = get_game_id(cursor,game)
# Check for existing entries in table. Skip if they already exist.
cursor.execute("SELECT game_id FROM ban WHERE game_id=?",(gameId,))
result = cursor.fetchone()
if result is not None:
print("Bans for game {} already exists in table.. skipping".format(result[0]))
else:
for k in range(len(teams)):
bans = game["bans"][teams[k]]
selectionOrder = 0
side = k
for ban in bans:
if ban in ["lossofban","none"]:
# Special case if no ban was submitted in game
banId = None
else:
# print("ban={}".format(ban))
banId = champion_id_from_name(ban)
# If no such champion name is found, try looking for an alias
if banId is None:
banId = champion_id_from_name(convert_champion_alias(ban))
selectionOrder += 1
vals = (gameId,banId,selectionOrder,side)
cursor.execute("INSERT INTO ban(game_id, champion_id, selection_order, side_id) VALUES(?,?,?,?)", vals)
status = 1
return status
def insert_pick(cursor, gameData):
"""
insert_pick formats collected gameData from query_wiki() and inserts it into the pick table of the
competitiveGameData.db.
Args:
        cursor (sqlite cursor): cursor used to execute commands
gameData (list(dict)): list of formatted game data from query_wiki()
Returns:
status (int): status = 1 if insert was successful, otherwise status = 0
"""
status = 0
assert isinstance(gameData,list), "gameData is not a list"
teams = ["blue", "red"]
for game in gameData:
tournament = get_tournament_data(game)
vals = (tournament,game["tourn_game_id"])
gameId = get_game_id(cursor,game)
# Check for existing entries in table. Skip if they already exist.
cursor.execute("SELECT game_id FROM pick WHERE game_id=?",(gameId,))
result = cursor.fetchone()
if result is not None:
print("Picks for game {} already exists in table.. skipping".format(result[0]))
else:
for k in range(len(teams)):
picks = game["picks"][teams[k]]
selectionOrder = 0
side = k
for (pick,position) in picks:
if pick in ["lossofpick","none"]:
# Special case if no pick was submitted to game (not really sure what that would mean
# but being consistent with insert_pick())
pickId = None
else:
pickId = champion_id_from_name(pick)
# If no such champion name is found, try looking for an alias
if pickId is None:
pickId = champion_id_from_name(convert_champion_alias(pick))
selectionOrder += 1
vals = (gameId,pickId,position,selectionOrder,side)
cursor.execute("INSERT INTO pick(game_id, champion_id, position_id, selection_order, side_id) VALUES(?,?,?,?,?)", vals)
status = 1
return status
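# Illustrative shape of one query_wiki() game dict consumed by the insert_* helpers above
# (team names and values are assumptions):
#   {"year": "2017", "region": "EU_LCS", "tournament": "Summer_Split", "tourn_game_id": 1,
#    "header_id": 1, "patch": "7.14", "blue_team": "Team A", "red_team": "Team B",
#    "winning_team": 0, "bans": {"blue": ["champ1", ...], "red": [...]},
#    "picks": {"blue": [("champ2", 1), ...], "red": [...]}}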
| apache-2.0 | -8,253,334,377,751,635,000 | 41.122222 | 156 | 0.615339 | false |
mikalstill/nova | nova/conf/cinder.py | 1 | 4043 | # Copyright (c) 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneauth1 import loading as ks_loading
from oslo_config import cfg
cinder_group = cfg.OptGroup(
'cinder',
title='Cinder Options',
help="Configuration options for the block storage")
cinder_opts = [
cfg.StrOpt('catalog_info',
default='volumev3::publicURL',
               regex=r'(\w+):(\w*):(.*?)',
help="""
Info to match when looking for cinder in the service catalog.
The ``<service_name>`` is optional and omitted by default since it should
not be necessary in most deployments.
Possible values:
* Format is separated values of the form:
<service_type>:<service_name>:<endpoint_type>
Note: Nova does not support the Cinder v2 API since the Nova 17.0.0 Queens
release.
Related options:
* endpoint_template - Setting this option will override catalog_info
"""),
cfg.StrOpt('endpoint_template',
help="""
If this option is set then it will override service catalog lookup with
this template for cinder endpoint
Possible values:
* URL for cinder endpoint API
e.g. http://localhost:8776/v3/%(project_id)s
Note: Nova does not support the Cinder v2 API since the Nova 17.0.0 Queens
release.
Related options:
* catalog_info - If endpoint_template is not set, catalog_info will be used.
"""),
cfg.StrOpt('os_region_name',
help="""
Region name of this node. This is used when picking the URL in the service
catalog.
Possible values:
* Any string representing region name
"""),
cfg.IntOpt('http_retries',
default=3,
min=0,
help="""
Number of times cinderclient should retry on any failed http call.
0 means connection is attempted only once. Setting it to any positive integer
means that on failure connection is retried that many times e.g. setting it
to 3 means total attempts to connect will be 4.
Possible values:
* Any integer value. 0 means connection is attempted only once
"""),
cfg.BoolOpt('cross_az_attach',
default=True,
help="""
Allow attach between instance and volume in different availability zones.
If False, volumes attached to an instance must be in the same availability
zone in Cinder as the instance availability zone in Nova.
This also means care should be taken when booting an instance from a volume
where source is not "volume" because Nova will attempt to create a volume using
the same availability zone as what is assigned to the instance.
If that AZ is not in Cinder (or allow_availability_zone_fallback=False in
cinder.conf), the volume create request will fail and the instance will fail
the build request.
By default there is no availability zone restriction on volume attach.
"""),
]
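# Illustrative nova.conf snippet exercising these options (the values are assumptions):
#   [cinder]
#   catalog_info = volumev3::publicURL
#   os_region_name = RegionOne
#   cross_az_attach = False
#   http_retries = 3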
def register_opts(conf):
conf.register_group(cinder_group)
conf.register_opts(cinder_opts, group=cinder_group)
ks_loading.register_session_conf_options(conf,
cinder_group.name)
ks_loading.register_auth_conf_options(conf, cinder_group.name)
def list_opts():
return {
cinder_group.name: (
cinder_opts +
ks_loading.get_session_conf_options() +
ks_loading.get_auth_common_conf_options() +
ks_loading.get_auth_plugin_conf_options('password') +
ks_loading.get_auth_plugin_conf_options('v2password') +
ks_loading.get_auth_plugin_conf_options('v3password'))
}
| apache-2.0 | 6,942,938,164,454,437,000 | 32.691667 | 79 | 0.697007 | false |
FedoraScientific/salome-paravis | test/VisuPrs/Plot3D/E3.py | 1 | 1493 | # Copyright (C) 2010-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : [email protected]
#
# This case corresponds to: /visu/Plot3D/E3 case
# Create Plot3D for all data of the given MED file
import sys
from paravistest import datadir, pictureext, get_picture_dir
from presentations import CreatePrsForFile, PrsTypeEnum
import pvserver as paravis
# Create presentations
myParavis = paravis.myParavis
# Directory for saving snapshots
picturedir = get_picture_dir("Plot3D/E3")
file = datadir + "hydro_d10.med"
print " --------------------------------- "
print "file ", file
print " --------------------------------- "
print "CreatePrsForFile..."
CreatePrsForFile(myParavis, file, [PrsTypeEnum.PLOT3D], picturedir, pictureext)
| lgpl-2.1 | 8,911,156,577,564,217,000 | 37.282051 | 81 | 0.731413 | false |